| column | type | values |
|---|---|---|
| hexsha | string | length 40-40 |
| size | int64 | 3 - 1.03M |
| ext | string | 10 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | length 3-972 |
| max_stars_repo_name | string | length 6-130 |
| max_stars_repo_head_hexsha | string | length 40-78 |
| max_stars_repo_licenses | list | length 1-10 |
| max_stars_count | int64 | 1 - 191k (⌀ = null allowed) |
| max_stars_repo_stars_event_min_datetime | string | length 24-24 (⌀) |
| max_stars_repo_stars_event_max_datetime | string | length 24-24 (⌀) |
| max_issues_repo_path | string | length 3-972 |
| max_issues_repo_name | string | length 6-130 |
| max_issues_repo_head_hexsha | string | length 40-78 |
| max_issues_repo_licenses | list | length 1-10 |
| max_issues_count | int64 | 1 - 116k (⌀) |
| max_issues_repo_issues_event_min_datetime | string | length 24-24 (⌀) |
| max_issues_repo_issues_event_max_datetime | string | length 24-24 (⌀) |
| max_forks_repo_path | string | length 3-972 |
| max_forks_repo_name | string | length 6-130 |
| max_forks_repo_head_hexsha | string | length 40-78 |
| max_forks_repo_licenses | list | length 1-10 |
| max_forks_count | int64 | 1 - 105k (⌀) |
| max_forks_repo_forks_event_min_datetime | string | length 24-24 (⌀) |
| max_forks_repo_forks_event_max_datetime | string | length 24-24 (⌀) |
| content | string | length 3 - 1.03M |
| avg_line_length | float64 | 1.13 - 941k |
| max_line_length | int64 | 2 - 941k |
| alphanum_fraction | float64 | 0 - 1 |
| hexsha: 742864bd2a489edccd1cf9060b5f76ff6dfe4c2a | size: 5,872 | ext: py | lang: Python |
| path: pipeline/cf_create_bq_layers/main.py | repo: allenday/bigquery-openstreetmap | head_hexsha: 3011c51d2ef129360607946819838dc33d33076f | licenses: ["BSD-3-Clause"] (same for stars/issues/forks) |
| max_stars_count: 10 (2020-01-22T14:49:08.000Z → 2022-02-15T15:54:20.000Z) | max_issues_count: 61 (2019-09-05T07:11:11.000Z → 2019-11-18T03:35:51.000Z) | max_forks_count: 3 (2019-08-21T08:00:59.000Z → 2020-03-27T22:52:12.000Z) |
"""
Creates OSM layers in BigQuery based on SQL scripts
"""
import os
import time
import logging
from typing import List
from google.cloud import bigquery
from google.cloud import storage
from google.api_core.exceptions import NotFound
from copy_public_tables import copy_tables_to_public_dataset
GCP_PROJECT = os.environ['GCP_PROJECT']
GCS_BUCKET = os.environ['GCS_BUCKET'].replace('gs://', '')
BQ_SOURCE_DATASET = os.environ['BQ_SOURCE_DATASET']
BQ_TEMP_DATASET = os.environ['BQ_TEMP_DATASET']
BQ_TARGET_DATASET = os.environ['BQ_TARGET_DATASET']
BQ_LAYERS_TABLE = os.environ['BQ_LAYERS_TABLE']
bq = bigquery.Client(project=GCP_PROJECT)
temp_dataset_ref = bigquery.DatasetReference(GCP_PROJECT, BQ_TEMP_DATASET)
temp_table_ref = bigquery.TableReference(temp_dataset_ref, BQ_LAYERS_TABLE)
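# Per-layer query results are appended to this temporary table and later copied into the target dataset.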
def create_temp_dataset():
"""Creates temporary dataset"""
bq.create_dataset(temp_dataset_ref, exists_ok=True)
def delete_temp_dataset():
"""Deletes temporary BigQuery dataset and table"""
bq.delete_dataset(temp_dataset_ref, delete_contents=True, not_found_ok=True)
def get_queries() -> List[str]:
"""gets SQL query files from Cloud Storage bucket. It expects them to be in "folder" layered_gis
:returns list of SQL queries
"""
logging.info("getting query files")
gcs = storage.Client(project=GCP_PROJECT)
bucket = gcs.bucket(GCS_BUCKET)
blobs = bucket.list_blobs(prefix='layered_gis')
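# Each SQL file lives under layered_gis/<layer>/; the files of a layer are concatenated below with UNION ALL.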
queries = {}
for blob in blobs:
blob_name = blob.name
if '.sh' in blob_name:
continue
filename = blob_name.replace('layered_gis/', '')
layer, _ = filename.split('/')
sql_query = blob.download_as_string().decode('utf-8')
full_query = queries.get(layer, '')
if full_query:
full_query += 'UNION ALL \n'
full_query += sql_query + '\n'
queries[layer] = full_query
return list(queries.values())
def create_query_jobs(queries: List[str]):
"""Runs queries for concrete layers and save results in temporary file"""
logging.info("creating BQ query jobs")
for sql_query in queries:
job_config = bigquery.QueryJobConfig()
job_config.create_disposition = bigquery.CreateDisposition.CREATE_IF_NEEDED
job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND
job_config.destination = temp_table_ref
job_config.allow_large_results = True
query_job = bq.query(sql_query, job_config=job_config, location='US')
def wait_jobs_completed():
"""Checks if all BigQuery jobs are completed so it can copy temp table"""
logging.info("checking jobs")
time.sleep(30)
while True:
running_jobs = []
for job in bq.list_jobs(state_filter='RUNNING', all_users=True):
running_jobs.append(job)
logging.info("running jobs {}".format(len(running_jobs)))
if not running_jobs:
break
time.sleep(30)
def create_features_table():
"""creates 'features' table which is union of all 5 tables"""
table_name = 'features'
sql_query = f"""CREATE OR REPLACE TABLE `{GCP_PROJECT}.{BQ_TEMP_DATASET}.{table_name}`
AS
SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'point' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.points`
UNION ALL
SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'line' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.lines`
UNION ALL
SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'multilinestring' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.multilinestrings`
UNION ALL
SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'multipolygon' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.multipolygons`
UNION ALL
SELECT COALESCE(osm_id, osm_way_id) AS osm_id, osm_version, osm_timestamp, 'other_relation' AS feature_type, all_tags, geometry FROM `{GCP_PROJECT}.{BQ_SOURCE_DATASET}.other_relations`
"""
query_job = bq.query(sql_query)
def create_layers_table():
"""Creates layer partitioned table with queries, not yet partitioned"""
table_name = f"{BQ_LAYERS_TABLE}"
def deploy_features_table():
"""Copy the UNION temp features table to final destination"""
logging.info("copy table")
target_dataset_ref = bigquery.DatasetReference(GCP_PROJECT, BQ_TARGET_DATASET)
target_table_ref = bigquery.TableReference(target_dataset_ref, 'features')
copyjob_config = bigquery.CopyJobConfig()
copyjob_config.create_disposition = bigquery.CreateDisposition.CREATE_IF_NEEDED
copyjob_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE
bq.copy_table(temp_table_ref, target_table_ref, job_config=copyjob_config)
def deploy_layers_table():
"""Copy and partition the layers table"""
table_name = f"{BQ_LAYERS_TABLE}"
sql_query = f"""CREATE OR REPLACE TABLE `{GCP_PROJECT}.{BQ_TEMP_DATASET}.{table_name}`
PARTITION BY layer_partition
AS
SELECT *,
`{GCP_PROJECT}.{BQ_TARGET_DATASET}`.layer_partition(name) as layer_partition
FROM `{GCP_PROJECT}.{BQ_TEMP_DATASET}.{BQ_LAYERS_TABLE}`"""
job_config = bigquery.QueryJobConfig()
query_job = bq.query(sql_query, job_config=job_config)
def process():
"""Complete flow"""
create_temp_dataset()
queries = get_queries()
create_query_jobs(queries)
wait_jobs_completed()
create_features_table()
create_layers_table()
deploy_features_table()
deploy_layers_table()
#delete_temp_dataset()
#copy_tables_to_public_dataset()
def main(data, context):
process()
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
# process()
| avg_line_length: 34.541176 | max_line_length: 190 | alphanum_fraction: 0.729053 |

| hexsha: 73db7cf58ff061847268802673a43a2c5ac21c2a | size: 1,192 | ext: py | lang: Python |
| path: Managers/CollectionManager.py | repo: fefelson/MLBProjections | head_hexsha: 444a1c071cb7b1e21eedc49cf63ae91e80d37978 | licenses: ["MIT"] (same for stars/issues/forks) |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
import os
import MLBProjections.MLBProjections.Environ as ENV
################################################################################
################################################################################
################################################################################
################################################################################
class CollectionManager:

    def getMatchupFiles(self, gameDate):
        matchFiles = []
        filePath = "/".join(ENV.getPath("matchup", gameDate=gameDate).split("/")[:-1])
        if os.path.exists(filePath):
            for fileName in [filePath+"/"+fileName for fileName in os.listdir(filePath) if "M" in fileName]:
                matchFiles.append(ENV.getJsonInfo(fileName))
        return matchFiles

    def getSingleFile(self, key, fileName):
        info = {}
        filePath = ENV.getPath(key, fileName=fileName)
        if os.path.exists(filePath):
            info = ENV.getJsonInfo(filePath)
        return info
################################################################################
################################################################################
| avg_line_length: 31.368421 | max_line_length: 108 | alphanum_fraction: 0.373322 |

| hexsha: 6e5d70920668cb4e728fb076dcab5e0566d89980 | size: 16,310 | ext: py | lang: Python |
| path: train_openfold.py | repo: ychnh/openfold | head_hexsha: e61a00d063c0f2d939d24963929cea2b413d3e8e | licenses: ["Apache-2.0"] (same for stars/issues/forks) |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
import argparse
import logging
import os
#os.environ["CUDA_VISIBLE_DEVICES"] = "0"
#os.environ["MASTER_ADDR"]="10.119.81.14"
#os.environ["MASTER_PORT"]="42069"
#os.environ["NODE_RANK"]="0"
import random
import time
import numpy as np
import pytorch_lightning as pl
from pytorch_lightning.callbacks.lr_monitor import LearningRateMonitor
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.plugins.training_type import DeepSpeedPlugin, DDPPlugin
from pytorch_lightning.plugins.environments import SLURMEnvironment
import torch
from openfold.config import model_config
from openfold.data.data_modules import (
OpenFoldDataModule,
DummyDataLoader,
)
from openfold.model.model import AlphaFold
from openfold.model.torchscript import script_preset_
from openfold.np import residue_constants
from openfold.utils.callbacks import (
EarlyStoppingVerbose,
)
from openfold.utils.exponential_moving_average import ExponentialMovingAverage
from openfold.utils.argparse import remove_arguments
from openfold.utils.loss import AlphaFoldLoss, lddt_ca, compute_drmsd
from openfold.utils.seed import seed_everything
from openfold.utils.superimposition import superimpose
from openfold.utils.tensor_utils import tensor_tree_map
from openfold.utils.validation_metrics import (
gdt_ts,
gdt_ha,
)
from scripts.zero_to_fp32 import (
get_fp32_state_dict_from_zero_checkpoint
)
from openfold.utils.logger import PerformanceLoggingCallback
class OpenFoldWrapper(pl.LightningModule):
def __init__(self, config):
super(OpenFoldWrapper, self).__init__()
self.config = config
self.model = AlphaFold(config)
self.loss = AlphaFoldLoss(config.loss)
self.ema = ExponentialMovingAverage(
model=self.model, decay=config.ema.decay
)
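# The EMA weights are swapped in during validation (see validation_step) and persisted via on_save_checkpoint.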
self.cached_weights = None
self.last_lr_step = 0
def forward(self, batch):
return self.model(batch)
def training_step(self, batch, batch_idx):
if(self.ema.device != batch["aatype"].device):
self.ema.to(batch["aatype"].device)
# Run the model
outputs = self(batch)
# Remove the recycling dimension
batch = tensor_tree_map(lambda t: t[..., -1], batch)
# Compute loss
loss, loss_breakdown = self.loss(
outputs, batch, _return_breakdown=True
)
# Log it
self.log(
"train/loss",
loss,
on_step=True, logger=True,
)
self.log(
"train/loss_epoch",
loss,
on_step=False, on_epoch=True, logger=True,
)
for loss_name, indiv_loss in loss_breakdown.items():
self.log(
f"train/{loss_name}",
indiv_loss,
on_step=True, logger=True,
)
with torch.no_grad():
other_metrics = self.compute_validation_metrics(batch, outputs)
for k,v in other_metrics.items():
self.log(f"train/{k}", v, on_step=False, on_epoch=True, logger=True)
return loss
def on_before_zero_grad(self, *args, **kwargs):
self.ema.update(self.model)
# def training_step_end(self, outputs):
# # Temporary measure to address DeepSpeed scheduler bug
# if(self.trainer.global_step != self.last_lr_step):
# self.lr_schedulers().step()
# self.last_lr_step = self.trainer.global_step
def validation_step(self, batch, batch_idx):
# At the start of validation, load the EMA weights
if(self.cached_weights is None):
self.cached_weights = self.model.state_dict()
self.model.load_state_dict(self.ema.state_dict()["params"])
# Run the model
outputs = self(batch)
batch = tensor_tree_map(lambda t: t[..., -1], batch)
# Compute loss and other metrics
batch["use_clamped_fape"] = 0.
loss, loss_breakdown = self.loss(
outputs, batch, _return_breakdown=True
)
self.log("val/loss", loss, on_step=False, on_epoch=True, logger=True)
for loss_name, indiv_loss in loss_breakdown.items():
self.log(
f"val/{loss_name}",
indiv_loss,
on_step=False, on_epoch=True, logger=True,
)
other_metrics = self.compute_validation_metrics(
batch, outputs, superimposition_metrics=True,
)
for k,v in other_metrics.items():
self.log(f"val/{k}", v, on_step=False, on_epoch=True, logger=True)
def validation_epoch_end(self, _):
# Restore the model weights to normal
self.model.load_state_dict(self.cached_weights)
self.cached_weights = None
def compute_validation_metrics(self,
batch,
outputs,
superimposition_metrics=False
):
metrics = {}
gt_coords = batch["all_atom_positions"]
pred_coords = outputs["final_atom_positions"]
all_atom_mask = batch["all_atom_mask"]
# This is super janky for superimposition. Fix later
gt_coords_masked = gt_coords * all_atom_mask[..., None]
pred_coords_masked = pred_coords * all_atom_mask[..., None]
ca_pos = residue_constants.atom_order["CA"]
gt_coords_masked_ca = gt_coords_masked[..., ca_pos, :]
pred_coords_masked_ca = pred_coords_masked[..., ca_pos, :]
all_atom_mask_ca = all_atom_mask[..., ca_pos]
lddt_ca_score = lddt_ca(
pred_coords,
gt_coords,
all_atom_mask,
eps=self.config.globals.eps,
per_residue=False,
)
metrics["lddt_ca"] = lddt_ca_score
drmsd_ca_score = compute_drmsd(
pred_coords_masked_ca,
gt_coords_masked_ca,
mask=all_atom_mask_ca,
)
metrics["drmsd_ca"] = drmsd_ca_score
if(superimposition_metrics):
superimposed_pred, _ = superimpose(
gt_coords_masked_ca, pred_coords_masked_ca
)
gdt_ts_score = gdt_ts(
superimposed_pred, gt_coords_masked_ca, all_atom_mask_ca
)
gdt_ha_score = gdt_ha(
superimposed_pred, gt_coords_masked_ca, all_atom_mask_ca
)
metrics["gdt_ts"] = gdt_ts_score
metrics["gdt_ta"] = gdt_ha_score
return metrics
def configure_optimizers(self,
learning_rate: float = 1e-3,
eps: float = 1e-5,
) -> torch.optim.Adam:
# Ignored as long as a DeepSpeed optimizer is configured
return torch.optim.Adam(
self.model.parameters(),
lr=learning_rate,
eps=eps
)
def on_load_checkpoint(self, checkpoint):
self.ema.load_state_dict(checkpoint["ema"])
def on_save_checkpoint(self, checkpoint):
checkpoint["ema"] = self.ema.state_dict()
def main(args):
if(args.seed is not None):
seed_everything(args.seed)
config = model_config(
"initial_training",
train=True,
low_prec=(args.precision == "16")
)
model_module = OpenFoldWrapper(config)
if(args.resume_from_ckpt and args.resume_model_weights_only):
sd = get_fp32_state_dict_from_zero_checkpoint(args.resume_from_ckpt)
sd = {k[len("module."):]:v for k,v in sd.items()}
model_module.load_state_dict(sd)
logging.info("Successfully loaded model weights...")
# TorchScript components of the model
if(args.script_modules):
script_preset_(model_module)
#data_module = DummyDataLoader("new_batch.pickle")
data_module = OpenFoldDataModule(
config=config.data,
batch_seed=args.seed,
**vars(args)
)
data_module.prepare_data()
data_module.setup()
callbacks = []
if(args.checkpoint_every_epoch):
mc = ModelCheckpoint(
every_n_epochs=1,
)
callbacks.append(mc)
if(args.early_stopping):
es = EarlyStoppingVerbose(
monitor="val/lddt_ca",
min_delta=args.min_delta,
patience=args.patience,
verbose=False,
mode="max",
check_finite=True,
strict=True,
)
callbacks.append(es)
if(args.log_performance):
global_batch_size = args.num_nodes * args.gpus
perf = PerformanceLoggingCallback(
log_file=os.path.join(args.output_dir, "performance_log.json"),
global_batch_size=global_batch_size,
)
callbacks.append(perf)
if(args.log_lr):
lr_monitor = LearningRateMonitor(logging_interval="step")
callbacks.append(lr_monitor)
loggers = []
if(args.wandb):
wdb_logger = WandbLogger(
name=args.experiment_name,
save_dir=args.output_dir,
id=args.wandb_id,
project=args.wandb_project,
**{"entity": args.wandb_entity}
)
loggers.append(wdb_logger)
if(args.deepspeed_config_path is not None):
strategy = DeepSpeedPlugin(
config=args.deepspeed_config_path,
)
if(args.wandb):
wdb_logger.experiment.save(args.deepspeed_config_path)
wdb_logger.experiment.save("openfold/config.py")
elif (args.gpus is not None and args.gpus > 1) or args.num_nodes > 1:
strategy = DDPPlugin(find_unused_parameters=False)
else:
strategy = None
trainer = pl.Trainer.from_argparse_args(
args,
default_root_dir=args.output_dir,
strategy=strategy,
callbacks=callbacks,
logger=loggers,
)
if(args.resume_model_weights_only):
ckpt_path = None
else:
ckpt_path = args.resume_from_ckpt
trainer.fit(
model_module,
datamodule=data_module,
ckpt_path=ckpt_path,
)
def bool_type(bool_str: str):
bool_str_lower = bool_str.lower()
if bool_str_lower in ('false', 'f', 'no', 'n', '0'):
return False
elif bool_str_lower in ('true', 't', 'yes', 'y', '1'):
return True
else:
raise ValueError(f'Cannot interpret {bool_str} as bool')
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"train_data_dir", type=str,
help="Directory containing training mmCIF files"
)
parser.add_argument(
"train_alignment_dir", type=str,
help="Directory containing precomputed training alignments"
)
parser.add_argument(
"template_mmcif_dir", type=str,
help="Directory containing mmCIF files to search for templates"
)
parser.add_argument(
"output_dir", type=str,
help='''Directory in which to output checkpoints, logs, etc. Ignored
if not on rank 0'''
)
parser.add_argument(
"max_template_date", type=str,
help='''Cutoff for all templates. In training mode, templates are also
filtered by the release date of the target'''
)
parser.add_argument(
"--distillation_data_dir", type=str, default=None,
help="Directory containing training PDB files"
)
parser.add_argument(
"--distillation_alignment_dir", type=str, default=None,
help="Directory containing precomputed distillation alignments"
)
parser.add_argument(
"--val_data_dir", type=str, default=None,
help="Directory containing validation mmCIF files"
)
parser.add_argument(
"--val_alignment_dir", type=str, default=None,
help="Directory containing precomputed validation alignments"
)
parser.add_argument(
"--kalign_binary_path", type=str, default='/usr/bin/kalign',
help="Path to the kalign binary"
)
parser.add_argument(
"--train_mapping_path", type=str, default=None,
help='''Optional path to a .json file containing a mapping from
consecutive numerical indices to sample names. Used to filter
the training set'''
)
parser.add_argument(
"--distillation_mapping_path", type=str, default=None,
help="""See --train_mapping_path"""
)
parser.add_argument(
"--obsolete_pdbs_file_path", type=str, default=None,
help="""Path to obsolete.dat file containing list of obsolete PDBs and
their replacements."""
)
parser.add_argument(
"--template_release_dates_cache_path", type=str, default=None,
help="""Output of scripts/generate_mmcif_cache.py run on template mmCIF
files."""
)
parser.add_argument(
"--use_small_bfd", type=bool_type, default=False,
help="Whether to use a reduced version of the BFD database"
)
parser.add_argument(
"--seed", type=int, default=None,
help="Random seed"
)
parser.add_argument(
"--deepspeed_config_path", type=str, default=None,
help="Path to DeepSpeed config. If not provided, DeepSpeed is disabled"
)
parser.add_argument(
"--checkpoint_every_epoch", action="store_true", default=False,
help="""Whether to checkpoint at the end of every training epoch"""
)
parser.add_argument(
"--early_stopping", type=bool_type, default=False,
help="Whether to stop training when validation loss fails to decrease"
)
parser.add_argument(
"--min_delta", type=float, default=0,
help="""The smallest decrease in validation loss that counts as an
improvement for the purposes of early stopping"""
)
parser.add_argument(
"--patience", type=int, default=3,
help="Early stopping patience"
)
parser.add_argument(
"--resume_from_ckpt", type=str, default=None,
help="Path to a model checkpoint from which to restore training state"
)
parser.add_argument(
"--resume_model_weights_only", type=bool_type, default=False,
help="Whether to load just model weights as opposed to training state"
)
parser.add_argument(
"--log_performance", type=bool_type, default=False,
help="Measure performance"
)
parser.add_argument(
"--wandb", action="store_true", default=False,
)
parser.add_argument(
"--experiment_name", type=str, default=None,
)
parser.add_argument(
"--wandb_id", type=str, default=None,
)
parser.add_argument(
"--wandb_project", type=str, default=None,
)
parser.add_argument(
"--wandb_entity", type=str, default=None,
)
parser.add_argument(
"--script_modules", type=bool_type, default=False,
help="Whether to TorchScript eligible components of them model"
)
parser.add_argument(
"--train_chain_data_cache_path", type=str, default=None,
)
parser.add_argument(
"--distillation_chain_data_cache_path", type=str, default=None,
)
parser.add_argument(
"--train_epoch_len", type=int, default=10000,
)
parser.add_argument(
"--_alignment_index_path", type=str, default=None,
)
parser.add_argument(
"--log_lr", action="store_true", default=False,
)
parser = pl.Trainer.add_argparse_args(parser)
# Disable the initial validation pass
parser.set_defaults(
num_sanity_val_steps=0,
)
# Remove some buggy/redundant arguments introduced by the Trainer
remove_arguments(
parser,
[
"--accelerator",
"--resume_from_checkpoint",
"--reload_dataloaders_every_epoch",
"--reload_dataloaders_every_n_epochs",
]
)
args = parser.parse_args()
if(args.seed is None and
((args.gpus is not None and args.gpus > 1) or
(args.num_nodes is not None and args.num_nodes > 1))):
raise ValueError("For distributed training, --seed must be specified")
# This re-applies the training-time filters at the beginning of every epoch
args.reload_dataloaders_every_n_epochs = 1
main(args)
| avg_line_length: 32.361111 | max_line_length: 80 | alphanum_fraction: 0.634703 |

| hexsha: 4c0617adddcead89cc386810a08bd63042d23702 | size: 1,326 | ext: py | lang: Python |
| path: tests/test_algebra_onnx_operators_opset.py | repo: xiaowuhu/sklearn-onnx | head_hexsha: e85674a67a0a043e19c2ffe181e5d31eca8ce40b | licenses: ["Apache-2.0"] (same for stars/issues/forks) |
| max_stars_count: 323 (2018-12-18T20:23:19.000Z → 2022-03-25T09:47:31.000Z) | max_issues_count: 408 (2019-01-02T12:16:10.000Z → 2022-03-21T14:01:28.000Z) | max_forks_count: 70 (2018-12-20T19:36:07.000Z → 2022-03-14T06:41:36.000Z) |
# SPDX-License-Identifier: Apache-2.0
import unittest
import numpy as np
from numpy.testing import assert_almost_equal
import onnx
import onnxruntime as ort
from skl2onnx.algebra.onnx_ops import OnnxPad # noqa
class TestOnnxOperatorsOpset(unittest.TestCase):

    @unittest.skipIf(onnx.defs.onnx_opset_version() < 10, "irrelevant")
    def test_pad_opset_10(self):
        pad = OnnxPad('X', output_names=['Y'],
                      mode='constant', value=1.5,
                      pads=[0, 1, 0, 1],
                      op_version=2)
        X = np.array([[0, 1]], dtype=np.float32)
        model_def = pad.to_onnx({'X': X}, target_opset=10)
        onnx.checker.check_model(model_def)

        def predict_with_onnxruntime(model_def, *inputs):
            sess = ort.InferenceSession(model_def.SerializeToString())
            names = [i.name for i in sess.get_inputs()]
            dinputs = {name: input for name, input in zip(names, inputs)}
            res = sess.run(None, dinputs)
            names = [o.name for o in sess.get_outputs()]
            return {name: output for name, output in zip(names, res)}

        Y = predict_with_onnxruntime(model_def, X)
        assert_almost_equal(
            np.array([[1.5, 0., 1., 1.5]], dtype=np.float32), Y['Y'])


if __name__ == "__main__":
    unittest.main()
| avg_line_length: 33.15 | max_line_length: 73 | alphanum_fraction: 0.616893 |

| hexsha: 7729d7d759748d087090722c02fc25dc7a50022c | size: 8,188 | ext: py | lang: Python |
| path: objects/chessboard.py | repo: UnopposedQuill/ChessFinals | head_hexsha: 3c6142a3379ff9429599ea589c417717af3bbac6 | licenses: ["MIT"] (same for stars/issues/forks) |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
from objects.pieces import *
def location_translator(row, column):
if column == 'a':
return 8 - row, 0
elif column == 'b':
return 8 - row, 1
elif column == 'c':
return 8 - row, 2
elif column == 'd':
return 8 - row, 3
elif column == 'e':
return 8 - row, 4
elif column == 'f':
return 8 - row, 5
elif column == 'g':
return 8 - row, 6
elif column == 'h':
return 8 - row, 7
def str_local_translator(row, column):
if row == 0:
return "A" + str(8 - column)
elif row == 1:
return "B" + str(8 - column)
elif row == 2:
return "C" + str(8 - column)
elif row == 3:
return "D" + str(8 - column)
elif row == 4:
return "E" + str(8 - column)
elif row == 5:
return "F" + str(8 - column)
elif row == 6:
return "G" + str(8 - column)
else:
return "H" + str(8 - column)
"""
------------------------------------------------------------------------------------------------------------------------
---------- CHESSBOARD CLASS --------------------------------------------------------------------------------------------
------------------------------------------------------------------------------------------------------------------------
"""
class Chessboard:
def __init__(self, filename=None):
# Initial values for the MinMax algorithm
self.score = 0
self.piece_values = {King: 200, Queen: 10, Rook: 5, Knight: 3, Bishop: 3, Pawn: 1}
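# Material values used for the MinMax score; the king gets a large sentinel value.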
self.log = ""
# Black pieces.
self.black_rook_left = Rook("b", 0, 0)
self.black_knight_left = Knight("b", 0, 1)
self.black_bishop_left = Bishop("b", 0, 2)
self.black_queen = Queen("b", 0, 3)
self.black_king = King("b", 0, 4)
self.black_bishop_right = Bishop("b", 0, 5)
self.black_knight_right = Knight("b", 0, 6)
self.black_rook_right = Rook("b", 0, 7)
# White pieces.
self.white_rook_left = Rook("w", 7, 0)
self.white_knight_left = Knight("w", 7, 1)
self.white_bishop_left = Bishop("w", 7, 2)
self.white_queen = Queen("w", 7, 3)
self.white_king = King("w", 7, 4)
self.white_bishop_right = Bishop("w", 7, 5)
self.white_knight_right = Knight("w", 7, 6)
self.white_rook_right = Rook("w", 7, 7)
# Logical game board.
# Checks whether a preset board is being loaded or the full starting board is used,
if filename is None:
self.matrix = [
[self.black_rook_left, self.black_knight_left, self.black_bishop_left, self.black_queen,
self.black_king, self.black_bishop_right, self.black_knight_right, self.black_rook_right],
self.get_pawn_row("b"),
self.get_null_row(), self.get_null_row(), self.get_null_row(), self.get_null_row(),
self.get_pawn_row("w"),
[self.white_rook_left, self.white_knight_left, self.white_bishop_left, self.white_queen,
self.white_king, self.white_bishop_right, self.white_knight_right, self.white_rook_right]
]
else:
self.matrix = [
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None],
[None, None, None, None, None, None, None, None]
]
file = open(filename, 'r')
line = file.readline()
while line != "":
x, y = location_translator(int(line[3]), line[2])
print("Pieza: " + line[0] + line[1] + ", coordenadas: (" + str(x) + ", " + str(y) + ")")
if line[0] == 'B':
if line[1] == 'R':
self.matrix[x][y] = King("b", x, y)
elif line[1] == 'D':
self.matrix[x][y] = Queen("b", x, y)
elif line[1] == 'T':
self.matrix[x][y] = Rook("b", x, y)
elif line[1] == 'A':
self.matrix[x][y] = Bishop("b", x, y)
elif line[1] == 'C':
self.matrix[x][y] = Knight("b", x, y)
elif line[1] == 'P':
self.matrix[x][y] = Pawn("b", x, y)
else:
if line[1] == 'R':
self.matrix[x][y] = King("w", x, y)
elif line[1] == 'D':
self.matrix[x][y] = Queen("w", x, y)
elif line[1] == 'T':
self.matrix[x][y] = Rook("w", x, y)
elif line[1] == 'A':
self.matrix[x][y] = Bishop("w", x, y)
elif line[1] == 'C':
self.matrix[x][y] = Knight("w", x, y)
elif line[1] == 'P':
self.matrix[x][y] = Pawn("w", x, y)
# self.save_current_status(0)
line = file.readline()
file.close()
# Mark all pieces as having been moved at least once
for row in self.matrix:
for piece in row:
if isinstance(piece, Pawn) or isinstance(piece, King) or isinstance(piece, Rook):
piece.moved = True
# Function that returns a row of pawns for the game matrix.
@staticmethod
def get_pawn_row(color):
return [Pawn("w", 6, i) for i in range(8)] if color == "w" else [Pawn("b", 1, i) for i in range(8)]
# Function that returns an empty row.
# noinspection PyUnusedLocal
@staticmethod
def get_null_row():
return [None for i in range(8)]
# Function that returns a grid of empty coordinates.
# noinspection PyUnusedLocal
@staticmethod
def get_empty_row_column():
return [[None for x in range(8)] for y in range(8)]
# Checks for pawn promotion.
@staticmethod
def is_promotion(piece, y):
if piece.color == "w":
row, inc = 1, -1
else:
row, inc = 6, 1
return True if type(piece) == Pawn and piece.y == row and y == piece.y + inc else False
# Returns the log
def get_log_file(self):
return self.log
# Function responsible for applying a piece's move on the board.
def move_piece(self, piece, y, x, np=False):
promotion = self.is_promotion(piece, y)
prev_x, prev_y = piece.x, piece.y
piece.x, piece.y = x, y
piece.rect.x, piece.rect.y = x * 70, y * 70
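# Each board square is 70x70 pixels, so logical coordinates are scaled by 70 for the on-screen rect.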
self.matrix[prev_y][prev_x] = None
# A piece is gained here through pawn promotion.
# By default it becomes a queen.
# The min-max score is adjusted, since a pawn was lost but a queen was gained.
if promotion and not np:
self.matrix[y][x] = Queen(piece.color, y, x)
if piece.color == "w":
self.score -= 9
elif piece.color == "b":
self.score += 9
return self.matrix[y][x], piece
else:
self.matrix[y][x] = piece
piece.not_select()
if np == "CR":
# Kingside castling: short
self.matrix[y][x-1] = self.matrix[y][x+1]
self.matrix[y][x+1] = None
elif np == "CL":
# Queenside castling: long
self.matrix[y][x + 1] = self.matrix[y][0]
self.matrix[y][0] = None
# Returns the King piece.
def get_king(self, color):
if color == "b":
for row in self.matrix:
for piece in row:
if isinstance(piece, King) and piece.color == "b":
return piece
else:
for row in self.matrix:
for piece in row:
if isinstance(piece, King) and piece.color == "w":
return piece
raise ValueError('King not found')
# Prints the board.
def save_current_status(self, moves, piece, cell, isIa):
chars = ["A", "B", "C", "D", "E", "F", "G", "H"]
numbers = ["8", "7", "6", "5", "4", "3", "2", "1"]
# Check the player type, since the AI has (x, y) inverted.
if isIa:
prev_pos = str_local_translator(cell[1], cell[0])
else:
prev_pos = str_local_translator(cell[0], cell[1])
# Total move count.
current_status = "Total moves: " + str(moves) + "\n\n"
if moves % 2 == 0:
current_status += "AI's turn.\n\n"
else:
current_status += "Human's turn.\n\n"
# Append the move record.
current_status += "Recorded move:\n" + piece.color + piece.symbol + "-" + prev_pos + " => " + \
piece.color + piece.symbol + "-" + str_local_translator(piece.x, piece.y) + "\n"
# Loop that stores the board layout.
for i in range(9):
if i < 8:
for j in range(9):
if j < 8:
piece = self.matrix[i][j]
if piece is not None:
current_status += "\t" + piece.color + piece.symbol + "\t"
else:
current_status += "\t\t"
else:
current_status += "\t" + numbers[i] + "\n\n"
else:
current_status += "\n"
for char in chars:
current_status += "\t" + char + "\t"
self.log += current_status + "\n\n\n\n"
| avg_line_length: 32.109804 | max_line_length: 120 | alphanum_fraction: 0.581705 |

| hexsha: b737042cd022913233be6f52432ae382353dee4a | size: 13,198 | ext: py | lang: Python |
| path: models/pt_3dpose/detectron/roi_data/retinanet.py | repo: DreaJulca/uxsense | head_hexsha: 538800f3e071660f4a8242c90b25f500b8667aa8 | licenses: ["MIT"] (same for stars/issues/forks) |
| max_stars_count: 27,213 (2018-01-22T22:28:13.000Z → 2022-03-31T15:37:11.000Z) | max_issues_count: 992 (2018-01-23T05:13:19.000Z → 2022-03-30T09:45:05.000Z) | max_forks_count: 6,227 (2018-01-22T22:28:48.000Z → 2022-03-31T12:54:07.000Z) |
# Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Compute minibatch blobs for training a RetinaNet network."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
import logging
import detectron.utils.boxes as box_utils
import detectron.roi_data.data_utils as data_utils
from detectron.core.config import cfg
logger = logging.getLogger(__name__)
def get_retinanet_blob_names(is_training=True):
"""
Returns blob names in the order in which they are read by the data
loader.
N = number of images per minibatch
A = number of anchors = num_scales * num_aspect_ratios
(for example 9 used in RetinaNet paper)
H, W = spatial dimensions (different for each FPN level)
M = Out of all the anchors generated, depending on the positive/negative IoU
overlap thresholds, we will have M positive anchors. These are the anchors
that bounding box branch will regress on.
retnet_cls_labels -> labels for the cls branch for each FPN level
Shape: N x A x H x W
retnet_roi_bbox_targets -> targets for the bbox regression branch
Shape: M x 4
retnet_roi_fg_bbox_locs -> for the bbox regression, since we are only
interested in regressing on fg bboxes which are
M in number and the output prediction of the network
is of shape N x (A * 4) x H x W
(in case of non class-specific bbox), so we
store the locations of positive fg boxes in this
blob retnet_roi_fg_bbox_locs of shape M x 4 where
each row looks like: [img_id, anchor_id, x_loc, y_loc]
"""
# im_info: (height, width, image scale)
blob_names = ['im_info']
assert cfg.FPN.FPN_ON, "RetinaNet uses FPN for dense detection"
# Same format as RPN blobs, but one per FPN level
if is_training:
blob_names += ['retnet_fg_num', 'retnet_bg_num']
for lvl in range(cfg.FPN.RPN_MIN_LEVEL, cfg.FPN.RPN_MAX_LEVEL + 1):
suffix = 'fpn{}'.format(lvl)
blob_names += [
'retnet_cls_labels_' + suffix,
'retnet_roi_bbox_targets_' + suffix,
'retnet_roi_fg_bbox_locs_' + suffix,
]
return blob_names
def add_retinanet_blobs(blobs, im_scales, roidb, image_width, image_height):
"""Add RetinaNet blobs."""
# RetinaNet is applied to many feature levels, as in the FPN paper
k_max, k_min = cfg.FPN.RPN_MAX_LEVEL, cfg.FPN.RPN_MIN_LEVEL
scales_per_octave = cfg.RETINANET.SCALES_PER_OCTAVE
num_aspect_ratios = len(cfg.RETINANET.ASPECT_RATIOS)
aspect_ratios = cfg.RETINANET.ASPECT_RATIOS
anchor_scale = cfg.RETINANET.ANCHOR_SCALE
# get anchors from all levels for all scales/aspect ratios
foas = []
for lvl in range(k_min, k_max + 1):
stride = 2. ** lvl
for octave in range(scales_per_octave):
octave_scale = 2 ** (octave / float(scales_per_octave))
for idx in range(num_aspect_ratios):
anchor_sizes = (stride * octave_scale * anchor_scale, )
anchor_aspect_ratios = (aspect_ratios[idx], )
foa = data_utils.get_field_of_anchors(
stride, anchor_sizes, anchor_aspect_ratios, octave, idx)
foas.append(foa)
all_anchors = np.concatenate([f.field_of_anchors for f in foas])
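# all_anchors stacks the anchors of every FPN level, octave and aspect ratio, in the same order as foas.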
blobs['retnet_fg_num'], blobs['retnet_bg_num'] = 0.0, 0.0
for im_i, entry in enumerate(roidb):
scale = im_scales[im_i]
im_height = np.round(entry['height'] * scale)
im_width = np.round(entry['width'] * scale)
gt_inds = np.where(
(entry['gt_classes'] > 0) & (entry['is_crowd'] == 0))[0]
assert len(gt_inds) > 0, \
'Empty ground truth for image is not allowed. Please check.'
gt_rois = entry['boxes'][gt_inds, :] * scale
gt_classes = entry['gt_classes'][gt_inds]
im_info = np.array([[im_height, im_width, scale]], dtype=np.float32)
blobs['im_info'].append(im_info)
retinanet_blobs, fg_num, bg_num = _get_retinanet_blobs(
foas, all_anchors, gt_rois, gt_classes, image_width, image_height)
for i, foa in enumerate(foas):
for k, v in retinanet_blobs[i].items():
# the way it stacks is:
# [[anchors for image 1] + [anchors for image 2]]
level = int(np.log2(foa.stride))
key = '{}_fpn{}'.format(k, level)
if k == 'retnet_roi_fg_bbox_locs':
v[:, 0] = im_i
# loc_stride: 80 * 4 if cls_specific else 4
loc_stride = 4 # 4 coordinate corresponding to bbox prediction
if cfg.RETINANET.CLASS_SPECIFIC_BBOX:
loc_stride *= (cfg.MODEL.NUM_CLASSES - 1)
anchor_ind = foa.octave * num_aspect_ratios + foa.aspect
# v[:, 1] is the class label [range 0-80] if we do
# class-specfic bbox otherwise it is 0. In case of class
# specific, based on the label, the location of current
# anchor is class_label * 4 and then we take into account
# the anchor_ind if the anchors
v[:, 1] *= 4
v[:, 1] += loc_stride * anchor_ind
blobs[key].append(v)
blobs['retnet_fg_num'] += fg_num
blobs['retnet_bg_num'] += bg_num
blobs['retnet_fg_num'] = blobs['retnet_fg_num'].astype(np.float32)
blobs['retnet_bg_num'] = blobs['retnet_bg_num'].astype(np.float32)
N = len(roidb)
for k, v in blobs.items():
if isinstance(v, list) and len(v) > 0:
# compute number of anchors
A = int(len(v) / N)
# for the cls branch labels [per fpn level],
# we have blobs['retnet_cls_labels_fpn{}'] as a list until this step
# and length of this list is N x A where
# N = num_images, A = num_anchors for example, N = 2, A = 9
# Each element of the list has the shape 1 x 1 x H x W where H, W are
# spatial dimension of current fpn lvl. Let a{i} denote the element
# corresponding to anchor i [9 anchors total] in the list.
# The elements in the list are in order [[a0, ..., a9], [a0, ..., a9]]
# however the network will make predictions like 2 x (9 * 80) x H x W
# so we first concatenate the elements of each image to a numpy array
# and then concatenate the two images to get the 2 x 9 x H x W
if k.find('retnet_cls_labels') >= 0:
tmp = []
# concat anchors within an image
for i in range(0, len(v), A):
tmp.append(np.concatenate(v[i: i + A], axis=1))
# concat images
blobs[k] = np.concatenate(tmp, axis=0)
else:
# for the bbox branch elements [per FPN level],
# we have the targets and the fg boxes locations
# in the shape: M x 4 where M is the number of fg locations in a
# given image at the current FPN level. For the given level,
# the bbox predictions will be. The elements in the list are in
# order [[a0, ..., a9], [a0, ..., a9]]
# Concatenate them to form M x 4
blobs[k] = np.concatenate(v, axis=0)
return True
def _get_retinanet_blobs(
foas, all_anchors, gt_boxes, gt_classes, im_width, im_height):
total_anchors = all_anchors.shape[0]
logger.debug('Getting mad blobs: im_height {} im_width: {}'.format(
im_height, im_width))
inds_inside = np.arange(all_anchors.shape[0])
anchors = all_anchors
num_inside = len(inds_inside)
logger.debug('total_anchors: {}'.format(total_anchors))
logger.debug('inds_inside: {}'.format(num_inside))
logger.debug('anchors.shape: {}'.format(anchors.shape))
# Compute anchor labels:
# label=1 is positive, 0 is negative, -1 is don't care (ignore)
labels = np.empty((num_inside, ), dtype=np.float32)
labels.fill(-1)
if len(gt_boxes) > 0:
# Compute overlaps between the anchors and the gt boxes overlaps
anchor_by_gt_overlap = box_utils.bbox_overlaps(anchors, gt_boxes)
# Map from anchor to gt box that has highest overlap
anchor_to_gt_argmax = anchor_by_gt_overlap.argmax(axis=1)
# For each anchor, amount of overlap with most overlapping gt box
anchor_to_gt_max = anchor_by_gt_overlap[
np.arange(num_inside), anchor_to_gt_argmax]
# Map from gt box to an anchor that has highest overlap
gt_to_anchor_argmax = anchor_by_gt_overlap.argmax(axis=0)
# For each gt box, amount of overlap with most overlapping anchor
gt_to_anchor_max = anchor_by_gt_overlap[
gt_to_anchor_argmax, np.arange(anchor_by_gt_overlap.shape[1])]
# Find all anchors that share the max overlap amount
# (this includes many ties)
anchors_with_max_overlap = np.where(
anchor_by_gt_overlap == gt_to_anchor_max)[0]
# Fg label: for each gt use anchors with highest overlap
# (including ties)
gt_inds = anchor_to_gt_argmax[anchors_with_max_overlap]
labels[anchors_with_max_overlap] = gt_classes[gt_inds]
# Fg label: above threshold IOU
inds = anchor_to_gt_max >= cfg.RETINANET.POSITIVE_OVERLAP
gt_inds = anchor_to_gt_argmax[inds]
labels[inds] = gt_classes[gt_inds]
fg_inds = np.where(labels >= 1)[0]
bg_inds = np.where(anchor_to_gt_max < cfg.RETINANET.NEGATIVE_OVERLAP)[0]
labels[bg_inds] = 0
num_fg, num_bg = len(fg_inds), len(bg_inds)
bbox_targets = np.zeros((num_inside, 4), dtype=np.float32)
bbox_targets[fg_inds, :] = data_utils.compute_targets(
anchors[fg_inds, :], gt_boxes[anchor_to_gt_argmax[fg_inds], :])
# Map up to original set of anchors
labels = data_utils.unmap(labels, total_anchors, inds_inside, fill=-1)
bbox_targets = data_utils.unmap(bbox_targets, total_anchors, inds_inside, fill=0)
# Split the generated labels, etc. into labels per each field of anchors
blobs_out = []
start_idx = 0
for foa in foas:
H = foa.field_size
W = foa.field_size
end_idx = start_idx + H * W
_labels = labels[start_idx:end_idx]
_bbox_targets = bbox_targets[start_idx:end_idx, :]
start_idx = end_idx
# labels output with shape (1, height, width)
_labels = _labels.reshape((1, 1, H, W))
# bbox_targets output with shape (1, 4 * A, height, width)
_bbox_targets = _bbox_targets.reshape((1, H, W, 4)).transpose(0, 3, 1, 2)
stride = foa.stride
w = int(im_width / stride)
h = int(im_height / stride)
# data for select_smooth_l1 loss
num_classes = cfg.MODEL.NUM_CLASSES - 1
inds_4d = np.where(_labels > 0)
M = len(inds_4d)
_roi_bbox_targets = np.zeros((0, 4))
_roi_fg_bbox_locs = np.zeros((0, 4))
if M > 0:
im_inds, y, x = inds_4d[0], inds_4d[2], inds_4d[3]
_roi_bbox_targets = np.zeros((len(im_inds), 4))
_roi_fg_bbox_locs = np.zeros((len(im_inds), 4))
lbls = _labels[im_inds, :, y, x]
for i, lbl in enumerate(lbls):
l = lbl[0] - 1
if not cfg.RETINANET.CLASS_SPECIFIC_BBOX:
l = 0
assert l >= 0 and l < num_classes, 'label out of the range'
_roi_bbox_targets[i, :] = _bbox_targets[:, :, y[i], x[i]]
_roi_fg_bbox_locs[i, :] = np.array([[0, l, y[i], x[i]]])
blobs_out.append(
dict(
retnet_cls_labels=_labels[:, :, 0:h, 0:w].astype(np.int32),
retnet_roi_bbox_targets=_roi_bbox_targets.astype(np.float32),
retnet_roi_fg_bbox_locs=_roi_fg_bbox_locs.astype(np.float32),
))
out_num_fg = np.array([num_fg + 1.0], dtype=np.float32)
out_num_bg = (
np.array([num_bg + 1.0]) * (cfg.MODEL.NUM_CLASSES - 1) +
out_num_fg * (cfg.MODEL.NUM_CLASSES - 2))
return blobs_out, out_num_fg, out_num_bg
| avg_line_length: 45.66782 | max_line_length: 85 | alphanum_fraction: 0.610623 |

| hexsha: 1cc4bb6a8b9a54582aeca66608312b28d3663e00 | size: 1,586 | ext: py | lang: Python |
| path: scripts/from_ast_to_zip.py | repo: masonproffitt/ServiceX_Code_Generator_FuncADL_uproot | head_hexsha: 74aed86affed999dfe0338a9c85c8ee67271fc99 | licenses: ["BSD-3-Clause"] (same for stars/issues/forks) |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
# A script that will take as input a text ast (on the command line) and
# write out a zip file.
import sys
from servicex.code_generator_service.ast_translator import AstTranslator
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-a", "--ast",
help="The text AST to be converted into zip file. STDIN if this is left off") # noqa: E501
parser.add_argument("-z", "--zipfile",
help="The name of the zip file to write out. STDOUT if this is left off")
parser.add_argument("--uproot",
help="Generate code to transform file using uproot",
action='store_true', default=False, required=False)
parser.add_argument("--xaod",
help="Generate code to transform file using EventLoop",
action='store_true', default=False, required=False)
args = parser.parse_args()
if args.xaod == args.uproot:
print("Please specify a target for generated code: Uproot or XAod")
sys.exit(-1)
if args.xaod:
target_backend = 'xAOD'
else:
target_backend = 'uproot'
# Get the input AST
ast_text = args.ast if args.ast is not None else sys.stdin.read().strip()
# Output file
translator = AstTranslator(target_backend)
zip_data = translator.translate_text_ast_to_zip(ast_text)
if args.zipfile is None:
sys.stdout.buffer.write(zip_data)
else:
with open(args.zipfile, 'wb') as w:
w.write(zip_data)
| avg_line_length: 37.761905 | max_line_length: 115 | alphanum_fraction: 0.627995 |

| hexsha: f3110ebd64fdf497e88d9d4a1212fae956b060c6 | size: 11,213 | ext: py | lang: Python |
| path: docx/text/parfmt.py | repo: tanglw2020/python-docx-v2.0 | head_hexsha: 32a41342695d8cd496852d85a8d0d2341adbf23d | licenses: ["MIT"] (same for stars/issues/forks) |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
# encoding: utf-8
"""
Paragraph-related proxy types.
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from ..enum.text import WD_LINE_SPACING
from ..shared import ElementProxy, Emu, lazyproperty, Length, Pt, Twips
from .tabstops import TabStops
class ParagraphFormat(ElementProxy):
"""
Provides access to paragraph formatting such as justification,
indentation, line spacing, space before and after, and widow/orphan
control.
"""
__slots__ = ('_tab_stops',)
@property
def first_char_dropcap(self):
"""
first_char_dropcap value specified for the first character of the paragraph.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.first_char_dropcap
@property
def first_char_dropcap_lines(self):
"""
first_char_dropcap_lines value specified for the first character of the paragraph.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.first_char_dropcap_lines
@property
def alignment(self):
"""
A member of the :ref:`WdParagraphAlignment` enumeration specifying
the justification setting for this paragraph. A value of |None|
indicates paragraph alignment is inherited from the style hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.jc_val
@alignment.setter
def alignment(self, value):
pPr = self._element.get_or_add_pPr()
pPr.jc_val = value
@property
def first_line_indent(self):
"""
|Length| value specifying the relative difference in indentation for
the first line of the paragraph. A positive value causes the first
line to be indented. A negative value produces a hanging indent.
|None| indicates first line indentation is inherited from the style
hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.first_line_indent
@first_line_indent.setter
def first_line_indent(self, value):
pPr = self._element.get_or_add_pPr()
pPr.first_line_indent = value
@property
def keep_together(self):
"""
|True| if the paragraph should be kept "in one piece" and not broken
across a page boundary when the document is rendered. |None|
indicates its effective value is inherited from the style hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.keepLines_val
@keep_together.setter
def keep_together(self, value):
self._element.get_or_add_pPr().keepLines_val = value
@property
def keep_with_next(self):
"""
|True| if the paragraph should be kept on the same page as the
subsequent paragraph when the document is rendered. For example, this
property could be used to keep a section heading on the same page as
its first paragraph. |None| indicates its effective value is
inherited from the style hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.keepNext_val
@keep_with_next.setter
def keep_with_next(self, value):
self._element.get_or_add_pPr().keepNext_val = value
@property
def left_indent(self):
"""
|Length| value specifying the space between the left margin and the
left side of the paragraph. |None| indicates the left indent value is
inherited from the style hierarchy. Use an |Inches| value object as
a convenient way to apply indentation in units of inches.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.ind_left
@left_indent.setter
def left_indent(self, value):
pPr = self._element.get_or_add_pPr()
pPr.ind_left = value
@property
def line_spacing(self):
"""
|float| or |Length| value specifying the space between baselines in
successive lines of the paragraph. A value of |None| indicates line
spacing is inherited from the style hierarchy. A float value, e.g.
``2.0`` or ``1.75``, indicates spacing is applied in multiples of
line heights. A |Length| value such as ``Pt(12)`` indicates spacing
is a fixed height. The |Pt| value class is a convenient way to apply
line spacing in units of points. Assigning |None| resets line spacing
to inherit from the style hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return self._line_spacing(pPr.spacing_line, pPr.spacing_lineRule)
@line_spacing.setter
def line_spacing(self, value):
pPr = self._element.get_or_add_pPr()
if value is None:
pPr.spacing_line = None
pPr.spacing_lineRule = None
elif isinstance(value, Length):
pPr.spacing_line = value
if pPr.spacing_lineRule != WD_LINE_SPACING.AT_LEAST:
pPr.spacing_lineRule = WD_LINE_SPACING.EXACTLY
else:
pPr.spacing_line = Emu(value * Twips(240))
pPr.spacing_lineRule = WD_LINE_SPACING.MULTIPLE
@property
def line_spacing_rule(self):
"""
A member of the :ref:`WdLineSpacing` enumeration indicating how the
value of :attr:`line_spacing` should be interpreted. Assigning any of
the :ref:`WdLineSpacing` members :attr:`SINGLE`, :attr:`DOUBLE`, or
:attr:`ONE_POINT_FIVE` will cause the value of :attr:`line_spacing`
to be updated to produce the corresponding line spacing.
"""
pPr = self._element.pPr
if pPr is None:
return None
return self._line_spacing_rule(
pPr.spacing_line, pPr.spacing_lineRule
)
@line_spacing_rule.setter
def line_spacing_rule(self, value):
pPr = self._element.get_or_add_pPr()
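# 240 twips is one line at the 12 pt reference height (see _line_spacing), so 360 and 480 twips give 1.5x and double spacing.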
if value == WD_LINE_SPACING.SINGLE:
pPr.spacing_line = Twips(240)
pPr.spacing_lineRule = WD_LINE_SPACING.MULTIPLE
elif value == WD_LINE_SPACING.ONE_POINT_FIVE:
pPr.spacing_line = Twips(360)
pPr.spacing_lineRule = WD_LINE_SPACING.MULTIPLE
elif value == WD_LINE_SPACING.DOUBLE:
pPr.spacing_line = Twips(480)
pPr.spacing_lineRule = WD_LINE_SPACING.MULTIPLE
else:
pPr.spacing_lineRule = value
@property
def page_break_before(self):
"""
|True| if the paragraph should appear at the top of the page
following the prior paragraph. |None| indicates its effective value
is inherited from the style hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.pageBreakBefore_val
@page_break_before.setter
def page_break_before(self, value):
self._element.get_or_add_pPr().pageBreakBefore_val = value
@property
def right_indent(self):
"""
|Length| value specifying the space between the right margin and the
right side of the paragraph. |None| indicates the right indent value
is inherited from the style hierarchy. Use a |Cm| value object as
a convenient way to apply indentation in units of centimeters.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.ind_right
@right_indent.setter
def right_indent(self, value):
pPr = self._element.get_or_add_pPr()
pPr.ind_right = value
@property
def space_after(self):
"""
|Length| value specifying the spacing to appear between this
paragraph and the subsequent paragraph. |None| indicates this value
is inherited from the style hierarchy. |Length| objects provide
convenience properties, such as :attr:`~.Length.pt` and
:attr:`~.Length.inches`, that allow easy conversion to various length
units.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.spacing_after
@space_after.setter
def space_after(self, value):
self._element.get_or_add_pPr().spacing_after = value
@property
def space_before(self):
"""
|Length| value specifying the spacing to appear between this
paragraph and the prior paragraph. |None| indicates this value is
inherited from the style hierarchy. |Length| objects provide
convenience properties, such as :attr:`~.Length.pt` and
:attr:`~.Length.cm`, that allow easy conversion to various length
units.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.spacing_before
@space_before.setter
def space_before(self, value):
self._element.get_or_add_pPr().spacing_before = value
@lazyproperty
def tab_stops(self):
"""
|TabStops| object providing access to the tab stops defined for this
paragraph format.
"""
pPr = self._element.get_or_add_pPr()
return TabStops(pPr)
@property
def widow_control(self):
"""
|True| if the first and last lines in the paragraph remain on the
same page as the rest of the paragraph when Word repaginates the
document. |None| indicates its effective value is inherited from the
style hierarchy.
"""
pPr = self._element.pPr
if pPr is None:
return None
return pPr.widowControl_val
@widow_control.setter
def widow_control(self, value):
self._element.get_or_add_pPr().widowControl_val = value
@staticmethod
def _line_spacing(spacing_line, spacing_lineRule):
"""
Return the line spacing value calculated from the combination of
*spacing_line* and *spacing_lineRule*. Returns a |float| number of
lines when *spacing_lineRule* is ``WD_LINE_SPACING.MULTIPLE``,
otherwise a |Length| object of absolute line height is returned.
Returns |None| when *spacing_line* is |None|.
"""
if spacing_line is None:
return None
if spacing_lineRule == WD_LINE_SPACING.MULTIPLE:
return spacing_line / Pt(12)
return spacing_line
@staticmethod
def _line_spacing_rule(line, lineRule):
"""
Return the line spacing rule value calculated from the combination of
*line* and *lineRule*. Returns special members of the
:ref:`WdLineSpacing` enumeration when line spacing is single, double,
or 1.5 lines.
"""
if lineRule == WD_LINE_SPACING.MULTIPLE:
if line == Twips(240):
return WD_LINE_SPACING.SINGLE
if line == Twips(360):
return WD_LINE_SPACING.ONE_POINT_FIVE
if line == Twips(480):
return WD_LINE_SPACING.DOUBLE
return lineRule
| avg_line_length: 34.082067 | max_line_length: 77 | alphanum_fraction: 0.635869 |

| hexsha: 557c829c2eb999ede338e32cfdfa1840dc680489 | size: 299 | ext: py | lang: Python |
| path: PYTHON/pythonDesafios/desafio009.py | repo: Santos1000/Curso-Python | head_hexsha: 549223a1633f6f619c87554dd8078cf7841bb1df | licenses: ["MIT"] (same for stars/issues/forks) |
| max_stars_count: null | max_issues_count: null | max_forks_count: null |
z = int(input('Enter the desired number: '))
a = z * 1
b = z * 2
c = z * 3
d = z * 4
e = z * 5
f = z * 6
g = z * 7
h = z * 8
i = z * 9
j = z * 10
print(f'Multiplication table of {z}:\n{z}x1={a}')
print(f'{z}x2={b}\n{z}x3={c}\n{z}x4={d}\n{z}x5={e}\n{z}x6={f}\n{z}x7={g}\n{z}x8={h}\n{z}x9={i}\n{z}x10={j}')
| avg_line_length: 13 | max_line_length: 108 | alphanum_fraction: 0.444816 |

| hexsha: 90db4097dc5b245981c57edf3b08787feb06cd8c | size: 4,196 | ext: py | lang: Python |
| max_stars: path app/mod_user/controller.py | repo meraki/investigo-spark-bot | head 34665143724522d463873d704dd8d77861748358 | licenses ["Apache-2.0"] | count 2 (2019-01-18T04:16:26.000Z → 2020-03-12T22:13:17.000Z) |
| max_issues: path web/app/mod_user/controller.py | repo rafael-carvalho/cmx-investigo | head 0bd3b9905ecd9ddd4398b12a7cb8553e79e35648 | licenses ["Apache-2.0"] | count null |
| max_forks: path web/app/mod_user/controller.py | repo rafael-carvalho/cmx-investigo | head 0bd3b9905ecd9ddd4398b12a7cb8553e79e35648 | licenses ["Apache-2.0"] | count 2 (2018-04-18T08:35:59.000Z → 2020-03-12T22:13:19.000Z) |
"""
Copyright 2017 Rafael Carvalho
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import traceback
import json
from flask import Blueprint, render_template, redirect, request, url_for, Response
from app.database import db_session
from app.mod_user.models import RegisteredUser
mod_user = Blueprint('mod_user', __name__, url_prefix='/user')
@mod_user.route('/add', methods=['GET', 'POST'])
def add():
if request.method == 'GET':
output = render_template("user/add.html")
else:
output = {
'error': None,
'error_message': None,
'redirect_url': None,
}
try:
if request.json:
form_data = request.json
else:
form_data = request.form
name = form_data["user_name"]
phone = form_data["user_phone"]
mac_address = form_data["user_mac_address"]
user = RegisteredUser(name, mac_address, phone)
db_session.add(user)
db_session.commit()
output['redirect_url'] = url_for('mod_user.show')
except Exception as e:
output['error'] = True
output['error_message'] = str(e)
db_session.rollback()
finally:
output = Response(json.dumps(output), mimetype='application/json')
return output
@mod_user.route('/')
@mod_user.route('/show')
def show():
try:
users = db_session.query(RegisteredUser).all()
return render_template("user/list.html", object=users)
    except Exception:
traceback.print_exc()
return "No response"
@mod_user.route('/details/<id>')
def details(id):
user = db_session.query(RegisteredUser).filter(RegisteredUser.id == id).first()
return render_template("user/details.html", object=user)
@mod_user.route('/edit/<id>', methods=['GET', 'POST'])
def edit(id):
user = db_session.query(RegisteredUser).filter(RegisteredUser.id == id).first()
if not user:
output = redirect(url_for('error', message='User with id = {} does not exist'.format(id)))
else:
if request.method == 'GET':
output = render_template("user/edit.html", object=user)
else:
output = {
'error': None,
'error_message': None,
'redirect_url': None,
}
try:
if request.json:
form_data = request.json
else:
form_data = request.form
name = form_data["user_name"]
phone = form_data["user_phone"]
mac_address = form_data["user_mac_address"]
user.name = name
user.phone = phone
user.mac_address = mac_address
db_session.commit()
output['redirect_url'] = url_for('mod_user.show')
except Exception as e:
output['error'] = True
output['error_message'] = str(e)
db_session.rollback()
finally:
output = Response(json.dumps(output), mimetype='application/json')
return output
@mod_user.route('/delete/<id>', methods=['GET', 'POST'])
def delete(id):
user = db_session.query(RegisteredUser).filter(RegisteredUser.id == id).first()
if not user:
output = redirect(url_for('error', message='User with id = {} does not exist'.format(id)))
else:
if request.method == 'GET':
output = render_template("user/delete.html", object=user)
else:
db_session.delete(user)
output = redirect(url_for('mod_user.show'))
return output
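# --- hedged usage sketch (not part of the original module) ---
# Registering this blueprint on a Flask application; the import path mirrors
# the file location above, but the surrounding package layout is an assumption.
def create_app():
    from flask import Flask
    app = Flask(__name__)
    app.register_blueprint(mod_user)  # serves /user/, /user/add, /user/edit/<id>, ...
    return app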
| 33.568
| 98
| 0.594137
|
5293ee6df55d2109225229d40e421eb66c82bfe1
| 962
|
py
|
Python
|
crf/densecrf.py
|
sahan-liyanaarachchi/bcrf-detectron
|
7cbdd8a77e54f09cca1addd66c7359e17501b9e4
|
[
"Apache-2.0"
] | 4
|
2020-09-11T05:39:22.000Z
|
2022-02-03T20:50:51.000Z
|
crf/densecrf.py
|
sahan-liyanaarachchi/bcrf-detectron
|
7cbdd8a77e54f09cca1addd66c7359e17501b9e4
|
[
"Apache-2.0"
] | 4
|
2021-06-08T22:17:39.000Z
|
2022-03-12T00:46:18.000Z
|
crf/densecrf.py
|
sahan-liyanaarachchi/bcrf-detectron
|
7cbdd8a77e54f09cca1addd66c7359e17501b9e4
|
[
"Apache-2.0"
] | 3
|
2020-09-05T11:31:59.000Z
|
2020-10-03T00:11:48.000Z
|
from crf.pairwise import SpatialPairwise, BilateralPairwise
from crf.params import DenseCRFParams
from crf.util import softmax
class DenseCRF(object):
def __init__(self, image, params: DenseCRFParams):
alpha, beta, gamma = params.alpha, params.beta, params.gamma
self.sp = SpatialPairwise(image, gamma, gamma)
self.bp = BilateralPairwise(image, alpha, alpha, beta, beta, beta)
self.spatial_weight = params.spatial_ker_weight
self.bilateral_weight = params.bilateral_ker_weight
def infer(self, unary_logits, num_iterations=5):
q = softmax(unary_logits)
for _ in range(num_iterations):
tmp1 = unary_logits
output = self.sp.apply(q)
tmp1 = tmp1 + self.spatial_weight * output # Do NOT use the += operator here!
output = self.bp.apply(q)
tmp1 = tmp1 + self.bilateral_weight * output
q = softmax(tmp1)
return q
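# --- hedged usage sketch (not part of the original module) ---
# Mean-field inference on dummy data; the DenseCRFParams field names are read
# off the constructor above, while the keyword arguments, shapes and values
# are illustrative guesses rather than this project's real conventions.
if __name__ == '__main__':
    import numpy as np
    h, w, num_classes = 64, 64, 3
    image = np.random.randint(0, 255, size=(h, w, 3), dtype=np.uint8)
    unary_logits = np.random.randn(h, w, num_classes)
    params = DenseCRFParams(alpha=80.0, beta=13.0, gamma=3.0,
                            spatial_ker_weight=3.0, bilateral_ker_weight=5.0)
    crf = DenseCRF(image, params)
    q = crf.infer(unary_logits, num_iterations=5)  # per-pixel class probabilities
    print(q.shape)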
| 30.0625
| 90
| 0.658004
|
5fe8d5406683aa67e68fc74b8386ced7615df746
| 1,478
|
py
|
Python
|
2021/CVE-2021-37678/vultarget/src/CreateExampleModel.py
|
hjyuan/reapoc
|
ef515e56c44c2590ff8601582bf6c08e076e7083
|
[
"Apache-2.0"
] | 421
|
2021-12-07T08:46:40.000Z
|
2022-03-31T12:42:16.000Z
|
2021/CVE-2021-37678/vultarget/src/CreateExampleModel.py
|
LinShiG0ng/reapoc
|
99ebfcca818928eca79c0a2dcc2e466bc8f3e734
|
[
"Apache-2.0"
] | 1
|
2022-03-14T06:07:39.000Z
|
2022-03-14T15:52:22.000Z
|
2021/CVE-2021-37678/vultarget/src/CreateExampleModel.py
|
LinShiG0ng/reapoc
|
99ebfcca818928eca79c0a2dcc2e466bc8f3e734
|
[
"Apache-2.0"
] | 144
|
2021-12-07T11:06:14.000Z
|
2022-03-31T07:41:35.000Z
|
import tensorflow as tf
print('Loading a dataset that Google provides as an example for image classification\n\n')
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
print('\n\nBuilding an ML model with a neural network for this image classification\n\n')
model = tf.keras.models.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.Dense(10)
])
predictions = model(x_train[:1]).numpy()
tf.nn.softmax(predictions).numpy()
loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
loss_fn(y_train[:1], predictions).numpy()
model.compile(optimizer='adam',
loss=loss_fn,
metrics=['accuracy'])
print('\n\nTraining the model on the dataset we loaded...\n\n')
model.fit(x_train, y_train, epochs=5)
model.evaluate(x_test, y_test, verbose=2)
probability_model = tf.keras.Sequential([
model,
tf.keras.layers.Softmax()
])
probability_model(x_test[:5])
yaml_model = model.to_yaml()
print('\n\nThe trained model looks like this in YAML format...\n\n')
print(yaml_model)
json_model = model.to_json()
print('\n\nAnd it looks like this in JSON format...\n\n')
print(json_model)
print('\n\nSaving the model in YAML format...\n\n')
with open('exampleModel.yaml', 'w') as yaml_file:
yaml_file.write(yaml_model)
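# --- hedged usage sketch (not part of the original PoC script) ---
# Reloading the serialized architecture. tf.keras.models.model_from_yaml is
# the YAML-deserialization API that CVE-2021-37678 targets; it only exists on
# the older TensorFlow releases this proof of concept assumes.
with open('exampleModel.yaml') as yaml_file:
    restored = tf.keras.models.model_from_yaml(yaml_file.read())
restored.summary()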
| 24.633333
| 98
| 0.716509
|
df1417a1fc1c426e2f58c0ff47cdea818f05ec2f
| 3,606
|
py
|
Python
|
scripts/catch_non_abstract_annotation.py
|
ticosax/django-stubs
|
2f7fac2eaf87fe1e50d635ab14bcbe6c475dabc8
|
[
"MIT"
] | 641
|
2015-01-01T11:27:21.000Z
|
2022-03-22T12:46:01.000Z
|
scripts/catch_non_abstract_annotation.py
|
ticosax/django-stubs
|
2f7fac2eaf87fe1e50d635ab14bcbe6c475dabc8
|
[
"MIT"
] | 177
|
2015-01-01T13:00:21.000Z
|
2022-03-15T08:23:28.000Z
|
scripts/catch_non_abstract_annotation.py
|
ticosax/django-stubs
|
2f7fac2eaf87fe1e50d635ab14bcbe6c475dabc8
|
[
"MIT"
] | 118
|
2015-01-14T03:03:08.000Z
|
2022-03-10T03:37:40.000Z
|
import os
from typing import Optional
import libcst
from libcst import Annotation, BaseExpression, FunctionDef, Name, Subscript
from libcst.metadata import SyntacticPositionProvider
BASE_DIR = 'django-stubs'
fpath = os.path.join(BASE_DIR, 'core', 'checks', 'model_checks.pyi')
with open(fpath, 'r') as f:
contents = f.read()
tree = libcst.parse_module(contents)
class TypeAnnotationsAnalyzer(libcst.CSTVisitor):
METADATA_DEPENDENCIES = (SyntacticPositionProvider,)
def __init__(self, fpath: str):
super().__init__()
self.fpath = fpath
def get_node_location(self, node: FunctionDef) -> str:
start_line = self.get_metadata(SyntacticPositionProvider, node).start.line
return f'{self.fpath}:{start_line}'
def show_error_for_node(self, node: FunctionDef, error_message: str):
print(self.get_node_location(node), error_message)
def check_subscripted_annotation(self, annotation: BaseExpression) -> Optional[str]:
if isinstance(annotation, Subscript):
if isinstance(annotation.value, Name):
error_message = self.check_concrete_class_usage(annotation.value)
if error_message:
return error_message
if annotation.value.value == 'Union':
for slice_param in annotation.slice:
if isinstance(slice_param.slice.value, Name):
                        error_message = self.check_concrete_class_usage(slice_param.slice.value)
if error_message:
return error_message
def check_concrete_class_usage(self, name_node: Name) -> Optional[str]:
if name_node.value == 'List':
return (f'Concrete class {name_node.value!r} used for an iterable annotation. '
f'Use abstract collection (Iterable, Collection, Sequence) instead')
def visit_FunctionDef(self, node: FunctionDef) -> Optional[bool]:
params_node = node.params
for param_node in [*params_node.params, *params_node.default_params]:
param_name = param_node.name.value
annotation_node = param_node.annotation # type: Annotation
if annotation_node is not None:
annotation = annotation_node.annotation
if annotation.value == 'None':
self.show_error_for_node(node, f'"None" type annotation used for parameter {param_name!r}')
continue
error_message = self.check_subscripted_annotation(annotation)
if error_message is not None:
self.show_error_for_node(node, error_message)
continue
if node.returns is not None:
return_annotation = node.returns.annotation
if isinstance(return_annotation, Subscript) and return_annotation.value.value == 'Union':
self.show_error_for_node(node, 'Union is return type annotation')
return False
for dirpath, dirnames, filenames in os.walk(BASE_DIR):
for filename in filenames:
fpath = os.path.join(dirpath, filename)
# skip all other checks for now, low priority
if not fpath.startswith(('django-stubs/db', 'django-stubs/views', 'django-stubs/apps',
'django-stubs/http', 'django-stubs/contrib/postgres')):
continue
with open(fpath, 'r') as f:
contents = f.read()
tree = libcst.MetadataWrapper(libcst.parse_module(contents))
analyzer = TypeAnnotationsAnalyzer(fpath)
tree.visit(analyzer)
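# --- hedged usage sketch (not part of the original script) ---
# Running the analyzer on an in-memory stub instead of walking django-stubs;
# the snippet is made up to trigger the List / None / Union checks above.
snippet = "def f(x: List[int], y: None) -> Union[int, str]: ..."
stub_tree = libcst.MetadataWrapper(libcst.parse_module(snippet))
stub_tree.visit(TypeAnnotationsAnalyzer('<snippet>'))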
| 40.516854
| 111
| 0.646977
|
4192e8055a0ba0ac2f86cebae4acc5923106a058
| 1,534
|
py
|
Python
|
0025.Reverse Nodes in k-Group/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
0025.Reverse Nodes in k-Group/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
0025.Reverse Nodes in k-Group/solution.py
|
zhlinh/leetcode
|
6dfa0a4df9ec07b2c746a13c8257780880ea04af
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: zhlinhng@gmail.com
Version: 0.0.1
Created Time: 2016-01-26
Last_modify: 2016-01-26
******************************************
'''
'''
Given a linked list, reverse the nodes of a linked list k at a time and return
its modified list.
If the number of nodes is not a multiple of k then left-out nodes in the end
should remain as it is.
You may not alter the values in the nodes, only nodes itself may be changed.
Only constant memory is allowed.
For example,
Given this linked list: 1->2->3->4->5
For k = 2, you should return: 2->1->4->3->5
For k = 3, you should return: 3->2->1->4->5
'''
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def reverseKGroup(self, head, k):
"""
:type head: ListNode
:type k: int
:rtype: ListNode
"""
dummy = lt = ListNode(0)
dummy.next = head
while lt.next:
tmp = lt.next
p = []
for i in range(k):
if not tmp:
return dummy.next
p.append(tmp)
tmp = tmp.next
lt.next = p[-1]
p[0].next = p[-1].next
j = k - 1
while j > 0:
p[j].next = p[j-1]
j -= 1
lt = p[0]
return dummy.next
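# --- hedged usage sketch (not part of the original solution file) ---
# Build 1->2->3->4->5 and reverse it in groups of 3; per the problem
# statement above the expected output is "3 2 1 4 5".
if __name__ == '__main__':
    def build_list(values):
        dummy = tail = ListNode(0)
        for v in values:
            tail.next = ListNode(v)
            tail = tail.next
        return dummy.next
    node = Solution().reverseKGroup(build_list([1, 2, 3, 4, 5]), 3)
    out = []
    while node:
        out.append(str(node.val))
        node = node.next
    print(' '.join(out))  # 3 2 1 4 5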
| 24.349206
| 78
| 0.495437
|
3c9f39bf3f93787908d46f3d70458ec9d85a462d
| 5,987
|
py
|
Python
|
sdk/python/pulumi_aws/ec2/get_customer_gateway.py
|
sibuthomasmathew/pulumi-aws
|
6351f2182eb6f693d4e09e4136c385adfa0ab674
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/ec2/get_customer_gateway.py
|
sibuthomasmathew/pulumi-aws
|
6351f2182eb6f693d4e09e4136c385adfa0ab674
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/ec2/get_customer_gateway.py
|
sibuthomasmathew/pulumi-aws
|
6351f2182eb6f693d4e09e4136c385adfa0ab674
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = [
'GetCustomerGatewayResult',
'AwaitableGetCustomerGatewayResult',
'get_customer_gateway',
]
@pulumi.output_type
class GetCustomerGatewayResult:
"""
A collection of values returned by getCustomerGateway.
"""
def __init__(__self__, arn=None, bgp_asn=None, device_name=None, filters=None, id=None, ip_address=None, tags=None, type=None):
if arn and not isinstance(arn, str):
raise TypeError("Expected argument 'arn' to be a str")
pulumi.set(__self__, "arn", arn)
if bgp_asn and not isinstance(bgp_asn, int):
raise TypeError("Expected argument 'bgp_asn' to be a int")
pulumi.set(__self__, "bgp_asn", bgp_asn)
if device_name and not isinstance(device_name, str):
raise TypeError("Expected argument 'device_name' to be a str")
pulumi.set(__self__, "device_name", device_name)
if filters and not isinstance(filters, list):
raise TypeError("Expected argument 'filters' to be a list")
pulumi.set(__self__, "filters", filters)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if ip_address and not isinstance(ip_address, str):
raise TypeError("Expected argument 'ip_address' to be a str")
pulumi.set(__self__, "ip_address", ip_address)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def arn(self) -> str:
"""
The ARN of the customer gateway.
"""
return pulumi.get(self, "arn")
@property
@pulumi.getter(name="bgpAsn")
def bgp_asn(self) -> int:
"""
(Optional) The gateway's Border Gateway Protocol (BGP) Autonomous System Number (ASN).
"""
return pulumi.get(self, "bgp_asn")
@property
@pulumi.getter(name="deviceName")
def device_name(self) -> str:
"""
(Optional) A name for the customer gateway device.
"""
return pulumi.get(self, "device_name")
@property
@pulumi.getter
def filters(self) -> Optional[Sequence['outputs.GetCustomerGatewayFilterResult']]:
return pulumi.get(self, "filters")
@property
@pulumi.getter
def id(self) -> str:
return pulumi.get(self, "id")
@property
@pulumi.getter(name="ipAddress")
def ip_address(self) -> str:
"""
(Optional) The IP address of the gateway's Internet-routable external interface.
"""
return pulumi.get(self, "ip_address")
@property
@pulumi.getter
def tags(self) -> Mapping[str, str]:
"""
Map of key-value pairs assigned to the gateway.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
(Optional) The type of customer gateway. The only type AWS supports at this time is "ipsec.1".
"""
return pulumi.get(self, "type")
class AwaitableGetCustomerGatewayResult(GetCustomerGatewayResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetCustomerGatewayResult(
arn=self.arn,
bgp_asn=self.bgp_asn,
device_name=self.device_name,
filters=self.filters,
id=self.id,
ip_address=self.ip_address,
tags=self.tags,
type=self.type)
def get_customer_gateway(filters: Optional[Sequence[pulumi.InputType['GetCustomerGatewayFilterArgs']]] = None,
id: Optional[str] = None,
tags: Optional[Mapping[str, str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCustomerGatewayResult:
"""
Get an existing AWS Customer Gateway.
## Example Usage
```python
import pulumi
import pulumi_aws as aws
foo = aws.ec2.get_customer_gateway(filters=[aws.ec2.GetCustomerGatewayFilterArgs(
name="tag:Name",
values=["foo-prod"],
)])
main = aws.ec2.VpnGateway("main",
vpc_id=aws_vpc["main"]["id"],
amazon_side_asn="7224")
transit = aws.ec2.VpnConnection("transit",
vpn_gateway_id=main.id,
customer_gateway_id=foo.id,
type=foo.type,
static_routes_only=False)
```
:param Sequence[pulumi.InputType['GetCustomerGatewayFilterArgs']] filters: One or more [name-value pairs][dcg-filters] to filter by.
:param str id: The ID of the gateway.
:param Mapping[str, str] tags: Map of key-value pairs assigned to the gateway.
"""
__args__ = dict()
__args__['filters'] = filters
__args__['id'] = id
__args__['tags'] = tags
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('aws:ec2/getCustomerGateway:getCustomerGateway', __args__, opts=opts, typ=GetCustomerGatewayResult).value
return AwaitableGetCustomerGatewayResult(
arn=__ret__.arn,
bgp_asn=__ret__.bgp_asn,
device_name=__ret__.device_name,
filters=__ret__.filters,
id=__ret__.id,
ip_address=__ret__.ip_address,
tags=__ret__.tags,
type=__ret__.type)
| 34.017045
| 141
| 0.632871
|
52583431818e8fe0c2df65bdb321e0e9befcd4a8
| 4,971
|
py
|
Python
|
test/torchaudio_unittest/transforms/torchscript_consistency_impl.py
|
vivekkumar7089/audio
|
0ea6d10d3cd5cd66b0831e218ac18f6b7d8cab3c
|
[
"BSD-2-Clause"
] | null | null | null |
test/torchaudio_unittest/transforms/torchscript_consistency_impl.py
|
vivekkumar7089/audio
|
0ea6d10d3cd5cd66b0831e218ac18f6b7d8cab3c
|
[
"BSD-2-Clause"
] | null | null | null |
test/torchaudio_unittest/transforms/torchscript_consistency_impl.py
|
vivekkumar7089/audio
|
0ea6d10d3cd5cd66b0831e218ac18f6b7d8cab3c
|
[
"BSD-2-Clause"
] | null | null | null |
"""Test suites for jit-ability and its numerical compatibility"""
import torch
import torchaudio.transforms as T
from parameterized import parameterized
from torchaudio_unittest import common_utils
from torchaudio_unittest.common_utils import (
skipIfRocm,
TempDirMixin,
TestBaseMixin,
)
class Transforms(TempDirMixin, TestBaseMixin):
"""Implements test for Transforms that are performed for different devices"""
def _assert_consistency(self, transform, tensor):
tensor = tensor.to(device=self.device, dtype=self.dtype)
transform = transform.to(device=self.device, dtype=self.dtype)
path = self.get_temp_path('transform.zip')
torch.jit.script(transform).save(path)
ts_transform = torch.jit.load(path)
output = transform(tensor)
ts_output = ts_transform(tensor)
self.assertEqual(ts_output, output)
def _assert_consistency_complex(self, transform, tensor, test_pseudo_complex=False):
assert tensor.is_complex()
tensor = tensor.to(device=self.device, dtype=self.complex_dtype)
transform = transform.to(device=self.device, dtype=self.dtype)
path = self.get_temp_path('transform.zip')
torch.jit.script(transform).save(path)
ts_transform = torch.jit.load(path)
if test_pseudo_complex:
tensor = torch.view_as_real(tensor)
output = transform(tensor)
ts_output = ts_transform(tensor)
self.assertEqual(ts_output, output)
def test_Spectrogram(self):
tensor = torch.rand((1, 1000))
self._assert_consistency(T.Spectrogram(), tensor)
def test_Spectrogram_return_complex(self):
tensor = torch.rand((1, 1000))
self._assert_consistency(T.Spectrogram(power=None, return_complex=True), tensor)
@skipIfRocm
def test_GriffinLim(self):
tensor = torch.rand((1, 201, 6))
self._assert_consistency(T.GriffinLim(length=1000, rand_init=False), tensor)
def test_AmplitudeToDB(self):
spec = torch.rand((6, 201))
self._assert_consistency(T.AmplitudeToDB(), spec)
def test_MelScale_invalid(self):
with self.assertRaises(ValueError):
torch.jit.script(T.MelScale())
def test_MelScale(self):
spec_f = torch.rand((1, 201, 6))
self._assert_consistency(T.MelScale(n_stft=201), spec_f)
def test_MelSpectrogram(self):
tensor = torch.rand((1, 1000))
self._assert_consistency(T.MelSpectrogram(), tensor)
def test_MFCC(self):
tensor = torch.rand((1, 1000))
self._assert_consistency(T.MFCC(), tensor)
def test_Resample(self):
sr1, sr2 = 16000, 8000
tensor = common_utils.get_whitenoise(sample_rate=sr1)
self._assert_consistency(T.Resample(float(sr1), float(sr2)), tensor)
def test_ComplexNorm(self):
tensor = torch.rand((1, 2, 201, 2))
self._assert_consistency(T.ComplexNorm(), tensor)
def test_MuLawEncoding(self):
tensor = common_utils.get_whitenoise()
self._assert_consistency(T.MuLawEncoding(), tensor)
def test_MuLawDecoding(self):
tensor = torch.rand((1, 10))
self._assert_consistency(T.MuLawDecoding(), tensor)
def test_Fade(self):
waveform = common_utils.get_whitenoise()
fade_in_len = 3000
fade_out_len = 3000
self._assert_consistency(T.Fade(fade_in_len, fade_out_len), waveform)
def test_FrequencyMasking(self):
tensor = torch.rand((10, 2, 50, 10, 2))
self._assert_consistency(T.FrequencyMasking(freq_mask_param=60, iid_masks=False), tensor)
def test_TimeMasking(self):
tensor = torch.rand((10, 2, 50, 10, 2))
self._assert_consistency(T.TimeMasking(time_mask_param=30, iid_masks=False), tensor)
def test_Vol(self):
waveform = common_utils.get_whitenoise()
self._assert_consistency(T.Vol(1.1), waveform)
def test_SlidingWindowCmn(self):
tensor = torch.rand((1000, 10))
self._assert_consistency(T.SlidingWindowCmn(), tensor)
def test_Vad(self):
filepath = common_utils.get_asset_path("vad-go-mono-32000.wav")
waveform, sample_rate = common_utils.load_wav(filepath)
self._assert_consistency(T.Vad(sample_rate=sample_rate), waveform)
def test_SpectralCentroid(self):
sample_rate = 44100
waveform = common_utils.get_whitenoise(sample_rate=sample_rate)
self._assert_consistency(T.SpectralCentroid(sample_rate=sample_rate), waveform)
@parameterized.expand([(True, ), (False, )])
def test_TimeStretch(self, test_pseudo_complex):
n_freq = 400
hop_length = 512
fixed_rate = 1.3
tensor = torch.view_as_complex(torch.rand((10, 2, n_freq, 10, 2)))
self._assert_consistency_complex(
T.TimeStretch(n_freq=n_freq, hop_length=hop_length, fixed_rate=fixed_rate),
tensor,
test_pseudo_complex
)
| 36.021739
| 97
| 0.681754
|
ab0c7a64256709e87956cd81f210d1f962d8c26a
| 1,213
|
py
|
Python
|
old-katas/roman-numbers/day-6.py
|
Alex-Diez/python-tdd-katas
|
a176f16cfd103e618e539a57cac0748fba52221c
|
[
"MIT"
] | null | null | null |
old-katas/roman-numbers/day-6.py
|
Alex-Diez/python-tdd-katas
|
a176f16cfd103e618e539a57cac0748fba52221c
|
[
"MIT"
] | null | null | null |
old-katas/roman-numbers/day-6.py
|
Alex-Diez/python-tdd-katas
|
a176f16cfd103e618e539a57cac0748fba52221c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
class Converter(object):
def __init__(self):
self.factors = {10: "X", 9: "IX", 5: "V", 4: "IV", 1: "I"}
def convert(self, n):
if n < 1:
return ""
arabic = sorted(list(filter(lambda e: e <= n, self.factors)))[-1]
roman = self.factors.get(arabic)
return roman + self.convert(n - arabic)
import unittest
class RomanNumberTest(unittest.TestCase):
def setUp(self):
self.converter = Converter()
def test_converts_0(self):
self.assertEqual("", self.converter.convert(0))
def test_converts_1(self):
self.assertEqual("I", self.converter.convert(1))
def test_converts_5(self):
self.assertEqual("V", self.converter.convert(5))
def test_converts_2(self):
self.assertEqual("II", self.converter.convert(2))
def test_converts_4(self):
self.assertEqual("IV", self.converter.convert(4))
def test_converts_10(self):
self.assertEqual("X", self.converter.convert(10))
def test_converts_9(self):
self.assertEqual("IX", self.converter.convert(9))
def test_converts_29(self):
self.assertEqual("XXIX", self.converter.convert(29))
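# --- hedged addition (not in the original kata file) ---
# Standard entry point so the test suite runs when this file is executed
# directly, e.g. `python day-6.py`.
if __name__ == '__main__':
    unittest.main()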
| 26.955556
| 73
| 0.624073
|
bb9075df8a02e9830a9f7016cdd9044eba0ab7d4
| 3,273
|
py
|
Python
|
docs/conf.py
|
MaxRamgraber/skbel
|
7d43fc495fc6aac2b4a833ed25c48619e8a15b62
|
[
"BSD-3-Clause"
] | 7
|
2021-05-24T16:01:36.000Z
|
2021-12-22T16:10:43.000Z
|
docs/conf.py
|
MaxRamgraber/skbel
|
7d43fc495fc6aac2b4a833ed25c48619e8a15b62
|
[
"BSD-3-Clause"
] | null | null | null |
docs/conf.py
|
MaxRamgraber/skbel
|
7d43fc495fc6aac2b4a833ed25c48619e8a15b62
|
[
"BSD-3-Clause"
] | 1
|
2021-05-27T21:47:49.000Z
|
2021-05-27T21:47:49.000Z
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# sys.path.insert(2, os.path.abspath('../skbel'))
# sys.path.insert(1, os.path.abspath('../examples'))
from datetime import datetime
sys.path.insert(0, os.path.abspath('../'))
# -- Project information -----------------------------------------------------
project = 'skbel'
copyright = f"2019 - {datetime.now().year} Robin Thibaut"
author = 'Robin Thibaut'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# extensions = ["sphinx.ext.autodoc",
# "sphinx_rtd_theme",
# ]
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode",
# "sphinx.ext.napoleon",
"sphinx.ext.autosummary",
"sphinx.ext.extlinks",
# "numpydoc",
"nbsphinx",
"matplotlib.sphinxext.plot_directive",
"IPython.sphinxext.ipython_directive",
"IPython.sphinxext.ipython_console_highlighting",
"sphinx.ext.inheritance_diagram",
# "m2r2",
]
autodoc_default_options = {'members': True,
'undoc-members': True,
'private-members': True,
'special-members': '__init__, __call__',
'inherited-members': False,
'show-inheritance': False}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
source_suffix = ['.rst', '.md']
add_function_parentheses = False
pygments_style = "sphinx"
html_domain_indices = False
html_use_index = False
html_copy_source = True
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
# html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# cd = "cd docs"
# mc = "make clean"
# mh = "make html"
# auto = "sphinx-apidoc -f -o source/ ../skbel/"
#
# os.system(cd)
# os.system(mc)
# os.system(mh)
# os.system(auto)
| 32.088235
| 79
| 0.637336
|
435d16d35f22b614383f7ae95dcdd6cce7c3945d
| 22,294
|
py
|
Python
|
python/ccxt/bitfinex2.py
|
Bincentive/ccxt
|
5a1c6ccd0945657d71d4f5107e2e71480b9f1b5d
|
[
"MIT"
] | null | null | null |
python/ccxt/bitfinex2.py
|
Bincentive/ccxt
|
5a1c6ccd0945657d71d4f5107e2e71480b9f1b5d
|
[
"MIT"
] | null | null | null |
python/ccxt/bitfinex2.py
|
Bincentive/ccxt
|
5a1c6ccd0945657d71d4f5107e2e71480b9f1b5d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.bitfinex import bitfinex
import hashlib
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import NotSupported
class bitfinex2(bitfinex):
def describe(self):
return self.deep_extend(super(bitfinex2, self).describe(), {
'id': 'bitfinex2',
'name': 'Bitfinex',
'countries': ['VG'],
'version': 'v2',
'certified': False,
# new metainfo interface
'has': {
'CORS': False,
'createLimitOrder': False,
'createMarketOrder': False,
'createOrder': False,
'deposit': False,
'editOrder': False,
'fetchDepositAddress': False,
'fetchClosedOrders': False,
'fetchFundingFees': False,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': False,
'fetchOrder': True,
'fetchTickers': True,
'fetchTradingFee': False,
'fetchTradingFees': False,
'fetchTransactions': False,
'withdraw': True,
},
'timeframes': {
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'3h': '3h',
'6h': '6h',
'12h': '12h',
'1d': '1D',
'1w': '7D',
'2w': '14D',
'1M': '1M',
},
'rateLimit': 1500,
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766244-e328a50c-5ed2-11e7-947b-041416579bb3.jpg',
'api': {
'v1': 'https://api.bitfinex.com',
'public': 'https://api-pub.bitfinex.com',
'private': 'https://api.bitfinex.com',
},
'www': 'https://www.bitfinex.com',
'doc': [
'https://docs.bitfinex.com/v2/docs/',
'https://github.com/bitfinexcom/bitfinex-api-node',
],
'fees': 'https://www.bitfinex.com/fees',
},
'api': {
'v1': {
'get': [
'symbols',
'symbols_details',
],
},
'public': {
'get': [
'conf/pub:map:currency:label',
'platform/status',
'tickers',
'ticker/{symbol}',
'trades/{symbol}/hist',
'book/{symbol}/{precision}',
'book/{symbol}/P0',
'book/{symbol}/P1',
'book/{symbol}/P2',
'book/{symbol}/P3',
'book/{symbol}/R0',
'stats1/{key}:{size}:{symbol}:{side}/{section}',
'stats1/{key}:{size}:{symbol}/{section}',
'stats1/{key}:{size}:{symbol}:long/last',
'stats1/{key}:{size}:{symbol}:long/hist',
'stats1/{key}:{size}:{symbol}:short/last',
'stats1/{key}:{size}:{symbol}:short/hist',
'candles/trade:{timeframe}:{symbol}/{section}',
'candles/trade:{timeframe}:{symbol}/last',
'candles/trade:{timeframe}:{symbol}/hist',
],
'post': [
'calc/trade/avg',
'calc/fx',
],
},
'private': {
'post': [
'auth/r/wallets',
'auth/r/orders/{symbol}',
'auth/r/orders/{symbol}/new',
'auth/r/orders/{symbol}/hist',
'auth/r/order/{symbol}:{id}/trades',
'auth/w/order/submit',
'auth/r/trades/hist',
'auth/r/trades/{symbol}/hist',
'auth/r/positions',
'auth/r/positions/hist',
'auth/r/positions/audit',
'auth/r/funding/offers/{symbol}',
'auth/r/funding/offers/{symbol}/hist',
'auth/r/funding/loans/{symbol}',
'auth/r/funding/loans/{symbol}/hist',
'auth/r/funding/credits/{symbol}',
'auth/r/funding/credits/{symbol}/hist',
'auth/r/funding/trades/{symbol}/hist',
'auth/r/info/margin/{key}',
'auth/r/info/funding/{key}',
'auth/r/ledgers/hist',
'auth/r/movements/hist',
'auth/r/movements/{currency}/hist',
'auth/r/stats/perf:{timeframe}/hist',
'auth/r/alerts',
'auth/w/alert/set',
'auth/w/alert/{type}:{symbol}:{price}/del',
'auth/calc/order/avail',
'auth/r/ledgers/{symbol}/hist',
'auth/r/settings',
'auth/w/settings/set',
'auth/w/settings/del',
'auth/r/info/user',
],
},
},
'fees': {
'trading': {
'maker': 0.1 / 100,
'taker': 0.2 / 100,
},
'funding': {
'withdraw': {
'BTC': 0.0004,
'BCH': 0.0001,
'ETH': 0.00135,
'EOS': 0.0,
'LTC': 0.001,
'OMG': 0.15097,
'IOT': 0.0,
'NEO': 0.0,
'ETC': 0.01,
'XRP': 0.02,
'ETP': 0.01,
'ZEC': 0.001,
'BTG': 0.0,
'DASH': 0.01,
'XMR': 0.0001,
'QTM': 0.01,
'EDO': 0.23687,
'DAT': 9.8858,
'AVT': 1.1251,
'SAN': 0.35977,
'USDT': 5.0,
'SPK': 16.971,
'BAT': 1.1209,
'GNT': 2.8789,
'SNT': 9.0848,
'QASH': 1.726,
'YYW': 7.9464,
},
},
},
'options': {
'precision': 'R0', # P0, P1, P2, P3, P4, R0
'orderTypes': {
'MARKET': None,
'EXCHANGE MARKET': 'market',
'LIMIT': None,
'EXCHANGE LIMIT': 'limit',
'STOP': None,
'EXCHANGE STOP': 'stopOrLoss',
'TRAILING STOP': None,
'EXCHANGE TRAILING STOP': None,
'FOK': None,
'EXCHANGE FOK': 'limit FOK',
'STOP LIMIT': None,
'EXCHANGE STOP LIMIT': 'limit stop',
'IOC': None,
'EXCHANGE IOC': 'limit ioc',
},
'fiat': {
'USD': 'USD',
'EUR': 'EUR',
'JPY': 'JPY',
'GBP': 'GBP',
},
},
})
def is_fiat(self, code):
return(code in self.options['fiat'])
def get_currency_id(self, code):
return 'f' + code
def fetch_markets(self, params={}):
response = self.v1GetSymbolsDetails(params)
result = []
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'pair')
id = id.upper()
baseId = None
quoteId = None
if id.find(':') >= 0:
parts = id.split(':')
baseId = parts[0]
quoteId = parts[1]
else:
baseId = id[0:3]
quoteId = id[3:6]
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
id = 't' + id
baseId = self.get_currency_id(baseId)
quoteId = self.get_currency_id(quoteId)
precision = {
'price': self.safe_integer(market, 'price_precision'),
'amount': self.safe_integer(market, 'price_precision'),
}
limits = {
'amount': {
'min': self.safe_float(market, 'minimum_order_size'),
'max': self.safe_float(market, 'maximum_order_size'),
},
'price': {
'min': math.pow(10, -precision['price']),
'max': math.pow(10, precision['price']),
},
}
limits['cost'] = {
'min': limits['amount']['min'] * limits['price']['min'],
'max': None,
}
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': True,
'precision': precision,
'limits': limits,
'info': market,
'swap': False,
'spot': False,
'futures': False,
})
return result
def fetch_balance(self, params={}):
        # this API call does not return the 'used' amount - use the v1 version instead (which also returns zero balances)
self.load_markets()
response = self.privatePostAuthRWallets(params)
balanceType = self.safe_string(params, 'type', 'exchange')
result = {'info': response}
for b in range(0, len(response)):
balance = response[b]
accountType = balance[0]
currency = balance[1]
total = balance[2]
available = balance[4]
if accountType == balanceType:
if currency[0] == 't':
currency = currency[1:]
code = self.safe_currency_code(currency)
account = self.account()
# do not fill in zeroes and missing values in the parser
# rewrite and unify the following to use the unified parseBalance
account['total'] = total
if not available:
if available == 0:
account['free'] = 0
account['used'] = total
else:
account['free'] = total
else:
account['free'] = available
account['used'] = account['total'] - account['free']
result[code] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
precision = self.safe_value(self.options, 'precision', 'R0')
request = {
'symbol': self.market_id(symbol),
'precision': precision,
}
if limit is not None:
request['len'] = limit # 25 or 100
fullRequest = self.extend(request, params)
orderbook = self.publicGetBookSymbolPrecision(fullRequest)
timestamp = self.milliseconds()
result = {
'bids': [],
'asks': [],
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'nonce': None,
}
priceIndex = 1 if (fullRequest['precision'] == 'R0') else 0
for i in range(0, len(orderbook)):
order = orderbook[i]
price = order[priceIndex]
amount = abs(order[2])
side = 'bids' if (order[2] > 0) else 'asks'
result[side].append([price, amount])
result['bids'] = self.sort_by(result['bids'], 0, True)
result['asks'] = self.sort_by(result['asks'], 0)
return result
def parse_ticker(self, ticker, market=None):
timestamp = self.milliseconds()
symbol = None
if market is not None:
symbol = market['symbol']
length = len(ticker)
last = ticker[length - 4]
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': ticker[length - 2],
'low': ticker[length - 1],
'bid': ticker[length - 10],
'bidVolume': None,
'ask': ticker[length - 8],
'askVolume': None,
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': ticker[length - 6],
'percentage': ticker[length - 5] * 100,
'average': None,
'baseVolume': ticker[length - 3],
'quoteVolume': None,
'info': ticker,
}
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
ids = self.market_ids(symbols)
request['symbols'] = ','.join(ids)
else:
request['symbols'] = 'ALL'
tickers = self.publicGetTickers(self.extend(request, params))
result = {}
for i in range(0, len(tickers)):
ticker = tickers[i]
id = ticker[0]
if id in self.markets_by_id:
market = self.markets_by_id[id]
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
return result
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
ticker = self.publicGetTickerSymbol(self.extend(request, params))
return self.parse_ticker(ticker, market)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# [
# ID,
# MTS, # timestamp
# AMOUNT,
# PRICE
# ]
#
# fetchMyTrades(private)
#
# [
# ID,
# PAIR,
# MTS_CREATE,
# ORDER_ID,
# EXEC_AMOUNT,
# EXEC_PRICE,
# ORDER_TYPE,
# ORDER_PRICE,
# MAKER,
# FEE,
# FEE_CURRENCY,
# ...
# ]
#
tradeLength = len(trade)
isPrivate = (tradeLength > 5)
id = str(trade[0])
amountIndex = 4 if isPrivate else 2
amount = trade[amountIndex]
cost = None
priceIndex = 5 if isPrivate else 3
price = trade[priceIndex]
side = None
orderId = None
takerOrMaker = None
type = None
fee = None
symbol = None
timestampIndex = 2 if isPrivate else 1
timestamp = trade[timestampIndex]
if isPrivate:
marketId = trade[1]
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
symbol = marketId
orderId = str(trade[3])
takerOrMaker = 'maker' if (trade[8] == 1) else 'taker'
feeCost = trade[9]
feeCurrency = self.safe_currency_code(trade[10])
if feeCost is not None:
fee = {
'cost': abs(feeCost),
'currency': feeCurrency,
}
orderType = trade[6]
type = self.safe_string(self.options['orderTypes'], orderType)
if symbol is None:
if market is not None:
symbol = market['symbol']
if amount is not None:
side = 'sell' if (amount < 0) else 'buy'
amount = abs(amount)
if cost is None:
if price is not None:
cost = amount * price
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'side': side,
'type': type,
'takerOrMaker': takerOrMaker,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
'info': trade,
}
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
sort = '-1'
request = {
'symbol': market['id'],
}
if since is not None:
request['start'] = since
sort = '1'
if limit is not None:
request['limit'] = limit # default 120, max 5000
request['sort'] = sort
response = self.publicGetTradesSymbolHist(self.extend(request, params))
#
# [
# [
# ID,
# MTS, # timestamp
# AMOUNT,
# PRICE
# ]
# ]
#
trades = self.sort_by(response, 1)
return self.parse_trades(trades, market, None, limit)
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=100, params={}):
self.load_markets()
market = self.market(symbol)
if limit is None:
limit = 100 # default 100, max 5000
if since is None:
since = self.milliseconds() - self.parse_timeframe(timeframe) * limit * 1000
request = {
'symbol': market['id'],
'timeframe': self.timeframes[timeframe],
'sort': 1,
'start': since,
'limit': limit,
}
response = self.publicGetCandlesTradeTimeframeSymbolHist(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def create_order(self, symbol, type, side, amount, price=None, params={}):
raise NotSupported(self.id + ' createOrder not implemented yet')
def cancel_order(self, id, symbol=None, params={}):
raise NotSupported(self.id + ' cancelOrder not implemented yet')
def fetch_order(self, id, symbol=None, params={}):
raise NotSupported(self.id + ' fetchOrder not implemented yet')
def fetch_deposit_address(self, currency, params={}):
raise NotSupported(self.id + ' fetchDepositAddress() not implemented yet.')
def withdraw(self, code, amount, address, tag=None, params={}):
raise NotSupported(self.id + ' withdraw not implemented yet')
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
request = {
'end': self.milliseconds(),
}
if since is not None:
request['start'] = since
if limit is not None:
request['limit'] = limit # default 25, max 1000
method = 'privatePostAuthRTradesHist'
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
method = 'privatePostAuthRTradesSymbolHist'
response = getattr(self, method)(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
request = '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if api == 'v1':
request = api + request
else:
request = self.version + request
url = self.urls['api'][api] + '/' + request
if api == 'public':
if query:
url += '?' + self.urlencode(query)
if api == 'private':
self.check_required_credentials()
nonce = str(self.nonce())
body = self.json(query)
auth = '/api/' + request + nonce + body
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha384)
headers = {
'bfx-nonce': nonce,
'bfx-apikey': self.apiKey,
'bfx-signature': signature,
'Content-Type': 'application/json',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = self.fetch2(path, api, method, params, headers, body)
if response:
if 'message' in response:
if response['message'].find('not enough exchange balance') >= 0:
raise InsufficientFunds(self.id + ' ' + self.json(response))
raise ExchangeError(self.id + ' ' + self.json(response))
return response
elif response == '':
raise ExchangeError(self.id + ' returned empty response')
return response
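# --- hedged usage sketch (not part of the generated ccxt module) ---
# Typical public-API usage of this exchange class; the symbol is only an
# example, and private endpoints would additionally need apiKey/secret.
if __name__ == '__main__':
    exchange = bitfinex2()
    exchange.load_markets()
    ticker = exchange.fetch_ticker('BTC/USD')
    print(ticker['symbol'], ticker['last'])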
| 37.343384
| 126
| 0.440118
|
ad6e0d9cb72a3ae259b47f891facfad3fdf7cf32
| 305
|
py
|
Python
|
2015/11/table-homeless-cities-20151118/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 14
|
2015-05-08T13:41:51.000Z
|
2021-02-24T12:34:55.000Z
|
2015/11/table-homeless-20151118/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | null | null | null |
2015/11/table-homeless-20151118/graphic_config.py
|
nprapps/graphics-archive
|
97b0ef326b46a959df930f5522d325e537f7a655
|
[
"FSFAP"
] | 7
|
2015-04-04T04:45:54.000Z
|
2021-02-18T11:12:48.000Z
|
#!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1a78aETHbjs-hE43tgzOmfeAXhFMIx2BN9m44elQZaEo'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| 21.785714
| 77
| 0.816393
|
eb9460d255b190870638aefbcca94ef558894bd0
| 1,552
|
py
|
Python
|
models/features.py
|
ghokun-thesis/domain-networks
|
8f64182a5ef404a0e41eb023812de5efefe4233e
|
[
"MIT"
] | 1
|
2020-12-19T11:56:10.000Z
|
2020-12-19T11:56:10.000Z
|
models/features.py
|
ghokun-thesis/domain-networks
|
8f64182a5ef404a0e41eb023812de5efefe4233e
|
[
"MIT"
] | null | null | null |
models/features.py
|
ghokun-thesis/domain-networks
|
8f64182a5ef404a0e41eb023812de5efefe4233e
|
[
"MIT"
] | 1
|
2021-01-11T13:55:32.000Z
|
2021-01-11T13:55:32.000Z
|
"""
architecture of the domain feature extractors.
author: David-Alexandre Beaupre
date: 2020-04-27
"""
import torch
import torch.nn as nn
import models.blocks as blk
class Features(nn.Module):
def __init__(self, num_channels: int):
"""
represents the feature extractors for each spectrum.
:param num_channels: number of channels of the input image.
"""
super(Features, self).__init__()
self.conv1 = blk.Conv2dBNReLU(in_dim=num_channels, out_dim=32, ksize=(5, 5))
self.conv2 = blk.Conv2dBNReLU(in_dim=32, out_dim=64, ksize=(5, 5))
self.conv3 = blk.Conv2dBNReLU(in_dim=64, out_dim=64, ksize=(5, 5))
self.conv4 = blk.Conv2dBNReLU(in_dim=64, out_dim=64, ksize=(5, 5))
self.conv5 = blk.Conv2dBNReLU(in_dim=64, out_dim=128, ksize=(5, 5))
self.conv6 = blk.Conv2dBNReLU(in_dim=128, out_dim=128, ksize=(5, 5))
self.conv7 = blk.Conv2dBNReLU(in_dim=128, out_dim=256, ksize=(5, 5))
self.conv8 = blk.Conv2dBNReLU(in_dim=256, out_dim=256, ksize=(5, 5))
self.conv9 = blk.Conv2dBN(in_dim=256, out_dim=256, ksize=(4, 4))
def forward(self, x: torch.Tensor) -> torch.Tensor:
"""
forward pass implementation.
:param x: input tensor.
:return: tensor.
"""
y = self.conv1(x)
y = self.conv2(y)
y = self.conv3(y)
y = self.conv4(y)
y = self.conv5(y)
y = self.conv6(y)
y = self.conv7(y)
y = self.conv8(y)
y = self.conv9(y)
return y
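# --- hedged usage sketch (not part of the original module) ---
# Forward a dummy batch through the extractor; the 64x64 patch size is an
# assumption (the stride/padding of blk.Conv2dBNReLU are not shown here), so
# this only illustrates the call, not the project's real input size.
if __name__ == '__main__':
    dummy = torch.randn(2, 1, 64, 64)  # batch of 2 single-channel patches
    extractor = Features(num_channels=1)
    print(extractor(dummy).shape)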
| 33.021277
| 84
| 0.607603
|
7afd7d19a5ba7d4e51fcb4e46751c52be59c2943
| 3,053
|
py
|
Python
|
experiments/1.lexicon_experiment/stock_unseen_source.py
|
jkvc/modapt
|
44610e6f03fb151433f2db633e1d23cc0dc470fc
|
[
"MIT"
] | null | null | null |
experiments/1.lexicon_experiment/stock_unseen_source.py
|
jkvc/modapt
|
44610e6f03fb151433f2db633e1d23cc0dc470fc
|
[
"MIT"
] | null | null | null |
experiments/1.lexicon_experiment/stock_unseen_source.py
|
jkvc/modapt
|
44610e6f03fb151433f2db633e1d23cc0dc470fc
|
[
"MIT"
] | 1
|
2021-04-28T17:09:05.000Z
|
2021-04-28T17:09:05.000Z
|
# Usage: python <script_name> <dataset_name> <stock_lexicon_name> <arch>
import sys
from os import makedirs
from os.path import basename, join, realpath
from posixpath import dirname
import pandas as pd
import torch
from config import LEXICON_DIR, STOCK_LEXICON_DIR
from experiments.datadef.zoo import get_datadef
from modapt.eval import reduce_and_save_metrics
from modapt.lexicon import eval_lexicon_model, train_lexicon_model
from modapt.model import get_model
from modapt.model.logreg_config.base import (
load_stock_lexicon_logreg_model_config,
)
from modapt.utils import (
AUTO_DEVICE,
read_txt_as_str_list,
save_json,
write_str_list_as_txt,
)
_DATASET_NAME = sys.argv[1]
_STOCK_LEXICON_NAME = sys.argv[2]
_ARCH = sys.argv[3]
_DATADEF = get_datadef(_DATASET_NAME)
_SAVE_DIR = join(
LEXICON_DIR, _DATASET_NAME, "holdout_source", f"{_STOCK_LEXICON_NAME}@{_ARCH}"
)
# load stock lexicon
stock_lexicon_dir = join(STOCK_LEXICON_DIR, _STOCK_LEXICON_NAME, "processed")
vocab = read_txt_as_str_list(join(stock_lexicon_dir, "vocab.txt"))
vocab_size = len(vocab)
lexicon_df = pd.read_csv(join(stock_lexicon_dir, "lexicon.csv"))
print(lexicon_df)
for holdout_source in _DATADEF.domain_names:
print(">>", holdout_source)
logdir = join(_SAVE_DIR, holdout_source)
makedirs(logdir, exist_ok=True)
# valid using holdout issue all samples
valid_samples = _DATADEF.load_splits_func([holdout_source], ["train"])["train"]
# build model
config = load_stock_lexicon_logreg_model_config(
lexicon_name=_STOCK_LEXICON_NAME,
arch=_ARCH,
n_classes=_DATADEF.n_classes,
n_sources=_DATADEF.n_sources,
vocab_size=len(vocab),
)
model = get_model(config).to(AUTO_DEVICE)
model.set_weight_from_lexicon(lexicon_df, _DATADEF.label_names)
use_source_individual_norm = config["use_source_individual_norm"]
use_lemmatize = config["use_lemmatize"]
metrics = {}
# run validation set
valid_metrics = eval_lexicon_model(
model=model,
datadef=_DATADEF,
valid_samples=valid_samples,
vocab=vocab,
use_source_individual_norm=use_source_individual_norm,
use_lemmatize=use_lemmatize,
labelprop_split="train",
)
metrics.update(valid_metrics)
save_json(metrics, join(logdir, "leaf_metrics.json"))
write_str_list_as_txt(vocab, join(logdir, "vocab.txt"))
torch.save(model, join(logdir, "model.pth"))
# run test set
test_samples = _DATADEF.load_splits_func([holdout_source], ["test"])["test"]
test_metrics = eval_lexicon_model(
model,
_DATADEF,
test_samples,
vocab,
use_source_individual_norm=config["use_source_individual_norm"],
use_lemmatize=False,
labelprop_split="test",
)
save_json(test_metrics, join(logdir, "leaf_test.json"))
save_json(config, join(_SAVE_DIR, "config.json"))
reduce_and_save_metrics(dirname(_SAVE_DIR))
reduce_and_save_metrics(dirname(_SAVE_DIR), "leaf_test.json", "mean_test.json")
| 30.53
| 83
| 0.740255
|
3c47ea6342a58c10d4ab8b9fa2b500d30a5bdce2
| 791
|
py
|
Python
|
blog/migrations/0002_profile.py
|
dkowsikpai/librolet
|
7148670655157ca5f1ad6853039c9ec00e37adef
|
[
"MIT"
] | null | null | null |
blog/migrations/0002_profile.py
|
dkowsikpai/librolet
|
7148670655157ca5f1ad6853039c9ec00e37adef
|
[
"MIT"
] | null | null | null |
blog/migrations/0002_profile.py
|
dkowsikpai/librolet
|
7148670655157ca5f1ad6853039c9ec00e37adef
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.3 on 2018-12-27 15:38
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(default='default.jpg', upload_to='profile_pics')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 31.64
| 121
| 0.640961
|
66f33127480fedef521e58eb4f99be25a3461e17
| 3,272
|
py
|
Python
|
pysnmp/CISCO-CABLE-WIDEBAND-CAPABILITY.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/CISCO-CABLE-WIDEBAND-CAPABILITY.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/CISCO-CABLE-WIDEBAND-CAPABILITY.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module CISCO-CABLE-WIDEBAND-CAPABILITY (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-CABLE-WIDEBAND-CAPABILITY
# Produced by pysmi-0.3.4 at Mon Apr 29 17:34:39 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint")
ciscoAgentCapability, = mibBuilder.importSymbols("CISCO-SMI", "ciscoAgentCapability")
InetAddressType, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType")
ModuleCompliance, AgentCapabilities, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "AgentCapabilities", "NotificationGroup")
iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, TimeTicks, ModuleIdentity, Unsigned32, IpAddress, Counter32, Counter64, NotificationType, Gauge32, ObjectIdentity, Integer32, MibIdentifier = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "TimeTicks", "ModuleIdentity", "Unsigned32", "IpAddress", "Counter32", "Counter64", "NotificationType", "Gauge32", "ObjectIdentity", "Integer32", "MibIdentifier")
DisplayString, TextualConvention, StorageType = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "StorageType")
ciscoCableWidebandCapability = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 7, 520))
ciscoCableWidebandCapability.setRevisions(('2010-06-09 00:00', '2006-09-07 00:00',))
if mibBuilder.loadTexts: ciscoCableWidebandCapability.setLastUpdated('201006090000Z')
if mibBuilder.loadTexts: ciscoCableWidebandCapability.setOrganization('Cisco Systems, Inc.')
ciscoCableWidebandCapabilityV12R00 = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 520, 1))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoCableWidebandCapabilityV12R00 = ciscoCableWidebandCapabilityV12R00.setProductRelease('Cisco IOS 12.3BC')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoCableWidebandCapabilityV12R00 = ciscoCableWidebandCapabilityV12R00.setStatus('current')
ciscoCableWidebandCapabilityV122R00 = AgentCapabilities((1, 3, 6, 1, 4, 1, 9, 7, 520, 2))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoCableWidebandCapabilityV122R00 = ciscoCableWidebandCapabilityV122R00.setProductRelease('Cisco IOS 12.2S')
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoCableWidebandCapabilityV122R00 = ciscoCableWidebandCapabilityV122R00.setStatus('current')
mibBuilder.exportSymbols("CISCO-CABLE-WIDEBAND-CAPABILITY", ciscoCableWidebandCapability=ciscoCableWidebandCapability, PYSNMP_MODULE_ID=ciscoCableWidebandCapability, ciscoCableWidebandCapabilityV122R00=ciscoCableWidebandCapabilityV122R00, ciscoCableWidebandCapabilityV12R00=ciscoCableWidebandCapabilityV12R00)
| 105.548387
| 477
| 0.795844
|
5e91298828fbd2b2edd1f379c78d39b0e8bd6d73
| 79,267
|
py
|
Python
|
sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py
|
swanderz/azure-sdk-for-python
|
4bc03153b06962fc75f8b8610e96172001c22657
|
[
"MIT"
] | null | null | null |
sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py
|
swanderz/azure-sdk-for-python
|
4bc03153b06962fc75f8b8610e96172001c22657
|
[
"MIT"
] | null | null | null |
sdk/storage/azure-storage-blob/azure/storage/blob/_generated/models/_models.py
|
swanderz/azure-sdk-for-python
|
4bc03153b06962fc75f8b8610e96172001c22657
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from azure.core.exceptions import HttpResponseError
class AccessPolicy(Model):
"""An Access policy.
:param start: the date-time the policy is active
:type start: str
:param expiry: the date-time the policy expires
:type expiry: str
:param permission: the permissions for the acl policy
:type permission: str
"""
_attribute_map = {
'start': {'key': 'Start', 'type': 'str', 'xml': {'name': 'Start'}},
'expiry': {'key': 'Expiry', 'type': 'str', 'xml': {'name': 'Expiry'}},
'permission': {'key': 'Permission', 'type': 'str', 'xml': {'name': 'Permission'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(AccessPolicy, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.expiry = kwargs.get('expiry', None)
self.permission = kwargs.get('permission', None)
class AppendPositionAccessConditions(Model):
"""Additional parameters for a set of operations, such as:
AppendBlob_append_block, AppendBlob_append_block_from_url, AppendBlob_seal.
:param max_size: Optional conditional header. The max length in bytes
permitted for the append blob. If the Append Block operation would cause
the blob to exceed that limit or if the blob size is already greater than
the value specified in this header, the request will fail with
MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition
Failed).
:type max_size: long
:param append_position: Optional conditional header, used only for the
Append Block operation. A number indicating the byte offset to compare.
Append Block will succeed only if the append position is equal to this
number. If it is not, the request will fail with the
AppendPositionConditionNotMet error (HTTP status code 412 - Precondition
Failed).
:type append_position: long
"""
_attribute_map = {
'max_size': {'key': '', 'type': 'long', 'xml': {'name': 'max_size'}},
'append_position': {'key': '', 'type': 'long', 'xml': {'name': 'append_position'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(AppendPositionAccessConditions, self).__init__(**kwargs)
self.max_size = kwargs.get('max_size', None)
self.append_position = kwargs.get('append_position', None)
class ArrowConfiguration(Model):
"""arrow configuration.
All required parameters must be populated in order to send to Azure.
:param schema: Required.
:type schema: list[~azure.storage.blob.models.ArrowField]
"""
_validation = {
'schema': {'required': True},
}
_attribute_map = {
'schema': {'key': 'Schema', 'type': '[ArrowField]', 'xml': {'name': 'Schema', 'itemsName': 'Schema', 'wrapped': True}},
}
_xml_map = {
'name': 'ArrowConfiguration'
}
def __init__(self, **kwargs):
super(ArrowConfiguration, self).__init__(**kwargs)
self.schema = kwargs.get('schema', None)
class ArrowField(Model):
"""field of an arrow schema.
All required parameters must be populated in order to send to Azure.
:param type: Required.
:type type: str
:param name:
:type name: str
:param precision:
:type precision: int
:param scale:
:type scale: int
"""
_validation = {
'type': {'required': True},
}
_attribute_map = {
'type': {'key': 'Type', 'type': 'str', 'xml': {'name': 'Type'}},
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'precision': {'key': 'Precision', 'type': 'int', 'xml': {'name': 'Precision'}},
'scale': {'key': 'Scale', 'type': 'int', 'xml': {'name': 'Scale'}},
}
_xml_map = {
'name': 'Field'
}
def __init__(self, **kwargs):
super(ArrowField, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.name = kwargs.get('name', None)
self.precision = kwargs.get('precision', None)
self.scale = kwargs.get('scale', None)
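# Illustrative sketch: an ArrowConfiguration wraps a list of ArrowField entries describing the
# output schema of a quick query; the field types, names, precision and scale below are hypothetical.
def _example_arrow_configuration():
    return ArrowConfiguration(schema=[
        ArrowField(type='string', name='Name'),
        ArrowField(type='decimal', name='Price', precision=18, scale=2)])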
class BlobFlatListSegment(Model):
"""BlobFlatListSegment.
All required parameters must be populated in order to send to Azure.
:param blob_items: Required.
:type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
'blob_items': {'required': True},
}
_attribute_map = {
'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'BlobItems', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
}
def __init__(self, **kwargs):
super(BlobFlatListSegment, self).__init__(**kwargs)
self.blob_items = kwargs.get('blob_items', None)
class BlobHierarchyListSegment(Model):
"""BlobHierarchyListSegment.
All required parameters must be populated in order to send to Azure.
:param blob_prefixes:
:type blob_prefixes: list[~azure.storage.blob.models.BlobPrefix]
:param blob_items: Required.
:type blob_items: list[~azure.storage.blob.models.BlobItemInternal]
"""
_validation = {
'blob_items': {'required': True},
}
_attribute_map = {
'blob_prefixes': {'key': 'BlobPrefixes', 'type': '[BlobPrefix]', 'xml': {'name': 'BlobPrefix', 'itemsName': 'BlobPrefix'}},
'blob_items': {'key': 'BlobItems', 'type': '[BlobItemInternal]', 'xml': {'name': 'Blob', 'itemsName': 'Blob'}},
}
_xml_map = {
'name': 'Blobs'
}
def __init__(self, **kwargs):
super(BlobHierarchyListSegment, self).__init__(**kwargs)
self.blob_prefixes = kwargs.get('blob_prefixes', None)
self.blob_items = kwargs.get('blob_items', None)
class BlobHTTPHeaders(Model):
"""Additional parameters for a set of operations.
:param blob_cache_control: Optional. Sets the blob's cache control. If
specified, this property is stored with the blob and returned with a read
request.
:type blob_cache_control: str
:param blob_content_type: Optional. Sets the blob's content type. If
specified, this property is stored with the blob and returned with a read
request.
:type blob_content_type: str
:param blob_content_md5: Optional. An MD5 hash of the blob content. Note
that this hash is not validated, as the hashes for the individual blocks
were validated when each was uploaded.
:type blob_content_md5: bytearray
:param blob_content_encoding: Optional. Sets the blob's content encoding.
If specified, this property is stored with the blob and returned with a
read request.
:type blob_content_encoding: str
:param blob_content_language: Optional. Set the blob's content language.
If specified, this property is stored with the blob and returned with a
read request.
:type blob_content_language: str
:param blob_content_disposition: Optional. Sets the blob's
Content-Disposition header.
:type blob_content_disposition: str
"""
_attribute_map = {
'blob_cache_control': {'key': '', 'type': 'str', 'xml': {'name': 'blob_cache_control'}},
'blob_content_type': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_type'}},
'blob_content_md5': {'key': '', 'type': 'bytearray', 'xml': {'name': 'blob_content_md5'}},
'blob_content_encoding': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_encoding'}},
'blob_content_language': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_language'}},
'blob_content_disposition': {'key': '', 'type': 'str', 'xml': {'name': 'blob_content_disposition'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(BlobHTTPHeaders, self).__init__(**kwargs)
self.blob_cache_control = kwargs.get('blob_cache_control', None)
self.blob_content_type = kwargs.get('blob_content_type', None)
self.blob_content_md5 = kwargs.get('blob_content_md5', None)
self.blob_content_encoding = kwargs.get('blob_content_encoding', None)
self.blob_content_language = kwargs.get('blob_content_language', None)
self.blob_content_disposition = kwargs.get('blob_content_disposition', None)
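# Illustrative sketch: BlobHTTPHeaders groups the standard HTTP headers stored with a blob and
# returned on reads; the content type, cache directive and encoding below are hypothetical.
def _example_blob_http_headers():
    return BlobHTTPHeaders(
        blob_content_type='application/json',
        blob_cache_control='max-age=3600',
        blob_content_encoding='gzip')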
class BlobItemInternal(Model):
"""An Azure Storage blob.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
:param deleted: Required.
:type deleted: bool
:param snapshot: Required.
:type snapshot: str
:param version_id:
:type version_id: str
:param is_current_version:
:type is_current_version: bool
:param properties: Required.
:type properties: ~azure.storage.blob.models.BlobPropertiesInternal
:param metadata:
:type metadata: ~azure.storage.blob.models.BlobMetadata
:param blob_tags:
:type blob_tags: ~azure.storage.blob.models.BlobTags
:param object_replication_metadata:
:type object_replication_metadata: dict[str, str]
"""
_validation = {
'name': {'required': True},
'deleted': {'required': True},
'snapshot': {'required': True},
'properties': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'deleted': {'key': 'Deleted', 'type': 'bool', 'xml': {'name': 'Deleted'}},
'snapshot': {'key': 'Snapshot', 'type': 'str', 'xml': {'name': 'Snapshot'}},
'version_id': {'key': 'VersionId', 'type': 'str', 'xml': {'name': 'VersionId'}},
'is_current_version': {'key': 'IsCurrentVersion', 'type': 'bool', 'xml': {'name': 'IsCurrentVersion'}},
'properties': {'key': 'Properties', 'type': 'BlobPropertiesInternal', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': 'BlobMetadata', 'xml': {'name': 'Metadata'}},
'blob_tags': {'key': 'BlobTags', 'type': 'BlobTags', 'xml': {'name': 'BlobTags'}},
'object_replication_metadata': {'key': 'OrMetadata', 'type': '{str}', 'xml': {'name': 'OrMetadata'}},
}
_xml_map = {
'name': 'Blob'
}
def __init__(self, **kwargs):
super(BlobItemInternal, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.deleted = kwargs.get('deleted', None)
self.snapshot = kwargs.get('snapshot', None)
self.version_id = kwargs.get('version_id', None)
self.is_current_version = kwargs.get('is_current_version', None)
self.properties = kwargs.get('properties', None)
self.metadata = kwargs.get('metadata', None)
self.blob_tags = kwargs.get('blob_tags', None)
self.object_replication_metadata = kwargs.get('object_replication_metadata', None)
class BlobMetadata(Model):
"""BlobMetadata.
:param additional_properties: Unmatched properties from the message are
deserialized to this collection
:type additional_properties: dict[str, str]
:param encrypted:
:type encrypted: str
"""
_attribute_map = {
'additional_properties': {'key': '', 'type': '{str}', 'xml': {'name': 'additional_properties'}},
'encrypted': {'key': 'Encrypted', 'type': 'str', 'xml': {'name': 'Encrypted', 'attr': True}},
}
_xml_map = {
'name': 'Metadata'
}
def __init__(self, **kwargs):
super(BlobMetadata, self).__init__(**kwargs)
self.additional_properties = kwargs.get('additional_properties', None)
self.encrypted = kwargs.get('encrypted', None)
class BlobPrefix(Model):
"""BlobPrefix.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(BlobPrefix, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
class BlobPropertiesInternal(Model):
"""Properties of a blob.
All required parameters must be populated in order to send to Azure.
:param creation_time:
:type creation_time: datetime
:param last_modified: Required.
:type last_modified: datetime
:param etag: Required.
:type etag: str
:param content_length: Size in bytes
:type content_length: long
:param content_type:
:type content_type: str
:param content_encoding:
:type content_encoding: str
:param content_language:
:type content_language: str
:param content_md5:
:type content_md5: bytearray
:param content_disposition:
:type content_disposition: str
:param cache_control:
:type cache_control: str
:param blob_sequence_number:
:type blob_sequence_number: long
:param blob_type: Possible values include: 'BlockBlob', 'PageBlob',
'AppendBlob'
:type blob_type: str or ~azure.storage.blob.models.BlobType
:param lease_status: Possible values include: 'locked', 'unlocked'
:type lease_status: str or ~azure.storage.blob.models.LeaseStatusType
:param lease_state: Possible values include: 'available', 'leased',
'expired', 'breaking', 'broken'
:type lease_state: str or ~azure.storage.blob.models.LeaseStateType
:param lease_duration: Possible values include: 'infinite', 'fixed'
:type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
:param copy_id:
:type copy_id: str
:param copy_status: Possible values include: 'pending', 'success',
'aborted', 'failed'
:type copy_status: str or ~azure.storage.blob.models.CopyStatusType
:param copy_source:
:type copy_source: str
:param copy_progress:
:type copy_progress: str
:param copy_completion_time:
:type copy_completion_time: datetime
:param copy_status_description:
:type copy_status_description: str
:param server_encrypted:
:type server_encrypted: bool
:param incremental_copy:
:type incremental_copy: bool
:param destination_snapshot:
:type destination_snapshot: str
:param deleted_time:
:type deleted_time: datetime
:param remaining_retention_days:
:type remaining_retention_days: int
:param access_tier: Possible values include: 'P4', 'P6', 'P10', 'P15',
'P20', 'P30', 'P40', 'P50', 'P60', 'P70', 'P80', 'Hot', 'Cool', 'Archive'
:type access_tier: str or ~azure.storage.blob.models.AccessTier
:param access_tier_inferred:
:type access_tier_inferred: bool
:param archive_status: Possible values include:
'rehydrate-pending-to-hot', 'rehydrate-pending-to-cool'
:type archive_status: str or ~azure.storage.blob.models.ArchiveStatus
:param customer_provided_key_sha256:
:type customer_provided_key_sha256: str
:param encryption_scope: The name of the encryption scope under which the
blob is encrypted.
:type encryption_scope: str
:param access_tier_change_time:
:type access_tier_change_time: datetime
:param tag_count:
:type tag_count: int
:param expires_on:
:type expires_on: datetime
:param is_sealed:
:type is_sealed: bool
:param rehydrate_priority: Possible values include: 'High', 'Standard'
:type rehydrate_priority: str or
~azure.storage.blob.models.RehydratePriority
:param last_accessed_on:
:type last_accessed_on: datetime
"""
_validation = {
'last_modified': {'required': True},
'etag': {'required': True},
}
_attribute_map = {
'creation_time': {'key': 'Creation-Time', 'type': 'rfc-1123', 'xml': {'name': 'Creation-Time'}},
'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123', 'xml': {'name': 'Last-Modified'}},
'etag': {'key': 'Etag', 'type': 'str', 'xml': {'name': 'Etag'}},
'content_length': {'key': 'Content-Length', 'type': 'long', 'xml': {'name': 'Content-Length'}},
'content_type': {'key': 'Content-Type', 'type': 'str', 'xml': {'name': 'Content-Type'}},
'content_encoding': {'key': 'Content-Encoding', 'type': 'str', 'xml': {'name': 'Content-Encoding'}},
'content_language': {'key': 'Content-Language', 'type': 'str', 'xml': {'name': 'Content-Language'}},
'content_md5': {'key': 'Content-MD5', 'type': 'bytearray', 'xml': {'name': 'Content-MD5'}},
'content_disposition': {'key': 'Content-Disposition', 'type': 'str', 'xml': {'name': 'Content-Disposition'}},
'cache_control': {'key': 'Cache-Control', 'type': 'str', 'xml': {'name': 'Cache-Control'}},
'blob_sequence_number': {'key': 'x-ms-blob-sequence-number', 'type': 'long', 'xml': {'name': 'x-ms-blob-sequence-number'}},
'blob_type': {'key': 'BlobType', 'type': 'BlobType', 'xml': {'name': 'BlobType'}},
'lease_status': {'key': 'LeaseStatus', 'type': 'LeaseStatusType', 'xml': {'name': 'LeaseStatus'}},
'lease_state': {'key': 'LeaseState', 'type': 'LeaseStateType', 'xml': {'name': 'LeaseState'}},
'lease_duration': {'key': 'LeaseDuration', 'type': 'LeaseDurationType', 'xml': {'name': 'LeaseDuration'}},
'copy_id': {'key': 'CopyId', 'type': 'str', 'xml': {'name': 'CopyId'}},
'copy_status': {'key': 'CopyStatus', 'type': 'CopyStatusType', 'xml': {'name': 'CopyStatus'}},
'copy_source': {'key': 'CopySource', 'type': 'str', 'xml': {'name': 'CopySource'}},
'copy_progress': {'key': 'CopyProgress', 'type': 'str', 'xml': {'name': 'CopyProgress'}},
'copy_completion_time': {'key': 'CopyCompletionTime', 'type': 'rfc-1123', 'xml': {'name': 'CopyCompletionTime'}},
'copy_status_description': {'key': 'CopyStatusDescription', 'type': 'str', 'xml': {'name': 'CopyStatusDescription'}},
'server_encrypted': {'key': 'ServerEncrypted', 'type': 'bool', 'xml': {'name': 'ServerEncrypted'}},
'incremental_copy': {'key': 'IncrementalCopy', 'type': 'bool', 'xml': {'name': 'IncrementalCopy'}},
'destination_snapshot': {'key': 'DestinationSnapshot', 'type': 'str', 'xml': {'name': 'DestinationSnapshot'}},
'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123', 'xml': {'name': 'DeletedTime'}},
'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int', 'xml': {'name': 'RemainingRetentionDays'}},
'access_tier': {'key': 'AccessTier', 'type': 'str', 'xml': {'name': 'AccessTier'}},
'access_tier_inferred': {'key': 'AccessTierInferred', 'type': 'bool', 'xml': {'name': 'AccessTierInferred'}},
'archive_status': {'key': 'ArchiveStatus', 'type': 'str', 'xml': {'name': 'ArchiveStatus'}},
'customer_provided_key_sha256': {'key': 'CustomerProvidedKeySha256', 'type': 'str', 'xml': {'name': 'CustomerProvidedKeySha256'}},
'encryption_scope': {'key': 'EncryptionScope', 'type': 'str', 'xml': {'name': 'EncryptionScope'}},
'access_tier_change_time': {'key': 'AccessTierChangeTime', 'type': 'rfc-1123', 'xml': {'name': 'AccessTierChangeTime'}},
'tag_count': {'key': 'TagCount', 'type': 'int', 'xml': {'name': 'TagCount'}},
'expires_on': {'key': 'Expiry-Time', 'type': 'rfc-1123', 'xml': {'name': 'Expiry-Time'}},
'is_sealed': {'key': 'Sealed', 'type': 'bool', 'xml': {'name': 'Sealed'}},
'rehydrate_priority': {'key': 'RehydratePriority', 'type': 'str', 'xml': {'name': 'RehydratePriority'}},
'last_accessed_on': {'key': 'LastAccessTime', 'type': 'rfc-1123', 'xml': {'name': 'LastAccessTime'}},
}
_xml_map = {
'name': 'Properties'
}
def __init__(self, **kwargs):
super(BlobPropertiesInternal, self).__init__(**kwargs)
self.creation_time = kwargs.get('creation_time', None)
self.last_modified = kwargs.get('last_modified', None)
self.etag = kwargs.get('etag', None)
self.content_length = kwargs.get('content_length', None)
self.content_type = kwargs.get('content_type', None)
self.content_encoding = kwargs.get('content_encoding', None)
self.content_language = kwargs.get('content_language', None)
self.content_md5 = kwargs.get('content_md5', None)
self.content_disposition = kwargs.get('content_disposition', None)
self.cache_control = kwargs.get('cache_control', None)
self.blob_sequence_number = kwargs.get('blob_sequence_number', None)
self.blob_type = kwargs.get('blob_type', None)
self.lease_status = kwargs.get('lease_status', None)
self.lease_state = kwargs.get('lease_state', None)
self.lease_duration = kwargs.get('lease_duration', None)
self.copy_id = kwargs.get('copy_id', None)
self.copy_status = kwargs.get('copy_status', None)
self.copy_source = kwargs.get('copy_source', None)
self.copy_progress = kwargs.get('copy_progress', None)
self.copy_completion_time = kwargs.get('copy_completion_time', None)
self.copy_status_description = kwargs.get('copy_status_description', None)
self.server_encrypted = kwargs.get('server_encrypted', None)
self.incremental_copy = kwargs.get('incremental_copy', None)
self.destination_snapshot = kwargs.get('destination_snapshot', None)
self.deleted_time = kwargs.get('deleted_time', None)
self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
self.access_tier = kwargs.get('access_tier', None)
self.access_tier_inferred = kwargs.get('access_tier_inferred', None)
self.archive_status = kwargs.get('archive_status', None)
self.customer_provided_key_sha256 = kwargs.get('customer_provided_key_sha256', None)
self.encryption_scope = kwargs.get('encryption_scope', None)
self.access_tier_change_time = kwargs.get('access_tier_change_time', None)
self.tag_count = kwargs.get('tag_count', None)
self.expires_on = kwargs.get('expires_on', None)
self.is_sealed = kwargs.get('is_sealed', None)
self.rehydrate_priority = kwargs.get('rehydrate_priority', None)
self.last_accessed_on = kwargs.get('last_accessed_on', None)
class BlobTag(Model):
"""BlobTag.
All required parameters must be populated in order to send to Azure.
:param key: Required.
:type key: str
:param value: Required.
:type value: str
"""
_validation = {
'key': {'required': True},
'value': {'required': True},
}
_attribute_map = {
'key': {'key': 'Key', 'type': 'str', 'xml': {'name': 'Key'}},
'value': {'key': 'Value', 'type': 'str', 'xml': {'name': 'Value'}},
}
_xml_map = {
'name': 'Tag'
}
def __init__(self, **kwargs):
super(BlobTag, self).__init__(**kwargs)
self.key = kwargs.get('key', None)
self.value = kwargs.get('value', None)
class BlobTags(Model):
"""Blob tags.
All required parameters must be populated in order to send to Azure.
:param blob_tag_set: Required.
:type blob_tag_set: list[~azure.storage.blob.models.BlobTag]
"""
_validation = {
'blob_tag_set': {'required': True},
}
_attribute_map = {
'blob_tag_set': {'key': 'BlobTagSet', 'type': '[BlobTag]', 'xml': {'name': 'TagSet', 'itemsName': 'TagSet', 'wrapped': True}},
}
_xml_map = {
'name': 'Tags'
}
def __init__(self, **kwargs):
super(BlobTags, self).__init__(**kwargs)
self.blob_tag_set = kwargs.get('blob_tag_set', None)
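# Illustrative sketch: BlobTags is a thin wrapper around a list of key/value BlobTag pairs;
# the tag names and values below are hypothetical.
def _example_blob_tags():
    return BlobTags(blob_tag_set=[
        BlobTag(key='project', value='alpha'),
        BlobTag(key='environment', value='test')])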
class Block(Model):
"""Represents a single block in a block blob. It describes the block's ID and
size.
All required parameters must be populated in order to send to Azure.
:param name: Required. The base64 encoded block ID.
:type name: str
:param size: Required. The block size in bytes.
:type size: int
"""
_validation = {
'name': {'required': True},
'size': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'size': {'key': 'Size', 'type': 'int', 'xml': {'name': 'Size'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(Block, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.size = kwargs.get('size', None)
class BlockList(Model):
"""BlockList.
:param committed_blocks:
:type committed_blocks: list[~azure.storage.blob.models.Block]
:param uncommitted_blocks:
:type uncommitted_blocks: list[~azure.storage.blob.models.Block]
"""
_attribute_map = {
'committed_blocks': {'key': 'CommittedBlocks', 'type': '[Block]', 'xml': {'name': 'CommittedBlocks', 'itemsName': 'Block', 'wrapped': True}},
'uncommitted_blocks': {'key': 'UncommittedBlocks', 'type': '[Block]', 'xml': {'name': 'UncommittedBlocks', 'itemsName': 'Block', 'wrapped': True}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(BlockList, self).__init__(**kwargs)
self.committed_blocks = kwargs.get('committed_blocks', None)
self.uncommitted_blocks = kwargs.get('uncommitted_blocks', None)
class BlockLookupList(Model):
"""BlockLookupList.
:param committed:
:type committed: list[str]
:param uncommitted:
:type uncommitted: list[str]
:param latest:
:type latest: list[str]
"""
_attribute_map = {
'committed': {'key': 'Committed', 'type': '[str]', 'xml': {'name': 'Committed', 'itemsName': 'Committed'}},
'uncommitted': {'key': 'Uncommitted', 'type': '[str]', 'xml': {'name': 'Uncommitted', 'itemsName': 'Uncommitted'}},
'latest': {'key': 'Latest', 'type': '[str]', 'xml': {'name': 'Latest', 'itemsName': 'Latest'}},
}
_xml_map = {
'name': 'BlockList'
}
def __init__(self, **kwargs):
super(BlockLookupList, self).__init__(**kwargs)
self.committed = kwargs.get('committed', None)
self.uncommitted = kwargs.get('uncommitted', None)
self.latest = kwargs.get('latest', None)
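# Illustrative sketch: a BlockLookupList names the blocks to commit for a block blob, grouped by
# whether the committed, uncommitted, or latest copy of each block ID should be used; the
# base64-encoded block IDs below are hypothetical.
def _example_block_lookup_list():
    return BlockLookupList(latest=['YmxvY2stMDAwMQ==', 'YmxvY2stMDAwMg=='])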
class ClearRange(Model):
"""ClearRange.
All required parameters must be populated in order to send to Azure.
:param start: Required.
:type start: long
:param end: Required.
:type end: long
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}},
'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}},
}
_xml_map = {
'name': 'ClearRange'
}
def __init__(self, **kwargs):
super(ClearRange, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
class ContainerCpkScopeInfo(Model):
"""Additional parameters for create operation.
:param default_encryption_scope: Optional. Version 2019-07-07 and later.
Specifies the default encryption scope to set on the container and use for
all future writes.
:type default_encryption_scope: str
:param prevent_encryption_scope_override: Optional. Version 2019-07-07
and newer. If true, prevents any request from specifying a different
encryption scope than the scope set on the container.
:type prevent_encryption_scope_override: bool
"""
_attribute_map = {
'default_encryption_scope': {'key': '', 'type': 'str', 'xml': {'name': 'default_encryption_scope'}},
'prevent_encryption_scope_override': {'key': '', 'type': 'bool', 'xml': {'name': 'prevent_encryption_scope_override'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(ContainerCpkScopeInfo, self).__init__(**kwargs)
self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
class ContainerItem(Model):
"""An Azure Storage container.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
:param deleted:
:type deleted: bool
:param version:
:type version: str
:param properties: Required.
:type properties: ~azure.storage.blob.models.ContainerProperties
:param metadata:
:type metadata: dict[str, str]
"""
_validation = {
'name': {'required': True},
'properties': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'deleted': {'key': 'Deleted', 'type': 'bool', 'xml': {'name': 'Deleted'}},
'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'properties': {'key': 'Properties', 'type': 'ContainerProperties', 'xml': {'name': 'Properties'}},
'metadata': {'key': 'Metadata', 'type': '{str}', 'xml': {'name': 'Metadata'}},
}
_xml_map = {
'name': 'Container'
}
def __init__(self, **kwargs):
super(ContainerItem, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.deleted = kwargs.get('deleted', None)
self.version = kwargs.get('version', None)
self.properties = kwargs.get('properties', None)
self.metadata = kwargs.get('metadata', None)
class ContainerProperties(Model):
"""Properties of a container.
All required parameters must be populated in order to send to Azure.
:param last_modified: Required.
:type last_modified: datetime
:param etag: Required.
:type etag: str
:param lease_status: Possible values include: 'locked', 'unlocked'
:type lease_status: str or ~azure.storage.blob.models.LeaseStatusType
:param lease_state: Possible values include: 'available', 'leased',
'expired', 'breaking', 'broken'
:type lease_state: str or ~azure.storage.blob.models.LeaseStateType
:param lease_duration: Possible values include: 'infinite', 'fixed'
:type lease_duration: str or ~azure.storage.blob.models.LeaseDurationType
:param public_access: Possible values include: 'container', 'blob'
:type public_access: str or ~azure.storage.blob.models.PublicAccessType
:param has_immutability_policy:
:type has_immutability_policy: bool
:param has_legal_hold:
:type has_legal_hold: bool
:param default_encryption_scope:
:type default_encryption_scope: str
:param prevent_encryption_scope_override:
:type prevent_encryption_scope_override: bool
:param deleted_time:
:type deleted_time: datetime
:param remaining_retention_days:
:type remaining_retention_days: int
"""
_validation = {
'last_modified': {'required': True},
'etag': {'required': True},
}
_attribute_map = {
'last_modified': {'key': 'Last-Modified', 'type': 'rfc-1123', 'xml': {'name': 'Last-Modified'}},
'etag': {'key': 'Etag', 'type': 'str', 'xml': {'name': 'Etag'}},
'lease_status': {'key': 'LeaseStatus', 'type': 'LeaseStatusType', 'xml': {'name': 'LeaseStatus'}},
'lease_state': {'key': 'LeaseState', 'type': 'LeaseStateType', 'xml': {'name': 'LeaseState'}},
'lease_duration': {'key': 'LeaseDuration', 'type': 'LeaseDurationType', 'xml': {'name': 'LeaseDuration'}},
'public_access': {'key': 'PublicAccess', 'type': 'str', 'xml': {'name': 'PublicAccess'}},
'has_immutability_policy': {'key': 'HasImmutabilityPolicy', 'type': 'bool', 'xml': {'name': 'HasImmutabilityPolicy'}},
'has_legal_hold': {'key': 'HasLegalHold', 'type': 'bool', 'xml': {'name': 'HasLegalHold'}},
'default_encryption_scope': {'key': 'DefaultEncryptionScope', 'type': 'str', 'xml': {'name': 'DefaultEncryptionScope'}},
'prevent_encryption_scope_override': {'key': 'DenyEncryptionScopeOverride', 'type': 'bool', 'xml': {'name': 'DenyEncryptionScopeOverride'}},
'deleted_time': {'key': 'DeletedTime', 'type': 'rfc-1123', 'xml': {'name': 'DeletedTime'}},
'remaining_retention_days': {'key': 'RemainingRetentionDays', 'type': 'int', 'xml': {'name': 'RemainingRetentionDays'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(ContainerProperties, self).__init__(**kwargs)
self.last_modified = kwargs.get('last_modified', None)
self.etag = kwargs.get('etag', None)
self.lease_status = kwargs.get('lease_status', None)
self.lease_state = kwargs.get('lease_state', None)
self.lease_duration = kwargs.get('lease_duration', None)
self.public_access = kwargs.get('public_access', None)
self.has_immutability_policy = kwargs.get('has_immutability_policy', None)
self.has_legal_hold = kwargs.get('has_legal_hold', None)
self.default_encryption_scope = kwargs.get('default_encryption_scope', None)
self.prevent_encryption_scope_override = kwargs.get('prevent_encryption_scope_override', None)
self.deleted_time = kwargs.get('deleted_time', None)
self.remaining_retention_days = kwargs.get('remaining_retention_days', None)
class CorsRule(Model):
"""CORS is an HTTP feature that enables a web application running under one
domain to access resources in another domain. Web browsers implement a
security restriction known as same-origin policy that prevents a web page
from calling APIs in a different domain; CORS provides a secure way to
allow one domain (the origin domain) to call APIs in another domain.
All required parameters must be populated in order to send to Azure.
:param allowed_origins: Required. The origin domains that are permitted to
make a request against the storage service via CORS. The origin domain is
the domain from which the request originates. Note that the origin must be
an exact case-sensitive match with the origin that the user agent sends to
the service. You can also use the wildcard character '*' to allow all
origin domains to make requests via CORS.
:type allowed_origins: str
:param allowed_methods: Required. The methods (HTTP request verbs) that
the origin domain may use for a CORS request. (comma separated)
:type allowed_methods: str
:param allowed_headers: Required. the request headers that the origin
domain may specify on the CORS request.
:type allowed_headers: str
:param exposed_headers: Required. The response headers that may be sent in
the response to the CORS request and exposed by the browser to the request
issuer
:type exposed_headers: str
:param max_age_in_seconds: Required. The maximum amount of time that a
browser should cache the preflight OPTIONS request.
:type max_age_in_seconds: int
"""
_validation = {
'allowed_origins': {'required': True},
'allowed_methods': {'required': True},
'allowed_headers': {'required': True},
'exposed_headers': {'required': True},
'max_age_in_seconds': {'required': True, 'minimum': 0},
}
_attribute_map = {
'allowed_origins': {'key': 'AllowedOrigins', 'type': 'str', 'xml': {'name': 'AllowedOrigins'}},
'allowed_methods': {'key': 'AllowedMethods', 'type': 'str', 'xml': {'name': 'AllowedMethods'}},
'allowed_headers': {'key': 'AllowedHeaders', 'type': 'str', 'xml': {'name': 'AllowedHeaders'}},
'exposed_headers': {'key': 'ExposedHeaders', 'type': 'str', 'xml': {'name': 'ExposedHeaders'}},
'max_age_in_seconds': {'key': 'MaxAgeInSeconds', 'type': 'int', 'xml': {'name': 'MaxAgeInSeconds'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(CorsRule, self).__init__(**kwargs)
self.allowed_origins = kwargs.get('allowed_origins', None)
self.allowed_methods = kwargs.get('allowed_methods', None)
self.allowed_headers = kwargs.get('allowed_headers', None)
self.exposed_headers = kwargs.get('exposed_headers', None)
self.max_age_in_seconds = kwargs.get('max_age_in_seconds', None)
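# Illustrative sketch: a CorsRule allowing GET and PUT from a single origin; the origin,
# header patterns and max-age value below are hypothetical.
def _example_cors_rule():
    return CorsRule(
        allowed_origins='https://www.contoso.com',
        allowed_methods='GET,PUT',
        allowed_headers='x-ms-meta-*',
        exposed_headers='x-ms-meta-*',
        max_age_in_seconds=200)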
class CpkInfo(Model):
"""Additional parameters for a set of operations.
:param encryption_key: Optional. Specifies the encryption key to use to
encrypt the data provided in the request. If not specified, encryption is
performed with the root account encryption key. For more information, see
Encryption at Rest for Azure Storage Services.
:type encryption_key: str
:param encryption_key_sha256: The SHA-256 hash of the provided encryption
key. Must be provided if the x-ms-encryption-key header is provided.
:type encryption_key_sha256: str
:param encryption_algorithm: The algorithm used to produce the encryption
key hash. Currently, the only accepted value is "AES256". Must be provided
if the x-ms-encryption-key header is provided. Possible values include:
'AES256'
:type encryption_algorithm: str or
~azure.storage.blob.models.EncryptionAlgorithmType
"""
_attribute_map = {
'encryption_key': {'key': '', 'type': 'str', 'xml': {'name': 'encryption_key'}},
'encryption_key_sha256': {'key': '', 'type': 'str', 'xml': {'name': 'encryption_key_sha256'}},
'encryption_algorithm': {'key': '', 'type': 'EncryptionAlgorithmType', 'xml': {'name': 'encryption_algorithm'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(CpkInfo, self).__init__(**kwargs)
self.encryption_key = kwargs.get('encryption_key', None)
self.encryption_key_sha256 = kwargs.get('encryption_key_sha256', None)
self.encryption_algorithm = kwargs.get('encryption_algorithm', None)
class CpkScopeInfo(Model):
"""Additional parameters for a set of operations.
:param encryption_scope: Optional. Version 2019-07-07 and later.
Specifies the name of the encryption scope to use to encrypt the data
provided in the request. If not specified, encryption is performed with
the default account encryption scope. For more information, see
Encryption at Rest for Azure Storage Services.
:type encryption_scope: str
"""
_attribute_map = {
'encryption_scope': {'key': '', 'type': 'str', 'xml': {'name': 'encryption_scope'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(CpkScopeInfo, self).__init__(**kwargs)
self.encryption_scope = kwargs.get('encryption_scope', None)
class DataLakeStorageError(Model):
"""DataLakeStorageError.
:param data_lake_storage_error_details: The service error response object.
:type data_lake_storage_error_details:
~azure.storage.blob.models.DataLakeStorageErrorError
"""
_attribute_map = {
'data_lake_storage_error_details': {'key': 'error', 'type': 'DataLakeStorageErrorError', 'xml': {'name': 'error'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(DataLakeStorageError, self).__init__(**kwargs)
self.data_lake_storage_error_details = kwargs.get('data_lake_storage_error_details', None)
class DataLakeStorageErrorException(HttpResponseError):
"""Server responsed with exception of type: 'DataLakeStorageError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, response, deserialize, *args):
model_name = 'DataLakeStorageError'
self.error = deserialize(model_name, response)
if self.error is None:
self.error = deserialize.dependencies[model_name]()
super(DataLakeStorageErrorException, self).__init__(response=response)
class DataLakeStorageErrorError(Model):
"""The service error response object.
:param code: The service error code.
:type code: str
:param message: The service error message.
:type message: str
"""
_attribute_map = {
'code': {'key': 'Code', 'type': 'str', 'xml': {'name': 'Code'}},
'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(DataLakeStorageErrorError, self).__init__(**kwargs)
self.code = kwargs.get('code', None)
self.message = kwargs.get('message', None)
class DelimitedTextConfiguration(Model):
"""delimited text configuration.
All required parameters must be populated in order to send to Azure.
:param column_separator: Required. column separator
:type column_separator: str
:param field_quote: Required. field quote
:type field_quote: str
:param record_separator: Required. record separator
:type record_separator: str
:param escape_char: Required. escape char
:type escape_char: str
:param headers_present: Required. has headers
:type headers_present: bool
"""
_validation = {
'column_separator': {'required': True},
'field_quote': {'required': True},
'record_separator': {'required': True},
'escape_char': {'required': True},
'headers_present': {'required': True},
}
_attribute_map = {
'column_separator': {'key': 'ColumnSeparator', 'type': 'str', 'xml': {'name': 'ColumnSeparator'}},
'field_quote': {'key': 'FieldQuote', 'type': 'str', 'xml': {'name': 'FieldQuote'}},
'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}},
'escape_char': {'key': 'EscapeChar', 'type': 'str', 'xml': {'name': 'EscapeChar'}},
'headers_present': {'key': 'HeadersPresent', 'type': 'bool', 'xml': {'name': 'HasHeaders'}},
}
_xml_map = {
'name': 'DelimitedTextConfiguration'
}
def __init__(self, **kwargs):
super(DelimitedTextConfiguration, self).__init__(**kwargs)
self.column_separator = kwargs.get('column_separator', None)
self.field_quote = kwargs.get('field_quote', None)
self.record_separator = kwargs.get('record_separator', None)
self.escape_char = kwargs.get('escape_char', None)
self.headers_present = kwargs.get('headers_present', None)
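# Illustrative sketch: a DelimitedTextConfiguration describing a conventional CSV layout with a
# header row; the separator, quote and escape characters below are hypothetical.
def _example_csv_configuration():
    return DelimitedTextConfiguration(
        column_separator=',',
        field_quote='"',
        record_separator='\n',
        escape_char='\\',
        headers_present=True)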
class DirectoryHttpHeaders(Model):
"""Additional parameters for a set of operations, such as: Directory_create,
Directory_rename, Blob_rename.
:param cache_control: Cache control for given resource
:type cache_control: str
:param content_type: Content type for given resource
:type content_type: str
:param content_encoding: Content encoding for given resource
:type content_encoding: str
:param content_language: Content language for given resource
:type content_language: str
:param content_disposition: Content disposition for given resource
:type content_disposition: str
"""
_attribute_map = {
'cache_control': {'key': '', 'type': 'str', 'xml': {'name': 'cache_control'}},
'content_type': {'key': '', 'type': 'str', 'xml': {'name': 'content_type'}},
'content_encoding': {'key': '', 'type': 'str', 'xml': {'name': 'content_encoding'}},
'content_language': {'key': '', 'type': 'str', 'xml': {'name': 'content_language'}},
'content_disposition': {'key': '', 'type': 'str', 'xml': {'name': 'content_disposition'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(DirectoryHttpHeaders, self).__init__(**kwargs)
self.cache_control = kwargs.get('cache_control', None)
self.content_type = kwargs.get('content_type', None)
self.content_encoding = kwargs.get('content_encoding', None)
self.content_language = kwargs.get('content_language', None)
self.content_disposition = kwargs.get('content_disposition', None)
class FilterBlobItem(Model):
"""Blob info from a Filter Blobs API call.
All required parameters must be populated in order to send to Azure.
:param name: Required.
:type name: str
:param container_name: Required.
:type container_name: str
:param tags:
:type tags: ~azure.storage.blob.models.BlobTags
"""
_validation = {
'name': {'required': True},
'container_name': {'required': True},
}
_attribute_map = {
'name': {'key': 'Name', 'type': 'str', 'xml': {'name': 'Name'}},
'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName'}},
'tags': {'key': 'Tags', 'type': 'BlobTags', 'xml': {'name': 'Tags'}},
}
_xml_map = {
'name': 'Blob'
}
def __init__(self, **kwargs):
super(FilterBlobItem, self).__init__(**kwargs)
self.name = kwargs.get('name', None)
self.container_name = kwargs.get('container_name', None)
self.tags = kwargs.get('tags', None)
class FilterBlobSegment(Model):
"""The result of a Filter Blobs API call.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param where: Required.
:type where: str
:param blobs: Required.
:type blobs: list[~azure.storage.blob.models.FilterBlobItem]
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'where': {'required': True},
'blobs': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'where': {'key': 'Where', 'type': 'str', 'xml': {'name': 'Where'}},
'blobs': {'key': 'Blobs', 'type': '[FilterBlobItem]', 'xml': {'name': 'Blobs', 'itemsName': 'Blobs', 'wrapped': True}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(FilterBlobSegment, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.where = kwargs.get('where', None)
self.blobs = kwargs.get('blobs', None)
self.next_marker = kwargs.get('next_marker', None)
class GeoReplication(Model):
"""Geo-Replication information for the Secondary Storage Service.
All required parameters must be populated in order to send to Azure.
:param status: Required. The status of the secondary location. Possible
values include: 'live', 'bootstrap', 'unavailable'
:type status: str or ~azure.storage.blob.models.GeoReplicationStatusType
:param last_sync_time: Required. A GMT date/time value, to the second. All
primary writes preceding this value are guaranteed to be available for
read operations at the secondary. Primary writes after this point in time
may or may not be available for reads.
:type last_sync_time: datetime
"""
_validation = {
'status': {'required': True},
'last_sync_time': {'required': True},
}
_attribute_map = {
'status': {'key': 'Status', 'type': 'str', 'xml': {'name': 'Status'}},
'last_sync_time': {'key': 'LastSyncTime', 'type': 'rfc-1123', 'xml': {'name': 'LastSyncTime'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(GeoReplication, self).__init__(**kwargs)
self.status = kwargs.get('status', None)
self.last_sync_time = kwargs.get('last_sync_time', None)
class JsonTextConfiguration(Model):
"""json text configuration.
All required parameters must be populated in order to send to Azure.
:param record_separator: Required. record separator
:type record_separator: str
"""
_validation = {
'record_separator': {'required': True},
}
_attribute_map = {
'record_separator': {'key': 'RecordSeparator', 'type': 'str', 'xml': {'name': 'RecordSeparator'}},
}
_xml_map = {
'name': 'JsonTextConfiguration'
}
def __init__(self, **kwargs):
super(JsonTextConfiguration, self).__init__(**kwargs)
self.record_separator = kwargs.get('record_separator', None)
class KeyInfo(Model):
"""Key information.
All required parameters must be populated in order to send to Azure.
:param start: Required. The date-time the key is active in ISO 8601 UTC
time
:type start: str
:param expiry: Required. The date-time the key expires in ISO 8601 UTC
time
:type expiry: str
"""
_validation = {
'start': {'required': True},
'expiry': {'required': True},
}
_attribute_map = {
'start': {'key': 'Start', 'type': 'str', 'xml': {'name': 'Start'}},
'expiry': {'key': 'Expiry', 'type': 'str', 'xml': {'name': 'Expiry'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(KeyInfo, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.expiry = kwargs.get('expiry', None)
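# Illustrative sketch: KeyInfo bounds the validity window of a user delegation key in ISO 8601
# UTC time; the start and expiry values below are hypothetical.
def _example_key_info():
    return KeyInfo(
        start='2021-04-01T00:00:00Z',
        expiry='2021-04-01T08:00:00Z')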
class LeaseAccessConditions(Model):
"""Additional parameters for a set of operations.
:param lease_id: If specified, the operation only succeeds if the
resource's lease is active and matches this ID.
:type lease_id: str
"""
_attribute_map = {
'lease_id': {'key': '', 'type': 'str', 'xml': {'name': 'lease_id'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(LeaseAccessConditions, self).__init__(**kwargs)
self.lease_id = kwargs.get('lease_id', None)
class ListBlobsFlatSegmentResponse(Model):
"""An enumeration of blobs.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param container_name: Required.
:type container_name: str
:param prefix:
:type prefix: str
:param marker:
:type marker: str
:param max_results:
:type max_results: int
:param segment: Required.
:type segment: ~azure.storage.blob.models.BlobFlatListSegment
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'container_name': {'required': True},
'segment': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName', 'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str', 'xml': {'name': 'Prefix'}},
'marker': {'key': 'Marker', 'type': 'str', 'xml': {'name': 'Marker'}},
'max_results': {'key': 'MaxResults', 'type': 'int', 'xml': {'name': 'MaxResults'}},
'segment': {'key': 'Segment', 'type': 'BlobFlatListSegment', 'xml': {'name': 'Segment'}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(ListBlobsFlatSegmentResponse, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.container_name = kwargs.get('container_name', None)
self.prefix = kwargs.get('prefix', None)
self.marker = kwargs.get('marker', None)
self.max_results = kwargs.get('max_results', None)
self.segment = kwargs.get('segment', None)
self.next_marker = kwargs.get('next_marker', None)
class ListBlobsHierarchySegmentResponse(Model):
"""An enumeration of blobs.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param container_name: Required.
:type container_name: str
:param prefix:
:type prefix: str
:param marker:
:type marker: str
:param max_results:
:type max_results: int
:param delimiter:
:type delimiter: str
:param segment: Required.
:type segment: ~azure.storage.blob.models.BlobHierarchyListSegment
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'container_name': {'required': True},
'segment': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'container_name': {'key': 'ContainerName', 'type': 'str', 'xml': {'name': 'ContainerName', 'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str', 'xml': {'name': 'Prefix'}},
'marker': {'key': 'Marker', 'type': 'str', 'xml': {'name': 'Marker'}},
'max_results': {'key': 'MaxResults', 'type': 'int', 'xml': {'name': 'MaxResults'}},
'delimiter': {'key': 'Delimiter', 'type': 'str', 'xml': {'name': 'Delimiter'}},
'segment': {'key': 'Segment', 'type': 'BlobHierarchyListSegment', 'xml': {'name': 'Segment'}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(ListBlobsHierarchySegmentResponse, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.container_name = kwargs.get('container_name', None)
self.prefix = kwargs.get('prefix', None)
self.marker = kwargs.get('marker', None)
self.max_results = kwargs.get('max_results', None)
self.delimiter = kwargs.get('delimiter', None)
self.segment = kwargs.get('segment', None)
self.next_marker = kwargs.get('next_marker', None)
class ListContainersSegmentResponse(Model):
"""An enumeration of containers.
All required parameters must be populated in order to send to Azure.
:param service_endpoint: Required.
:type service_endpoint: str
:param prefix:
:type prefix: str
:param marker:
:type marker: str
:param max_results:
:type max_results: int
:param container_items: Required.
:type container_items: list[~azure.storage.blob.models.ContainerItem]
:param next_marker:
:type next_marker: str
"""
_validation = {
'service_endpoint': {'required': True},
'container_items': {'required': True},
}
_attribute_map = {
'service_endpoint': {'key': 'ServiceEndpoint', 'type': 'str', 'xml': {'name': 'ServiceEndpoint', 'attr': True}},
'prefix': {'key': 'Prefix', 'type': 'str', 'xml': {'name': 'Prefix'}},
'marker': {'key': 'Marker', 'type': 'str', 'xml': {'name': 'Marker'}},
'max_results': {'key': 'MaxResults', 'type': 'int', 'xml': {'name': 'MaxResults'}},
'container_items': {'key': 'ContainerItems', 'type': '[ContainerItem]', 'xml': {'name': 'Containers', 'itemsName': 'Containers', 'wrapped': True}},
'next_marker': {'key': 'NextMarker', 'type': 'str', 'xml': {'name': 'NextMarker'}},
}
_xml_map = {
'name': 'EnumerationResults'
}
def __init__(self, **kwargs):
super(ListContainersSegmentResponse, self).__init__(**kwargs)
self.service_endpoint = kwargs.get('service_endpoint', None)
self.prefix = kwargs.get('prefix', None)
self.marker = kwargs.get('marker', None)
self.max_results = kwargs.get('max_results', None)
self.container_items = kwargs.get('container_items', None)
self.next_marker = kwargs.get('next_marker', None)
class Logging(Model):
"""Azure Analytics Logging settings.
All required parameters must be populated in order to send to Azure.
:param version: Required. The version of Storage Analytics to configure.
:type version: str
:param delete: Required. Indicates whether all delete requests should be
logged.
:type delete: bool
:param read: Required. Indicates whether all read requests should be
logged.
:type read: bool
:param write: Required. Indicates whether all write requests should be
logged.
:type write: bool
:param retention_policy: Required.
:type retention_policy: ~azure.storage.blob.models.RetentionPolicy
"""
_validation = {
'version': {'required': True},
'delete': {'required': True},
'read': {'required': True},
'write': {'required': True},
'retention_policy': {'required': True},
}
_attribute_map = {
'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'delete': {'key': 'Delete', 'type': 'bool', 'xml': {'name': 'Delete'}},
'read': {'key': 'Read', 'type': 'bool', 'xml': {'name': 'Read'}},
'write': {'key': 'Write', 'type': 'bool', 'xml': {'name': 'Write'}},
'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy', 'xml': {'name': 'RetentionPolicy'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(Logging, self).__init__(**kwargs)
self.version = kwargs.get('version', None)
self.delete = kwargs.get('delete', None)
self.read = kwargs.get('read', None)
self.write = kwargs.get('write', None)
self.retention_policy = kwargs.get('retention_policy', None)
class Metrics(Model):
"""a summary of request statistics grouped by API in hour or minute aggregates
for blobs.
All required parameters must be populated in order to send to Azure.
:param version: The version of Storage Analytics to configure.
:type version: str
:param enabled: Required. Indicates whether metrics are enabled for the
Blob service.
:type enabled: bool
:param include_apis: Indicates whether metrics should generate summary
statistics for called API operations.
:type include_apis: bool
:param retention_policy:
:type retention_policy: ~azure.storage.blob.models.RetentionPolicy
"""
_validation = {
'enabled': {'required': True},
}
_attribute_map = {
'version': {'key': 'Version', 'type': 'str', 'xml': {'name': 'Version'}},
'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
'include_apis': {'key': 'IncludeAPIs', 'type': 'bool', 'xml': {'name': 'IncludeAPIs'}},
'retention_policy': {'key': 'RetentionPolicy', 'type': 'RetentionPolicy', 'xml': {'name': 'RetentionPolicy'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(Metrics, self).__init__(**kwargs)
self.version = kwargs.get('version', None)
self.enabled = kwargs.get('enabled', None)
self.include_apis = kwargs.get('include_apis', None)
self.retention_policy = kwargs.get('retention_policy', None)
class ModifiedAccessConditions(Model):
"""Additional parameters for a set of operations.
:param if_modified_since: Specify this header value to operate only on a
blob if it has been modified since the specified date/time.
:type if_modified_since: datetime
:param if_unmodified_since: Specify this header value to operate only on a
blob if it has not been modified since the specified date/time.
:type if_unmodified_since: datetime
:param if_match: Specify an ETag value to operate only on blobs with a
matching value.
:type if_match: str
:param if_none_match: Specify an ETag value to operate only on blobs
without a matching value.
:type if_none_match: str
:param if_tags: Specify a SQL where clause on blob tags to operate only on
blobs with a matching value.
:type if_tags: str
"""
_attribute_map = {
'if_modified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'if_modified_since'}},
'if_unmodified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'if_unmodified_since'}},
'if_match': {'key': '', 'type': 'str', 'xml': {'name': 'if_match'}},
'if_none_match': {'key': '', 'type': 'str', 'xml': {'name': 'if_none_match'}},
'if_tags': {'key': '', 'type': 'str', 'xml': {'name': 'if_tags'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(ModifiedAccessConditions, self).__init__(**kwargs)
self.if_modified_since = kwargs.get('if_modified_since', None)
self.if_unmodified_since = kwargs.get('if_unmodified_since', None)
self.if_match = kwargs.get('if_match', None)
self.if_none_match = kwargs.get('if_none_match', None)
self.if_tags = kwargs.get('if_tags', None)
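# Illustrative sketch: ModifiedAccessConditions carries the standard conditional headers used to
# guard an operation on a blob; the ETag value below is hypothetical.
def _example_modified_access_conditions():
    return ModifiedAccessConditions(if_match='"0x8D8C8E5DEADBEEF"')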
class PageList(Model):
"""the list of pages.
:param page_range:
:type page_range: list[~azure.storage.blob.models.PageRange]
:param clear_range:
:type clear_range: list[~azure.storage.blob.models.ClearRange]
"""
_attribute_map = {
'page_range': {'key': 'PageRange', 'type': '[PageRange]', 'xml': {'name': 'PageRange', 'itemsName': 'PageRange'}},
'clear_range': {'key': 'ClearRange', 'type': '[ClearRange]', 'xml': {'name': 'ClearRange', 'itemsName': 'ClearRange'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(PageList, self).__init__(**kwargs)
self.page_range = kwargs.get('page_range', None)
self.clear_range = kwargs.get('clear_range', None)
class PageRange(Model):
"""PageRange.
All required parameters must be populated in order to send to Azure.
:param start: Required.
:type start: long
:param end: Required.
:type end: long
"""
_validation = {
'start': {'required': True},
'end': {'required': True},
}
_attribute_map = {
'start': {'key': 'Start', 'type': 'long', 'xml': {'name': 'Start'}},
'end': {'key': 'End', 'type': 'long', 'xml': {'name': 'End'}},
}
_xml_map = {
'name': 'PageRange'
}
def __init__(self, **kwargs):
super(PageRange, self).__init__(**kwargs)
self.start = kwargs.get('start', None)
self.end = kwargs.get('end', None)
class QueryFormat(Model):
"""QueryFormat.
:param type: Possible values include: 'delimited', 'json', 'arrow'
:type type: str or ~azure.storage.blob.models.QueryFormatType
:param delimited_text_configuration:
:type delimited_text_configuration:
~azure.storage.blob.models.DelimitedTextConfiguration
:param json_text_configuration:
:type json_text_configuration:
~azure.storage.blob.models.JsonTextConfiguration
:param arrow_configuration:
:type arrow_configuration: ~azure.storage.blob.models.ArrowConfiguration
"""
_attribute_map = {
'type': {'key': 'Type', 'type': 'QueryFormatType', 'xml': {'name': 'Type'}},
'delimited_text_configuration': {'key': 'DelimitedTextConfiguration', 'type': 'DelimitedTextConfiguration', 'xml': {'name': 'DelimitedTextConfiguration'}},
'json_text_configuration': {'key': 'JsonTextConfiguration', 'type': 'JsonTextConfiguration', 'xml': {'name': 'JsonTextConfiguration'}},
'arrow_configuration': {'key': 'ArrowConfiguration', 'type': 'ArrowConfiguration', 'xml': {'name': 'ArrowConfiguration'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(QueryFormat, self).__init__(**kwargs)
self.type = kwargs.get('type', None)
self.delimited_text_configuration = kwargs.get('delimited_text_configuration', None)
self.json_text_configuration = kwargs.get('json_text_configuration', None)
self.arrow_configuration = kwargs.get('arrow_configuration', None)
class QueryRequest(Model):
"""the quick query body.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar query_type: Required. the query type. Default value: "SQL" .
:vartype query_type: str
:param expression: Required. a query statement
:type expression: str
:param input_serialization:
:type input_serialization: ~azure.storage.blob.models.QuerySerialization
:param output_serialization:
:type output_serialization: ~azure.storage.blob.models.QuerySerialization
"""
_validation = {
'query_type': {'required': True, 'constant': True},
'expression': {'required': True},
}
_attribute_map = {
'query_type': {'key': 'QueryType', 'type': 'str', 'xml': {'name': 'QueryType'}},
'expression': {'key': 'Expression', 'type': 'str', 'xml': {'name': 'Expression'}},
'input_serialization': {'key': 'InputSerialization', 'type': 'QuerySerialization', 'xml': {'name': 'InputSerialization'}},
'output_serialization': {'key': 'OutputSerialization', 'type': 'QuerySerialization', 'xml': {'name': 'OutputSerialization'}},
}
_xml_map = {
'name': 'QueryRequest'
}
query_type = "SQL"
def __init__(self, **kwargs):
super(QueryRequest, self).__init__(**kwargs)
self.expression = kwargs.get('expression', None)
self.input_serialization = kwargs.get('input_serialization', None)
self.output_serialization = kwargs.get('output_serialization', None)
class QuerySerialization(Model):
"""QuerySerialization.
All required parameters must be populated in order to send to Azure.
:param format: Required.
:type format: ~azure.storage.blob.models.QueryFormat
"""
_validation = {
'format': {'required': True},
}
_attribute_map = {
'format': {'key': 'Format', 'type': 'QueryFormat', 'xml': {'name': 'Format'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(QuerySerialization, self).__init__(**kwargs)
self.format = kwargs.get('format', None)
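# Illustrative sketch: a QueryRequest combines a SQL expression with input and output
# serializations; here both sides use a hypothetical CSV (delimited text) format.
def _example_query_request():
    csv_format = QueryFormat(
        type='delimited',
        delimited_text_configuration=DelimitedTextConfiguration(
            column_separator=',',
            field_quote='"',
            record_separator='\n',
            escape_char='\\',
            headers_present=True))
    serialization = QuerySerialization(format=csv_format)
    return QueryRequest(
        expression='SELECT * FROM BlobStorage',
        input_serialization=serialization,
        output_serialization=serialization)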
class RetentionPolicy(Model):
"""the retention policy which determines how long the associated data should
persist.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Indicates whether a retention policy is enabled
for the storage service
:type enabled: bool
:param days: Indicates the number of days that metrics or logging or
soft-deleted data should be retained. All data older than this value will
be deleted
:type days: int
"""
_validation = {
'enabled': {'required': True},
'days': {'minimum': 1},
}
_attribute_map = {
'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
'days': {'key': 'Days', 'type': 'int', 'xml': {'name': 'Days'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(RetentionPolicy, self).__init__(**kwargs)
self.enabled = kwargs.get('enabled', None)
self.days = kwargs.get('days', None)
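# Illustrative sketch: Logging (and Metrics) reference a RetentionPolicy; the analytics version
# string and the seven-day retention below are hypothetical.
def _example_logging_settings():
    retention = RetentionPolicy(enabled=True, days=7)
    return Logging(
        version='1.0',
        delete=True,
        read=False,
        write=True,
        retention_policy=retention)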
class SequenceNumberAccessConditions(Model):
"""Additional parameters for a set of operations, such as:
PageBlob_upload_pages, PageBlob_clear_pages,
PageBlob_upload_pages_from_url.
:param if_sequence_number_less_than_or_equal_to: Specify this header value
to operate only on a blob if it has a sequence number less than or equal
to the specified.
:type if_sequence_number_less_than_or_equal_to: long
:param if_sequence_number_less_than: Specify this header value to operate
only on a blob if it has a sequence number less than the specified.
:type if_sequence_number_less_than: long
:param if_sequence_number_equal_to: Specify this header value to operate
only on a blob if it has the specified sequence number.
:type if_sequence_number_equal_to: long
"""
_attribute_map = {
'if_sequence_number_less_than_or_equal_to': {'key': '', 'type': 'long', 'xml': {'name': 'if_sequence_number_less_than_or_equal_to'}},
'if_sequence_number_less_than': {'key': '', 'type': 'long', 'xml': {'name': 'if_sequence_number_less_than'}},
'if_sequence_number_equal_to': {'key': '', 'type': 'long', 'xml': {'name': 'if_sequence_number_equal_to'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(SequenceNumberAccessConditions, self).__init__(**kwargs)
self.if_sequence_number_less_than_or_equal_to = kwargs.get('if_sequence_number_less_than_or_equal_to', None)
self.if_sequence_number_less_than = kwargs.get('if_sequence_number_less_than', None)
self.if_sequence_number_equal_to = kwargs.get('if_sequence_number_equal_to', None)
class SignedIdentifier(Model):
"""signed identifier.
All required parameters must be populated in order to send to Azure.
:param id: Required. a unique id
:type id: str
:param access_policy:
:type access_policy: ~azure.storage.blob.models.AccessPolicy
"""
_validation = {
'id': {'required': True},
}
_attribute_map = {
'id': {'key': 'Id', 'type': 'str', 'xml': {'name': 'Id'}},
'access_policy': {'key': 'AccessPolicy', 'type': 'AccessPolicy', 'xml': {'name': 'AccessPolicy'}},
}
_xml_map = {
'name': 'SignedIdentifier'
}
def __init__(self, **kwargs):
super(SignedIdentifier, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.access_policy = kwargs.get('access_policy', None)
class SourceModifiedAccessConditions(Model):
"""Additional parameters for a set of operations.
:param source_if_modified_since: Specify this header value to operate only
on a blob if it has been modified since the specified date/time.
:type source_if_modified_since: datetime
:param source_if_unmodified_since: Specify this header value to operate
only on a blob if it has not been modified since the specified date/time.
:type source_if_unmodified_since: datetime
:param source_if_match: Specify an ETag value to operate only on blobs
with a matching value.
:type source_if_match: str
:param source_if_none_match: Specify an ETag value to operate only on
blobs without a matching value.
:type source_if_none_match: str
:param source_if_tags: Specify a SQL where clause on blob tags to operate
only on blobs with a matching value.
:type source_if_tags: str
"""
_attribute_map = {
'source_if_modified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'source_if_modified_since'}},
'source_if_unmodified_since': {'key': '', 'type': 'rfc-1123', 'xml': {'name': 'source_if_unmodified_since'}},
'source_if_match': {'key': '', 'type': 'str', 'xml': {'name': 'source_if_match'}},
'source_if_none_match': {'key': '', 'type': 'str', 'xml': {'name': 'source_if_none_match'}},
'source_if_tags': {'key': '', 'type': 'str', 'xml': {'name': 'source_if_tags'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(SourceModifiedAccessConditions, self).__init__(**kwargs)
self.source_if_modified_since = kwargs.get('source_if_modified_since', None)
self.source_if_unmodified_since = kwargs.get('source_if_unmodified_since', None)
self.source_if_match = kwargs.get('source_if_match', None)
self.source_if_none_match = kwargs.get('source_if_none_match', None)
self.source_if_tags = kwargs.get('source_if_tags', None)
class StaticWebsite(Model):
"""The properties that enable an account to host a static website.
All required parameters must be populated in order to send to Azure.
:param enabled: Required. Indicates whether this account is hosting a
static website
:type enabled: bool
:param index_document: The default name of the index page under each
directory
:type index_document: str
:param error_document404_path: The absolute path of the custom 404 page
:type error_document404_path: str
:param default_index_document_path: Absolute path of the default index
page
:type default_index_document_path: str
"""
_validation = {
'enabled': {'required': True},
}
_attribute_map = {
'enabled': {'key': 'Enabled', 'type': 'bool', 'xml': {'name': 'Enabled'}},
'index_document': {'key': 'IndexDocument', 'type': 'str', 'xml': {'name': 'IndexDocument'}},
'error_document404_path': {'key': 'ErrorDocument404Path', 'type': 'str', 'xml': {'name': 'ErrorDocument404Path'}},
'default_index_document_path': {'key': 'DefaultIndexDocumentPath', 'type': 'str', 'xml': {'name': 'DefaultIndexDocumentPath'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StaticWebsite, self).__init__(**kwargs)
self.enabled = kwargs.get('enabled', None)
self.index_document = kwargs.get('index_document', None)
self.error_document404_path = kwargs.get('error_document404_path', None)
self.default_index_document_path = kwargs.get('default_index_document_path', None)
class StorageError(Model):
"""StorageError.
:param message:
:type message: str
"""
_attribute_map = {
'message': {'key': 'Message', 'type': 'str', 'xml': {'name': 'Message'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StorageError, self).__init__(**kwargs)
self.message = kwargs.get('message', None)
class StorageErrorException(HttpResponseError):
"""Server responsed with exception of type: 'StorageError'.
:param deserialize: A deserializer
:param response: Server response to be deserialized.
"""
def __init__(self, response, deserialize, *args):
model_name = 'StorageError'
self.error = deserialize(model_name, response)
if self.error is None:
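            # If the response body could not be deserialized into a StorageError, fall back to an empty model instance.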
self.error = deserialize.dependencies[model_name]()
super(StorageErrorException, self).__init__(response=response)
class StorageServiceProperties(Model):
"""Storage Service Properties.
:param logging:
:type logging: ~azure.storage.blob.models.Logging
:param hour_metrics:
:type hour_metrics: ~azure.storage.blob.models.Metrics
:param minute_metrics:
:type minute_metrics: ~azure.storage.blob.models.Metrics
:param cors: The set of CORS rules.
:type cors: list[~azure.storage.blob.models.CorsRule]
:param default_service_version: The default version to use for requests to
the Blob service if an incoming request's version is not specified.
Possible values include version 2008-10-27 and all more recent versions
:type default_service_version: str
:param delete_retention_policy:
:type delete_retention_policy: ~azure.storage.blob.models.RetentionPolicy
:param static_website:
:type static_website: ~azure.storage.blob.models.StaticWebsite
"""
_attribute_map = {
'logging': {'key': 'Logging', 'type': 'Logging', 'xml': {'name': 'Logging'}},
'hour_metrics': {'key': 'HourMetrics', 'type': 'Metrics', 'xml': {'name': 'HourMetrics'}},
'minute_metrics': {'key': 'MinuteMetrics', 'type': 'Metrics', 'xml': {'name': 'MinuteMetrics'}},
'cors': {'key': 'Cors', 'type': '[CorsRule]', 'xml': {'name': 'Cors', 'itemsName': 'CorsRule', 'wrapped': True}},
'default_service_version': {'key': 'DefaultServiceVersion', 'type': 'str', 'xml': {'name': 'DefaultServiceVersion'}},
'delete_retention_policy': {'key': 'DeleteRetentionPolicy', 'type': 'RetentionPolicy', 'xml': {'name': 'DeleteRetentionPolicy'}},
'static_website': {'key': 'StaticWebsite', 'type': 'StaticWebsite', 'xml': {'name': 'StaticWebsite'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StorageServiceProperties, self).__init__(**kwargs)
self.logging = kwargs.get('logging', None)
self.hour_metrics = kwargs.get('hour_metrics', None)
self.minute_metrics = kwargs.get('minute_metrics', None)
self.cors = kwargs.get('cors', None)
self.default_service_version = kwargs.get('default_service_version', None)
self.delete_retention_policy = kwargs.get('delete_retention_policy', None)
self.static_website = kwargs.get('static_website', None)
class StorageServiceStats(Model):
"""Stats for the storage service.
:param geo_replication:
:type geo_replication: ~azure.storage.blob.models.GeoReplication
"""
_attribute_map = {
'geo_replication': {'key': 'GeoReplication', 'type': 'GeoReplication', 'xml': {'name': 'GeoReplication'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(StorageServiceStats, self).__init__(**kwargs)
self.geo_replication = kwargs.get('geo_replication', None)
class UserDelegationKey(Model):
"""A user delegation key.
All required parameters must be populated in order to send to Azure.
:param signed_oid: Required. The Azure Active Directory object ID in GUID
format.
:type signed_oid: str
:param signed_tid: Required. The Azure Active Directory tenant ID in GUID
format
:type signed_tid: str
:param signed_start: Required. The date-time the key is active
:type signed_start: datetime
:param signed_expiry: Required. The date-time the key expires
:type signed_expiry: datetime
:param signed_service: Required. Abbreviation of the Azure Storage service
that accepts the key
:type signed_service: str
:param signed_version: Required. The service version that created the key
:type signed_version: str
:param value: Required. The key as a base64 string
:type value: str
"""
_validation = {
'signed_oid': {'required': True},
'signed_tid': {'required': True},
'signed_start': {'required': True},
'signed_expiry': {'required': True},
'signed_service': {'required': True},
'signed_version': {'required': True},
'value': {'required': True},
}
_attribute_map = {
'signed_oid': {'key': 'SignedOid', 'type': 'str', 'xml': {'name': 'SignedOid'}},
'signed_tid': {'key': 'SignedTid', 'type': 'str', 'xml': {'name': 'SignedTid'}},
'signed_start': {'key': 'SignedStart', 'type': 'iso-8601', 'xml': {'name': 'SignedStart'}},
'signed_expiry': {'key': 'SignedExpiry', 'type': 'iso-8601', 'xml': {'name': 'SignedExpiry'}},
'signed_service': {'key': 'SignedService', 'type': 'str', 'xml': {'name': 'SignedService'}},
'signed_version': {'key': 'SignedVersion', 'type': 'str', 'xml': {'name': 'SignedVersion'}},
'value': {'key': 'Value', 'type': 'str', 'xml': {'name': 'Value'}},
}
_xml_map = {
}
def __init__(self, **kwargs):
super(UserDelegationKey, self).__init__(**kwargs)
self.signed_oid = kwargs.get('signed_oid', None)
self.signed_tid = kwargs.get('signed_tid', None)
self.signed_start = kwargs.get('signed_start', None)
self.signed_expiry = kwargs.get('signed_expiry', None)
self.signed_service = kwargs.get('signed_service', None)
self.signed_version = kwargs.get('signed_version', None)
self.value = kwargs.get('value', None)
| 39.455948 | 163 | 0.639737 |
2bcf5c933a4a056b4316f42605fc2389eafcaf9e | 76,291 | py | Python | unittests/linuxliketests.py | xggrnx/meson | af8b55d49b64e72dbefbd40d613b93f56d17b855 | ["Apache-2.0"] | null | null | null |
# Copyright 2016-2021 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import stat
import subprocess
import re
import tempfile
import textwrap
import os
import shutil
import hashlib
from unittest import mock, skipUnless, SkipTest
from glob import glob
from pathlib import Path
import typing as T
import mesonbuild.mlog
import mesonbuild.depfile
import mesonbuild.dependencies.base
import mesonbuild.dependencies.factory
import mesonbuild.envconfig
import mesonbuild.environment
import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.mesonlib import (
MachineChoice, is_windows, is_osx, is_cygwin, is_openbsd, is_haiku,
is_sunos, windows_proof_rmtree, version_compare, is_linux,
OptionKey, EnvironmentException
)
from mesonbuild.compilers import (
detect_c_compiler, detect_cpp_compiler, compiler_from_language,
AppleClangCCompiler, AppleClangCPPCompiler, AppleClangObjCCompiler,
AppleClangObjCPPCompiler
)
from mesonbuild.dependencies import PkgConfigDependency
import mesonbuild.modules.pkgconfig
from run_tests import (
get_fake_env
)
from .baseplatformtests import BasePlatformTests
from .helpers import *
def _clang_at_least(compiler: 'Compiler', minver: str, apple_minver: T.Optional[str]) -> bool:
"""
check that Clang compiler is at least a specified version, whether AppleClang or regular Clang
Parameters
----------
compiler:
Meson compiler object
minver: str
Clang minimum version
apple_minver: str
        AppleClang minimum version
Returns
-------
at_least: bool
Clang is at least the specified version
"""
if isinstance(compiler, (AppleClangCCompiler, AppleClangCPPCompiler)):
if apple_minver is None:
return False
return version_compare(compiler.version, apple_minver)
return version_compare(compiler.version, minver)
@skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
'''
Tests that should run on Linux, macOS, and *BSD
'''
def test_basic_soname(self):
'''
Test that the soname is set correctly for shared libraries. This can't
be an ordinary test case because we need to run `readelf` and actually
check the soname.
https://github.com/mesonbuild/meson/issues/785
'''
testdir = os.path.join(self.common_test_dir, '4 shared')
self.init(testdir)
self.build()
lib1 = os.path.join(self.builddir, 'libmylib.so')
soname = get_soname(lib1)
self.assertEqual(soname, 'libmylib.so')
def test_custom_soname(self):
'''
Test that the soname is set correctly for shared libraries when
a custom prefix and/or suffix is used. This can't be an ordinary test
case because we need to run `readelf` and actually check the soname.
https://github.com/mesonbuild/meson/issues/785
'''
testdir = os.path.join(self.common_test_dir, '24 library versions')
self.init(testdir)
self.build()
lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix')
soname = get_soname(lib1)
self.assertEqual(soname, 'prefixsomelib.suffix')
def test_pic(self):
'''
Test that -fPIC is correctly added to static libraries when b_staticpic
is true and not when it is false. This can't be an ordinary test case
because we need to inspect the compiler database.
'''
if is_windows() or is_cygwin() or is_osx():
raise SkipTest('PIC not relevant')
testdir = os.path.join(self.common_test_dir, '3 static')
self.init(testdir)
compdb = self.get_compdb()
self.assertIn('-fPIC', compdb[0]['command'])
self.setconf('-Db_staticpic=false')
# Regenerate build
self.build()
compdb = self.get_compdb()
self.assertNotIn('-fPIC', compdb[0]['command'])
@mock.patch.dict(os.environ)
def test_pkgconfig_gen(self):
'''
Test that generated pkg-config files can be found and have the correct
version and link args. This can't be an ordinary test case because we
need to run pkg-config outside of a Meson build file.
https://github.com/mesonbuild/meson/issues/889
'''
testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen')
self.init(testdir)
env = get_fake_env(testdir, self.builddir, self.prefix)
kwargs = {'required': True, 'silent': True}
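        # Point pkg-config exclusively at the .pc files generated into this build's private directory.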
os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir
foo_dep = PkgConfigDependency('libfoo', env, kwargs)
self.assertTrue(foo_dep.found())
self.assertEqual(foo_dep.get_version(), '1.0')
self.assertIn('-lfoo', foo_dep.get_link_args())
self.assertEqual(foo_dep.get_pkgconfig_variable('foo', {}), 'bar')
self.assertPathEqual(foo_dep.get_pkgconfig_variable('datadir', {}), '/usr/data')
libhello_nolib = PkgConfigDependency('libhello_nolib', env, kwargs)
self.assertTrue(libhello_nolib.found())
self.assertEqual(libhello_nolib.get_link_args(), [])
self.assertEqual(libhello_nolib.get_compile_args(), [])
self.assertEqual(libhello_nolib.get_pkgconfig_variable('foo', {}), 'bar')
self.assertEqual(libhello_nolib.get_pkgconfig_variable('prefix', {}), self.prefix)
self.assertEqual(libhello_nolib.get_pkgconfig_variable('escaped_var', {}), r'hello\ world')
self.assertEqual(libhello_nolib.get_pkgconfig_variable('unescaped_var', {}), 'hello world')
cc = detect_c_compiler(env, MachineChoice.HOST)
if cc.get_id() in {'gcc', 'clang'}:
for name in {'ct', 'ct0'}:
ct_dep = PkgConfigDependency(name, env, kwargs)
self.assertTrue(ct_dep.found())
self.assertIn('-lct', ct_dep.get_link_args())
def test_pkgconfig_gen_deps(self):
'''
Test that generated pkg-config files correctly handle dependencies
'''
testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen')
self.init(testdir)
privatedir1 = self.privatedir
self.new_builddir()
testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen', 'dependencies')
self.init(testdir, override_envvars={'PKG_CONFIG_LIBDIR': privatedir1})
privatedir2 = self.privatedir
env = {
'PKG_CONFIG_LIBDIR': os.pathsep.join([privatedir1, privatedir2]),
'PKG_CONFIG_SYSTEM_LIBRARY_PATH': '/usr/lib',
}
self._run(['pkg-config', 'dependency-test', '--validate'], override_envvars=env)
# pkg-config strips some duplicated flags so we have to parse the
        # generated file ourselves.
expected = {
'Requires': 'libexposed',
'Requires.private': 'libfoo >= 1.0',
'Libs': '-L${libdir} -llibmain -pthread -lcustom',
'Libs.private': '-lcustom2 -L${libdir} -llibinternal',
'Cflags': '-I${includedir} -pthread -DCUSTOM',
}
if is_osx() or is_haiku():
expected['Cflags'] = expected['Cflags'].replace('-pthread ', '')
with open(os.path.join(privatedir2, 'dependency-test.pc'), encoding='utf-8') as f:
matched_lines = 0
for line in f:
parts = line.split(':', 1)
if parts[0] in expected:
key = parts[0]
val = parts[1].strip()
expected_val = expected[key]
self.assertEqual(expected_val, val)
matched_lines += 1
self.assertEqual(len(expected), matched_lines)
cmd = ['pkg-config', 'requires-test']
out = self._run(cmd + ['--print-requires'], override_envvars=env).strip().split('\n')
if not is_openbsd():
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
else:
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))
cmd = ['pkg-config', 'requires-private-test']
out = self._run(cmd + ['--print-requires-private'], override_envvars=env).strip().split('\n')
if not is_openbsd():
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
else:
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo>=1.0', 'libhello']))
cmd = ['pkg-config', 'pub-lib-order']
out = self._run(cmd + ['--libs'], override_envvars=env).strip().split()
self.assertEqual(out, ['-llibmain2', '-llibinternal'])
# See common/44 pkgconfig-gen/meson.build for description of the case this test
with open(os.path.join(privatedir1, 'simple2.pc'), encoding='utf-8') as f:
content = f.read()
self.assertIn('Libs: -L${libdir} -lsimple2 -lsimple1', content)
self.assertIn('Libs.private: -lz', content)
with open(os.path.join(privatedir1, 'simple3.pc'), encoding='utf-8') as f:
content = f.read()
self.assertEqual(1, content.count('-lsimple3'))
with open(os.path.join(privatedir1, 'simple5.pc'), encoding='utf-8') as f:
content = f.read()
self.assertNotIn('-lstat2', content)
@mock.patch.dict(os.environ)
def test_pkgconfig_uninstalled(self):
testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen')
self.init(testdir)
self.build()
os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(self.builddir, 'meson-uninstalled')
if is_cygwin():
os.environ['PATH'] += os.pathsep + self.builddir
self.new_builddir()
testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen', 'dependencies')
self.init(testdir)
self.build()
self.run_tests()
def test_pkg_unfound(self):
testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig')
self.init(testdir)
with open(os.path.join(self.privatedir, 'somename.pc'), encoding='utf-8') as f:
pcfile = f.read()
self.assertFalse('blub_blob_blib' in pcfile)
def test_symlink_builddir(self) -> None:
'''
Test using a symlink as either the builddir for "setup" or
the argument for "-C".
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
symdir = f'{self.builddir}-symlink'
os.symlink(self.builddir, symdir)
self.addCleanup(os.unlink, symdir)
self.change_builddir(symdir)
self.init(testdir)
self.build()
self._run(self.mtest_command)
def test_vala_c_warnings(self):
'''
Test that no warnings are emitted for C code generated by Vala. This
can't be an ordinary test case because we need to inspect the compiler
database.
https://github.com/mesonbuild/meson/issues/864
'''
if not shutil.which('valac'):
raise SkipTest('valac not installed.')
testdir = os.path.join(self.vala_test_dir, '5 target glib')
self.init(testdir)
compdb = self.get_compdb()
vala_command = None
c_command = None
for each in compdb:
if each['file'].endswith('GLib.Thread.c'):
vala_command = each['command']
elif each['file'].endswith('GLib.Thread.vala'):
continue
elif each['file'].endswith('retcode.c'):
c_command = each['command']
else:
m = 'Unknown file {!r} in vala_c_warnings test'.format(each['file'])
raise AssertionError(m)
self.assertIsNotNone(vala_command)
self.assertIsNotNone(c_command)
# -w suppresses all warnings, should be there in Vala but not in C
self.assertIn(" -w ", vala_command)
self.assertNotIn(" -w ", c_command)
# -Wall enables all warnings, should be there in C but not in Vala
self.assertNotIn(" -Wall ", vala_command)
self.assertIn(" -Wall ", c_command)
# -Werror converts warnings to errors, should always be there since it's
# injected by an unrelated piece of code and the project has werror=true
self.assertIn(" -Werror ", vala_command)
self.assertIn(" -Werror ", c_command)
@skipIfNoPkgconfig
def test_qtdependency_pkgconfig_detection(self):
'''
Test that qt4 and qt5 detection with pkgconfig works.
'''
# Verify Qt4 or Qt5 can be found with pkg-config
qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore'])
qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core'])
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=pkg-config'])
# Confirm that the dependency was found with pkg-config
mesonlog = self.get_meson_log()
if qt4 == 0:
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)')
if qt5 == 0:
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)')
@skip_if_not_base_option('b_sanitize')
def test_generate_gir_with_address_sanitizer(self):
if is_cygwin():
raise SkipTest('asan not available on Cygwin')
if is_openbsd():
raise SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.framework_test_dir, '7 gnome')
self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
self.build()
def test_qt5dependency_qmake_detection(self):
'''
Test that qt5 detection with qmake works. This can't be an ordinary
test case because it involves setting the environment.
'''
# Verify that qmake is for Qt5
if not shutil.which('qmake-qt5'):
if not shutil.which('qmake'):
raise SkipTest('QMake not found')
output = subprocess.getoutput('qmake --version')
if 'Qt version 5' not in output:
raise SkipTest('Qmake found, but it is not for Qt 5.')
# Disable pkg-config codepath and force searching with qmake/qmake-qt5
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=qmake'])
# Confirm that the dependency was found with qmake
mesonlog = self.get_meson_log()
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt5 \(modules: Core\) found: YES .* \(qmake\)\n')
def test_qt6dependency_qmake_detection(self):
'''
Test that qt6 detection with qmake works. This can't be an ordinary
test case because it involves setting the environment.
'''
        # Verify that qmake is for Qt6
if not shutil.which('qmake-qt6'):
if not shutil.which('qmake'):
raise SkipTest('QMake not found')
output = subprocess.getoutput('qmake --version')
if 'Qt version 6' not in output:
raise SkipTest('Qmake found, but it is not for Qt 6.')
# Disable pkg-config codepath and force searching with qmake/qmake-qt6
testdir = os.path.join(self.framework_test_dir, '4 qt')
self.init(testdir, extra_args=['-Dmethod=qmake'])
# Confirm that the dependency was found with qmake
mesonlog = self.get_meson_log()
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt6 \(modules: Core\) found: YES .* \(qmake\)\n')
def glob_sofiles_without_privdir(self, g):
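        # Drop Meson's private '*.p' directories so only the actual shared objects are counted.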
files = glob(g)
return [f for f in files if not f.endswith('.p')]
def _test_soname_impl(self, libpath, install):
if is_cygwin() or is_osx():
raise SkipTest('Test only applicable to ELF and linuxlike sonames')
testdir = os.path.join(self.unit_test_dir, '1 soname')
self.init(testdir)
self.build()
if install:
self.install()
# File without aliases set.
nover = os.path.join(libpath, 'libnover.so')
self.assertPathExists(nover)
self.assertFalse(os.path.islink(nover))
self.assertEqual(get_soname(nover), 'libnover.so')
self.assertEqual(len(self.glob_sofiles_without_privdir(nover[:-3] + '*')), 1)
# File with version set
verset = os.path.join(libpath, 'libverset.so')
self.assertPathExists(verset + '.4.5.6')
self.assertEqual(os.readlink(verset), 'libverset.so.4')
self.assertEqual(get_soname(verset), 'libverset.so.4')
self.assertEqual(len(self.glob_sofiles_without_privdir(verset[:-3] + '*')), 3)
# File with soversion set
soverset = os.path.join(libpath, 'libsoverset.so')
self.assertPathExists(soverset + '.1.2.3')
self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
self.assertEqual(len(self.glob_sofiles_without_privdir(soverset[:-3] + '*')), 2)
# File with version and soversion set to same values
settosame = os.path.join(libpath, 'libsettosame.so')
self.assertPathExists(settosame + '.7.8.9')
self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
self.assertEqual(len(self.glob_sofiles_without_privdir(settosame[:-3] + '*')), 2)
# File with version and soversion set to different values
bothset = os.path.join(libpath, 'libbothset.so')
self.assertPathExists(bothset + '.1.2.3')
self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
self.assertEqual(len(self.glob_sofiles_without_privdir(bothset[:-3] + '*')), 3)
def test_soname(self):
self._test_soname_impl(self.builddir, False)
def test_installed_soname(self):
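        # self.installdir acts as a DESTDIR here, so the absolute prefix/libdir path is appended to it verbatim.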
libdir = self.installdir + os.path.join(self.prefix, self.libdir)
self._test_soname_impl(libdir, True)
def test_compiler_check_flags_order(self):
'''
Test that compiler check flags override all other flags. This can't be
an ordinary test case because it needs the environment to be set.
'''
testdir = os.path.join(self.common_test_dir, '36 has function')
env = get_fake_env(testdir, self.builddir, self.prefix)
cpp = detect_cpp_compiler(env, MachineChoice.HOST)
Oflag = '-O3'
OflagCPP = Oflag
if cpp.get_id() in ('clang', 'gcc'):
# prevent developers from adding "int main(int argc, char **argv)"
# to small Meson checks unless these parameters are actually used
OflagCPP += ' -Werror=unused-parameter'
env = {'CFLAGS': Oflag,
'CXXFLAGS': OflagCPP}
self.init(testdir, override_envvars=env)
cmds = self.get_meson_log_compiler_checks()
for cmd in cmds:
if cmd[0] == 'ccache':
cmd = cmd[1:]
# Verify that -I flags from the `args` kwarg are first
# This is set in the '36 has function' test case
self.assertEqual(cmd[1], '-I/tmp')
# Verify that -O3 set via the environment is overridden by -O0
Oargs = [arg for arg in cmd if arg.startswith('-O')]
self.assertEqual(Oargs, [Oflag, '-O0'])
def _test_stds_impl(self, testdir: str, compiler: 'Compiler') -> None:
has_cpp17 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=5.0.0', '>=9.1') or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=5.0.0'))
has_cpp2a_c17 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=6.0.0', '>=10.0') or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
has_cpp20 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=10.0.0', None) or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=10.0.0'))
has_c18 = (compiler.get_id() not in {'clang', 'gcc'} or
compiler.get_id() == 'clang' and _clang_at_least(compiler, '>=8.0.0', '>=11.0') or
compiler.get_id() == 'gcc' and version_compare(compiler.version, '>=8.0.0'))
# Check that all the listed -std=xxx options for this compiler work just fine when used
# https://en.wikipedia.org/wiki/Xcode#Latest_versions
# https://www.gnu.org/software/gcc/projects/cxx-status.html
key = OptionKey('std', lang=compiler.language)
for v in compiler.get_options()[key].choices:
# we do it like this to handle gnu++17,c++17 and gnu17,c17 cleanly
# thus, C++ first
if '++17' in v and not has_cpp17:
continue
elif '++2a' in v and not has_cpp2a_c17: # https://en.cppreference.com/w/cpp/compiler_support
continue
elif '++20' in v and not has_cpp20:
continue
# now C
elif '17' in v and not has_cpp2a_c17:
continue
elif '18' in v and not has_c18:
continue
self.init(testdir, extra_args=[f'-D{key!s}={v}'])
cmd = self.get_compdb()[0]['command']
# c++03 and gnu++03 are not understood by ICC, don't try to look for them
skiplist = frozenset([
('intel', 'c++03'),
('intel', 'gnu++03')])
            if v != 'none' and (compiler.get_id(), v) not in skiplist:
cmd_std = f" -std={v} "
self.assertIn(cmd_std, cmd)
try:
self.build()
except Exception:
print(f'{key!s} was {v!r}')
raise
self.wipe()
# Check that an invalid std option in CFLAGS/CPPFLAGS fails
# Needed because by default ICC ignores invalid options
cmd_std = '-std=FAIL'
if compiler.language == 'c':
env_flag_name = 'CFLAGS'
elif compiler.language == 'cpp':
env_flag_name = 'CXXFLAGS'
else:
raise NotImplementedError(f'Language {compiler.language} not defined.')
env = {}
env[env_flag_name] = cmd_std
with self.assertRaises((subprocess.CalledProcessError, EnvironmentException),
msg='C compiler should have failed with -std=FAIL'):
            self.init(testdir, override_envvars=env)
# ICC won't fail in the above because additional flags are needed to
        # make it treat unknown -std=... options as errors.
self.build()
def test_compiler_c_stds(self):
'''
Test that C stds specified for this compiler can all be used. Can't be
an ordinary test because it requires passing options to meson.
'''
testdir = os.path.join(self.common_test_dir, '1 trivial')
env = get_fake_env(testdir, self.builddir, self.prefix)
cc = detect_c_compiler(env, MachineChoice.HOST)
self._test_stds_impl(testdir, cc)
def test_compiler_cpp_stds(self):
'''
Test that C++ stds specified for this compiler can all be used. Can't
be an ordinary test because it requires passing options to meson.
'''
testdir = os.path.join(self.common_test_dir, '2 cpp')
env = get_fake_env(testdir, self.builddir, self.prefix)
cpp = detect_cpp_compiler(env, MachineChoice.HOST)
self._test_stds_impl(testdir, cpp)
def test_unity_subproj(self):
testdir = os.path.join(self.common_test_dir, '42 subproject')
self.init(testdir, extra_args='--unity=subprojects')
pdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/simpletest*.p'))
self.assertEqual(len(pdirs), 1)
self.assertPathExists(os.path.join(pdirs[0], 'simpletest-unity0.c'))
sdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/*sublib*.p'))
self.assertEqual(len(sdirs), 1)
self.assertPathExists(os.path.join(sdirs[0], 'sublib-unity0.c'))
self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
self.build()
def test_installed_modes(self):
'''
Test that files installed by these tests have the correct permissions.
Can't be an ordinary test because our installed_files.txt is very basic.
'''
# Test file modes
testdir = os.path.join(self.common_test_dir, '12 data')
self.init(testdir)
self.install()
f = os.path.join(self.installdir, 'etc', 'etcfile.dat')
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = 'rw------T'
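        # Compare only the permission bits; found_mode[0] is the file-type character from stat.filemode().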
self.assertEqual(want_mode, found_mode[1:])
f = os.path.join(self.installdir, 'usr', 'bin', 'runscript.sh')
statf = os.stat(f)
found_mode = stat.filemode(statf.st_mode)
want_mode = 'rwxr-sr-x'
self.assertEqual(want_mode, found_mode[1:])
if os.getuid() == 0:
# The chown failed nonfatally if we're not root
self.assertEqual(0, statf.st_uid)
self.assertEqual(0, statf.st_gid)
f = os.path.join(self.installdir, 'usr', 'share', 'progname',
'fileobject_datafile.dat')
orig = os.path.join(testdir, 'fileobject_datafile.dat')
statf = os.stat(f)
statorig = os.stat(orig)
found_mode = stat.filemode(statf.st_mode)
orig_mode = stat.filemode(statorig.st_mode)
self.assertEqual(orig_mode[1:], found_mode[1:])
self.assertEqual(os.getuid(), statf.st_uid)
if os.getuid() == 0:
# The chown failed nonfatally if we're not root
self.assertEqual(0, statf.st_gid)
self.wipe()
# Test directory modes
testdir = os.path.join(self.common_test_dir, '59 install subdir')
self.init(testdir)
self.install()
f = os.path.join(self.installdir, 'usr', 'share', 'sub1', 'second.dat')
statf = os.stat(f)
found_mode = stat.filemode(statf.st_mode)
want_mode = 'rwxr-x--t'
self.assertEqual(want_mode, found_mode[1:])
if os.getuid() == 0:
# The chown failed nonfatally if we're not root
self.assertEqual(0, statf.st_uid)
def test_installed_modes_extended(self):
'''
Test that files are installed with correct permissions using install_mode.
'''
testdir = os.path.join(self.common_test_dir, '190 install_mode')
self.init(testdir)
self.build()
self.install()
for fsobj, want_mode in [
('bin', 'drwxr-x---'),
('bin/runscript.sh', '-rwxr-sr-x'),
('bin/trivialprog', '-rwxr-sr-x'),
('include', 'drwxr-x---'),
('include/config.h', '-rw-rwSr--'),
('include/rootdir.h', '-r--r--r-T'),
('lib', 'drwxr-x---'),
('lib/libstat.a', '-rw---Sr--'),
('share', 'drwxr-x---'),
('share/man', 'drwxr-x---'),
('share/man/man1', 'drwxr-x---'),
('share/man/man1/foo.1', '-r--r--r-T'),
('share/sub1', 'drwxr-x---'),
('share/sub1/second.dat', '-rwxr-x--t'),
('subdir', 'drwxr-x---'),
('subdir/data.dat', '-rw-rwSr--'),
]:
f = os.path.join(self.installdir, 'usr', *fsobj.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
self.assertEqual(want_mode, found_mode,
msg=('Expected file %s to have mode %s but found %s instead.' %
(fsobj, want_mode, found_mode)))
# Ensure that introspect --installed works on all types of files
# FIXME: also verify the files list
self.introspect('--installed')
def test_install_umask(self):
'''
Test that files are installed with correct permissions using default
install umask of 022, regardless of the umask at time the worktree
was checked out or the build was executed.
'''
# Copy source tree to a temporary directory and change permissions
# there to simulate a checkout with umask 002.
orig_testdir = os.path.join(self.unit_test_dir, '26 install umask')
# Create a new testdir under tmpdir.
tmpdir = os.path.realpath(tempfile.mkdtemp())
self.addCleanup(windows_proof_rmtree, tmpdir)
testdir = os.path.join(tmpdir, '26 install umask')
# Copy the tree using shutil.copyfile, which will use the current umask
# instead of preserving permissions of the old tree.
save_umask = os.umask(0o002)
self.addCleanup(os.umask, save_umask)
shutil.copytree(orig_testdir, testdir, copy_function=shutil.copyfile)
# Preserve the executable status of subdir/sayhello though.
os.chmod(os.path.join(testdir, 'subdir', 'sayhello'), 0o775)
self.init(testdir)
# Run the build under a 027 umask now.
os.umask(0o027)
self.build()
# And keep umask 027 for the install step too.
self.install()
for executable in [
'bin/prog',
'share/subdir/sayhello',
]:
f = os.path.join(self.installdir, 'usr', *executable.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = '-rwxr-xr-x'
self.assertEqual(want_mode, found_mode,
msg=('Expected file %s to have mode %s but found %s instead.' %
(executable, want_mode, found_mode)))
for directory in [
'usr',
'usr/bin',
'usr/include',
'usr/share',
'usr/share/man',
'usr/share/man/man1',
'usr/share/subdir',
]:
f = os.path.join(self.installdir, *directory.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = 'drwxr-xr-x'
self.assertEqual(want_mode, found_mode,
msg=('Expected directory %s to have mode %s but found %s instead.' %
(directory, want_mode, found_mode)))
for datafile in [
'include/sample.h',
'share/datafile.cat',
'share/file.dat',
'share/man/man1/prog.1',
'share/subdir/datafile.dog',
]:
f = os.path.join(self.installdir, 'usr', *datafile.split('/'))
found_mode = stat.filemode(os.stat(f).st_mode)
want_mode = '-rw-r--r--'
self.assertEqual(want_mode, found_mode,
msg=('Expected file %s to have mode %s but found %s instead.' %
(datafile, want_mode, found_mode)))
def test_cpp_std_override(self):
testdir = os.path.join(self.unit_test_dir, '6 std override')
self.init(testdir)
compdb = self.get_compdb()
# Don't try to use -std=c++03 as a check for the
# presence of a compiler flag, as ICC does not
# support it.
for i in compdb:
if 'prog98' in i['file']:
c98_comp = i['command']
if 'prog11' in i['file']:
c11_comp = i['command']
if 'progp' in i['file']:
plain_comp = i['command']
self.assertNotEqual(len(plain_comp), 0)
self.assertIn('-std=c++98', c98_comp)
self.assertNotIn('-std=c++11', c98_comp)
self.assertIn('-std=c++11', c11_comp)
self.assertNotIn('-std=c++98', c11_comp)
self.assertNotIn('-std=c++98', plain_comp)
self.assertNotIn('-std=c++11', plain_comp)
# Now werror
self.assertIn('-Werror', plain_comp)
self.assertNotIn('-Werror', c98_comp)
def test_run_installed(self):
if is_cygwin() or is_osx():
raise SkipTest('LD_LIBRARY_PATH and RPATH not applicable')
testdir = os.path.join(self.unit_test_dir, '7 run installed')
self.init(testdir)
self.build()
self.install()
installed_exe = os.path.join(self.installdir, 'usr/bin/prog')
installed_libdir = os.path.join(self.installdir, 'usr/foo')
installed_lib = os.path.join(installed_libdir, 'libfoo.so')
self.assertTrue(os.path.isfile(installed_exe))
self.assertTrue(os.path.isdir(installed_libdir))
self.assertTrue(os.path.isfile(installed_lib))
# Must fail when run without LD_LIBRARY_PATH to ensure that
# rpath has been properly stripped rather than pointing to the builddir.
self.assertNotEqual(subprocess.call(installed_exe, stderr=subprocess.DEVNULL), 0)
# When LD_LIBRARY_PATH is set it should start working.
# For some reason setting LD_LIBRARY_PATH in os.environ fails
# when all tests are run (but works when only this test is run),
# but doing this explicitly works.
env = os.environ.copy()
env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')])
self.assertEqual(subprocess.call(installed_exe, env=env), 0)
# Ensure that introspect --installed works
installed = self.introspect('--installed')
for v in installed.values():
self.assertTrue('prog' in v or 'foo' in v)
@skipIfNoPkgconfig
def test_order_of_l_arguments(self):
testdir = os.path.join(self.unit_test_dir, '8 -L -l order')
self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir})
# NOTE: .pc file has -Lfoo -lfoo -Lbar -lbar but pkg-config reorders
# the flags before returning them to -Lfoo -Lbar -lfoo -lbar
# but pkgconf seems to not do that. Sigh. Support both.
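        # Each pair asserts that the first flag must appear before the second on the final link line.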
expected_order = [('-L/me/first', '-lfoo1'),
('-L/me/second', '-lfoo2'),
('-L/me/first', '-L/me/second'),
('-lfoo1', '-lfoo2'),
('-L/me/second', '-L/me/third'),
('-L/me/third', '-L/me/fourth',),
('-L/me/third', '-lfoo3'),
('-L/me/fourth', '-lfoo4'),
('-lfoo3', '-lfoo4'),
]
with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as ifile:
for line in ifile:
if expected_order[0][0] in line:
for first, second in expected_order:
self.assertLess(line.index(first), line.index(second))
return
raise RuntimeError('Linker entries not found in the Ninja file.')
def test_introspect_dependencies(self):
'''
Tests that mesonintrospect --dependencies returns expected output.
'''
testdir = os.path.join(self.framework_test_dir, '7 gnome')
self.init(testdir)
glib_found = False
gobject_found = False
deps = self.introspect('--dependencies')
self.assertIsInstance(deps, list)
for dep in deps:
self.assertIsInstance(dep, dict)
self.assertIn('name', dep)
self.assertIn('compile_args', dep)
self.assertIn('link_args', dep)
if dep['name'] == 'glib-2.0':
glib_found = True
elif dep['name'] == 'gobject-2.0':
gobject_found = True
self.assertTrue(glib_found)
self.assertTrue(gobject_found)
if subprocess.call(['pkg-config', '--exists', 'glib-2.0 >= 2.56.2']) != 0:
raise SkipTest('glib >= 2.56.2 needed for the rest')
targets = self.introspect('--targets')
docbook_target = None
for t in targets:
if t['name'] == 'generated-gdbus-docbook':
docbook_target = t
break
self.assertIsInstance(docbook_target, dict)
self.assertEqual(os.path.basename(t['filename'][0]), 'generated-gdbus-doc-' + os.path.basename(t['target_sources'][0]['sources'][0]))
def test_introspect_installed(self):
testdir = os.path.join(self.linuxlike_test_dir, '7 library versions')
self.init(testdir)
install = self.introspect('--installed')
install = {os.path.basename(k): v for k, v in install.items()}
print(install)
if is_osx():
the_truth = {
'libmodule.dylib': '/usr/lib/libmodule.dylib',
'libnoversion.dylib': '/usr/lib/libnoversion.dylib',
'libonlysoversion.5.dylib': '/usr/lib/libonlysoversion.5.dylib',
'libonlysoversion.dylib': '/usr/lib/libonlysoversion.dylib',
'libonlyversion.1.dylib': '/usr/lib/libonlyversion.1.dylib',
'libonlyversion.dylib': '/usr/lib/libonlyversion.dylib',
'libsome.0.dylib': '/usr/lib/libsome.0.dylib',
'libsome.dylib': '/usr/lib/libsome.dylib',
}
the_truth_2 = {'/usr/lib/libsome.dylib',
'/usr/lib/libsome.0.dylib',
}
else:
the_truth = {
'libmodule.so': '/usr/lib/libmodule.so',
'libnoversion.so': '/usr/lib/libnoversion.so',
'libonlysoversion.so': '/usr/lib/libonlysoversion.so',
'libonlysoversion.so.5': '/usr/lib/libonlysoversion.so.5',
'libonlyversion.so': '/usr/lib/libonlyversion.so',
'libonlyversion.so.1': '/usr/lib/libonlyversion.so.1',
'libonlyversion.so.1.4.5': '/usr/lib/libonlyversion.so.1.4.5',
'libsome.so': '/usr/lib/libsome.so',
'libsome.so.0': '/usr/lib/libsome.so.0',
'libsome.so.1.2.3': '/usr/lib/libsome.so.1.2.3',
}
the_truth_2 = {'/usr/lib/libsome.so',
'/usr/lib/libsome.so.0',
'/usr/lib/libsome.so.1.2.3'}
self.assertDictEqual(install, the_truth)
targets = self.introspect('--targets')
for t in targets:
if t['name'] != 'some':
continue
self.assertSetEqual(the_truth_2, set(t['install_filename']))
def test_build_rpath(self):
if is_cygwin():
raise SkipTest('Windows PE/COFF binaries do not use RPATH')
testdir = os.path.join(self.unit_test_dir, '10 build_rpath')
self.init(testdir)
self.build()
build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx'))
self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar')
self.install()
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog'))
self.assertEqual(install_rpath, '/baz')
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx'))
self.assertEqual(install_rpath, 'baz')
@skipIfNoPkgconfig
def test_build_rpath_pkgconfig(self):
'''
Test that current build artefacts (libs) are found first on the rpath,
manually specified rpath comes second and additional rpath elements (from
pkg-config files) come last
'''
if is_cygwin():
raise SkipTest('Windows PE/COFF binaries do not use RPATH')
testdir = os.path.join(self.unit_test_dir, '90 pkgconfig build rpath order')
self.init(testdir, override_envvars={'PKG_CONFIG_PATH': testdir})
self.build()
build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar:/foo/dummy')
build_rpath = get_rpath(os.path.join(self.builddir, 'progcxx'))
self.assertEqual(build_rpath, '$ORIGIN/sub:/foo/bar:/foo/dummy')
self.install()
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/prog'))
self.assertEqual(install_rpath, '/baz:/foo/dummy')
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx'))
self.assertEqual(install_rpath, 'baz:/foo/dummy')
def test_global_rpath(self):
if is_cygwin():
raise SkipTest('Windows PE/COFF binaries do not use RPATH')
if is_osx():
raise SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)')
testdir = os.path.join(self.unit_test_dir, '80 global-rpath')
oldinstalldir = self.installdir
# Build and install an external library without DESTDIR.
# The external library generates a .pc file without an rpath.
yonder_dir = os.path.join(testdir, 'yonder')
yonder_prefix = os.path.join(oldinstalldir, 'yonder')
yonder_libdir = os.path.join(yonder_prefix, self.libdir)
self.prefix = yonder_prefix
self.installdir = yonder_prefix
self.init(yonder_dir)
self.build()
self.install(use_destdir=False)
# Since rpath has multiple valid formats we need to
# test that they are all properly used.
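        # Each entry is (linker flag prefix, whether configuring with it is expected to fail).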
rpath_formats = [
('-Wl,-rpath=', False),
('-Wl,-rpath,', False),
('-Wl,--just-symbols=', True),
('-Wl,--just-symbols,', True),
('-Wl,-R', False),
('-Wl,-R,', False)
]
for rpath_format, exception in rpath_formats:
# Build an app that uses that installed library.
# Supply the rpath to the installed library via LDFLAGS
# (as systems like buildroot and guix are wont to do)
# and verify install preserves that rpath.
self.new_builddir()
env = {'LDFLAGS': rpath_format + yonder_libdir,
'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')}
if exception:
with self.assertRaises(subprocess.CalledProcessError):
self.init(testdir, override_envvars=env)
continue
self.init(testdir, override_envvars=env)
self.build()
self.install(use_destdir=False)
got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified'))
self.assertEqual(got_rpath, yonder_libdir, rpath_format)
@skip_if_not_base_option('b_sanitize')
def test_pch_with_address_sanitizer(self):
if is_cygwin():
raise SkipTest('asan not available on Cygwin')
if is_openbsd():
raise SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.common_test_dir, '13 pch')
self.init(testdir, extra_args=['-Db_sanitize=address', '-Db_lundef=false'])
self.build()
compdb = self.get_compdb()
for i in compdb:
self.assertIn("-fsanitize=address", i["command"])
def test_cross_find_program(self):
testdir = os.path.join(self.unit_test_dir, '11 cross prog')
crossfile = tempfile.NamedTemporaryFile(mode='w')
print(os.path.join(testdir, 'some_cross_tool.py'))
tool_path = os.path.join(testdir, 'some_cross_tool.py')
crossfile.write(textwrap.dedent(f'''\
[binaries]
c = '{shutil.which('gcc' if is_sunos() else 'cc')}'
ar = '{shutil.which('ar')}'
strip = '{shutil.which('strip')}'
sometool.py = ['{tool_path}']
someothertool.py = '{tool_path}'
[properties]
[host_machine]
system = 'linux'
cpu_family = 'arm'
cpu = 'armv7' # Not sure if correct.
endian = 'little'
'''))
crossfile.flush()
self.meson_cross_file = crossfile.name
self.init(testdir)
def test_reconfigure(self):
testdir = os.path.join(self.unit_test_dir, '13 reconfigure')
self.init(testdir, extra_args=['-Db_coverage=true'], default_args=False)
self.build('reconfigure')
def test_vala_generated_source_buildir_inside_source_tree(self):
'''
Test that valac outputs generated C files in the expected location when
the builddir is a subdir of the source tree.
'''
if not shutil.which('valac'):
raise SkipTest('valac not installed.')
testdir = os.path.join(self.vala_test_dir, '8 generated sources')
newdir = os.path.join(self.builddir, 'srctree')
shutil.copytree(testdir, newdir)
testdir = newdir
# New builddir
builddir = os.path.join(testdir, 'subdir/_build')
os.makedirs(builddir, exist_ok=True)
self.change_builddir(builddir)
self.init(testdir)
self.build()
def test_old_gnome_module_codepaths(self):
'''
A lot of code in the GNOME module is conditional on the version of the
glib tools that are installed, and breakages in the old code can slip
by once the CI has a newer glib version. So we force the GNOME module
to pretend that it's running on an ancient glib so the fallback code is
also tested.
'''
testdir = os.path.join(self.framework_test_dir, '7 gnome')
mesonbuild.modules.gnome.native_glib_version = '2.20'
env = {'MESON_UNIT_TEST_PRETEND_GLIB_OLD': "1"}
try:
self.init(testdir,
inprocess=True,
override_envvars=env)
self.build(override_envvars=env)
finally:
mesonbuild.modules.gnome.native_glib_version = None
@skipIfNoPkgconfig
def test_pkgconfig_usage(self):
testdir1 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependency')
testdir2 = os.path.join(self.unit_test_dir, '27 pkgconfig usage/dependee')
if subprocess.call(['pkg-config', '--cflags', 'glib-2.0'],
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL) != 0:
raise SkipTest('Glib 2.0 dependency not available.')
with tempfile.TemporaryDirectory() as tempdirname:
self.init(testdir1, extra_args=['--prefix=' + tempdirname, '--libdir=lib'], default_args=False)
self.install(use_destdir=False)
shutil.rmtree(self.builddir)
os.mkdir(self.builddir)
pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'libpkgdep.pc')))
lib_dir = os.path.join(tempdirname, 'lib')
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = pkg_dir
# Private internal libraries must not leak out.
pkg_out = subprocess.check_output(['pkg-config', '--static', '--libs', 'libpkgdep'], env=myenv)
self.assertFalse(b'libpkgdep-int' in pkg_out, 'Internal library leaked out.')
# Dependencies must not leak to cflags when building only a shared library.
pkg_out = subprocess.check_output(['pkg-config', '--cflags', 'libpkgdep'], env=myenv)
self.assertFalse(b'glib' in pkg_out, 'Internal dependency leaked to headers.')
# Test that the result is usable.
self.init(testdir2, override_envvars=myenv)
self.build(override_envvars=myenv)
myenv = os.environ.copy()
myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')])
if is_cygwin():
bin_dir = os.path.join(tempdirname, 'bin')
myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH']
self.assertTrue(os.path.isdir(lib_dir))
test_exe = os.path.join(self.builddir, 'pkguser')
self.assertTrue(os.path.isfile(test_exe))
subprocess.check_call(test_exe, env=myenv)
@skipIfNoPkgconfig
def test_pkgconfig_relative_paths(self):
testdir = os.path.join(self.unit_test_dir, '62 pkgconfig relative paths')
pkg_dir = os.path.join(testdir, 'pkgconfig')
self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'librelativepath.pc')))
env = get_fake_env(testdir, self.builddir, self.prefix)
env.coredata.set_options({OptionKey('pkg_config_path'): pkg_dir}, subproject='')
kwargs = {'required': True, 'silent': True}
relative_path_dep = PkgConfigDependency('librelativepath', env, kwargs)
self.assertTrue(relative_path_dep.found())
# Ensure link_args are properly quoted
libpath = Path(self.builddir) / '../relativepath/lib'
link_args = ['-L' + libpath.as_posix(), '-lrelativepath']
self.assertEqual(relative_path_dep.get_link_args(), link_args)
@skipIfNoPkgconfig
def test_pkgconfig_duplicate_path_entries(self):
testdir = os.path.join(self.unit_test_dir, '111 pkgconfig duplicate path entries')
pkg_dir = os.path.join(testdir, 'pkgconfig')
env = get_fake_env(testdir, self.builddir, self.prefix)
env.coredata.set_options({OptionKey('pkg_config_path'): pkg_dir}, subproject='')
PkgConfigDependency.setup_env({}, env, MachineChoice.HOST, pkg_dir)
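        # setup_env() must not duplicate a directory that is already present in pkg_config_path.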
pkg_config_path = env.coredata.options[OptionKey('pkg_config_path')].value
self.assertTrue(len(pkg_config_path) == 1)
@skipIfNoPkgconfig
def test_pkgconfig_internal_libraries(self):
        '''
        Check that libraries exposed through a generated pkg-config file can be
        consumed by a separate project found via PKG_CONFIG_PATH.
        '''
with tempfile.TemporaryDirectory() as tempdirname:
# build library
testdirbase = os.path.join(self.unit_test_dir, '32 pkgconfig use libraries')
testdirlib = os.path.join(testdirbase, 'lib')
self.init(testdirlib, extra_args=['--prefix=' + tempdirname,
'--libdir=lib',
'--default-library=static'], default_args=False)
self.build()
self.install(use_destdir=False)
# build user of library
pkg_dir = os.path.join(tempdirname, 'lib/pkgconfig')
self.new_builddir()
self.init(os.path.join(testdirbase, 'app'),
override_envvars={'PKG_CONFIG_PATH': pkg_dir})
self.build()
@skipIfNoPkgconfig
def test_static_archive_stripping(self):
'''
Check that Meson produces valid static archives with --strip enabled
'''
with tempfile.TemporaryDirectory() as tempdirname:
testdirbase = os.path.join(self.unit_test_dir, '66 static archive stripping')
# build lib
self.new_builddir()
testdirlib = os.path.join(testdirbase, 'lib')
testlibprefix = os.path.join(tempdirname, 'libprefix')
self.init(testdirlib, extra_args=['--prefix=' + testlibprefix,
'--libdir=lib',
'--default-library=static',
'--buildtype=debug',
'--strip'], default_args=False)
self.build()
self.install(use_destdir=False)
# build executable (uses lib, fails if static archive has been stripped incorrectly)
pkg_dir = os.path.join(testlibprefix, 'lib/pkgconfig')
self.new_builddir()
self.init(os.path.join(testdirbase, 'app'),
override_envvars={'PKG_CONFIG_PATH': pkg_dir})
self.build()
@skipIfNoPkgconfig
def test_pkgconfig_formatting(self):
testdir = os.path.join(self.unit_test_dir, '38 pkgconfig format')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
stdo = subprocess.check_output(['pkg-config', '--libs-only-l', 'libsomething'], env=myenv)
deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething']
if is_windows() or is_cygwin() or is_osx() or is_openbsd():
            # On these platforms, libintl is built as a separate library
deps.append(b'-lintl')
self.assertEqual(set(deps), set(stdo.split()))
@skipIfNoPkgconfig
@skip_if_not_language('cs')
def test_pkgconfig_csharp_library(self):
testdir = os.path.join(self.unit_test_dir, '50 pkgconfig csharp library')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv)
self.assertEqual("-r/usr/lib/libsomething.dll", str(stdo.decode('ascii')).strip())
@skipIfNoPkgconfig
def test_pkgconfig_link_order(self):
'''
Test that libraries are listed before their dependencies.
'''
testdir = os.path.join(self.unit_test_dir, '53 pkgconfig static link order')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv)
deps = stdo.split()
self.assertTrue(deps.index(b'-lsomething') < deps.index(b'-ldependency'))
def test_deterministic_dep_order(self):
'''
Test that the dependencies are always listed in a deterministic order.
'''
testdir = os.path.join(self.unit_test_dir, '43 dep order')
self.init(testdir)
with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile:
for line in bfile:
if 'build myexe:' in line or 'build myexe.exe:' in line:
self.assertIn('liblib1.a liblib2.a', line)
return
raise RuntimeError('Could not find the build rule')
def test_deterministic_rpath_order(self):
'''
Test that the rpaths are always listed in a deterministic order.
'''
if is_cygwin():
raise SkipTest('rpath are not used on Cygwin')
testdir = os.path.join(self.unit_test_dir, '42 rpath order')
self.init(testdir)
if is_osx():
rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
else:
rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile:
for line in bfile:
if '-rpath' in line:
self.assertRegex(line, rpathre)
return
raise RuntimeError('Could not find the rpath')
def test_override_with_exe_dep(self):
'''
Test that we produce the correct dependencies when a program is overridden with an executable.
'''
testdir = os.path.join(self.src_root, 'test cases', 'native', '9 override with exe')
self.init(testdir)
with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile:
for line in bfile:
if 'main1.c:' in line or 'main2.c:' in line:
self.assertIn('| subprojects/sub/foobar', line)
@skipIfNoPkgconfig
def test_usage_external_library(self):
'''
Test that uninstalled usage of an external library (from the system or
PkgConfigDependency) works. On macOS, this workflow works out of the
box. On Linux, BSDs, Windows, etc, you need to set extra arguments such
as LD_LIBRARY_PATH, etc, so this test is skipped.
The system library is found with cc.find_library() and pkg-config deps.
'''
oldprefix = self.prefix
# Install external library so we can find it
testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'external library')
# install into installdir without using DESTDIR
installdir = self.installdir
self.prefix = installdir
self.init(testdir)
self.prefix = oldprefix
self.build()
self.install(use_destdir=False)
## New builddir for the consumer
self.new_builddir()
env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir),
'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')}
testdir = os.path.join(self.unit_test_dir, '40 external, internal library rpath', 'built library')
# install into installdir without using DESTDIR
self.prefix = self.installdir
self.init(testdir, override_envvars=env)
self.prefix = oldprefix
self.build(override_envvars=env)
# test uninstalled
self.run_tests(override_envvars=env)
if not (is_osx() or is_linux()):
return
# test running after installation
self.install(use_destdir=False)
prog = os.path.join(self.installdir, 'bin', 'prog')
self._run([prog])
if not is_osx():
# Rest of the workflow only works on macOS
return
out = self._run(['otool', '-L', prog])
self.assertNotIn('@rpath', out)
## New builddir for testing that DESTDIR is not added to install_name
self.new_builddir()
# install into installdir with DESTDIR
self.init(testdir, override_envvars=env)
self.build(override_envvars=env)
# test running after installation
self.install(override_envvars=env)
prog = self.installdir + os.path.join(self.prefix, 'bin', 'prog')
lib = self.installdir + os.path.join(self.prefix, 'lib', 'libbar_built.dylib')
for f in prog, lib:
out = self._run(['otool', '-L', f])
# Ensure that the otool output does not contain self.installdir
self.assertNotRegex(out, self.installdir + '.*dylib ')
@skipIfNoPkgconfig
def test_link_arg_fullname(self):
'''
Test for support of -l:libfullname.a
see: https://github.com/mesonbuild/meson/issues/9000
https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a
'''
testdir = os.path.join(self.unit_test_dir, '97 link full name','libtestprovider')
oldprefix = self.prefix
# install into installdir without using DESTDIR
installdir = self.installdir
self.prefix = installdir
self.init(testdir)
self.prefix=oldprefix
self.build()
self.install(use_destdir=False)
self.new_builddir()
env = {'LIBRARY_PATH': os.path.join(installdir, self.libdir),
'PKG_CONFIG_PATH': os.path.join(installdir, self.libdir, 'pkgconfig')}
testdir = os.path.join(self.unit_test_dir, '97 link full name','proguser')
self.init(testdir,override_envvars=env)
# test for link with full path
with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as bfile:
for line in bfile:
if 'build dprovidertest:' in line:
self.assertIn('/libtestprovider.a', line)
if is_osx():
# macOS's ld does not support `--whole-archive`; skip build & run
return
self.build(override_envvars=env)
# skip test if pkg-config is too old.
# before v0.28, Libs flags like -Wl are not kept in their original order relative to -l flags.
# see https://gitlab.freedesktop.org/pkg-config/pkg-config/-/blob/master/NEWS
pkgconfigver = subprocess.check_output(['pkg-config', '--version'])
if b'0.28' > pkgconfigver:
raise SkipTest('pkg-config is too old to handle this correctly.')
self.run_tests()
@skipIfNoPkgconfig
def test_usage_pkgconfig_prefixes(self):
'''
Build and install two external libraries, to different prefixes,
then build and install a client program that finds them via pkgconfig,
and verify the installed client program runs.
'''
oldinstalldir = self.installdir
# Build and install both external libraries without DESTDIR
val1dir = os.path.join(self.unit_test_dir, '75 pkgconfig prefixes', 'val1')
val1prefix = os.path.join(oldinstalldir, 'val1')
self.prefix = val1prefix
self.installdir = val1prefix
self.init(val1dir)
self.build()
self.install(use_destdir=False)
self.new_builddir()
env1 = {}
env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig')
val2dir = os.path.join(self.unit_test_dir, '75 pkgconfig prefixes', 'val2')
val2prefix = os.path.join(oldinstalldir, 'val2')
self.prefix = val2prefix
self.installdir = val2prefix
self.init(val2dir, override_envvars=env1)
self.build()
self.install(use_destdir=False)
self.new_builddir()
# Build, install, and run the client program
env2 = {}
env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig')
testdir = os.path.join(self.unit_test_dir, '75 pkgconfig prefixes', 'client')
testprefix = os.path.join(oldinstalldir, 'client')
self.prefix = testprefix
self.installdir = testprefix
self.init(testdir, override_envvars=env2)
self.build()
self.install(use_destdir=False)
prog = os.path.join(self.installdir, 'bin', 'client')
env3 = {}
if is_cygwin():
env3['PATH'] = os.path.join(val1prefix, 'bin') + \
os.pathsep + \
os.path.join(val2prefix, 'bin') + \
os.pathsep + os.environ['PATH']
out = self._run([prog], override_envvars=env3).strip()
# Expected output is val1 + val2 = 3
self.assertEqual(out, '3')
def install_subdir_invalid_symlinks(self, testdir, subdir_path):
'''
Test that installation of broken symlinks works fine.
https://github.com/mesonbuild/meson/issues/3914
'''
testdir = os.path.join(self.common_test_dir, testdir)
subdir = os.path.join(testdir, subdir_path)
with chdir(subdir):
# Can't distribute broken symlinks in the source tree because it breaks
# the creation of zipapps. Create it dynamically and run the test by
# hand.
src = '../../nonexistent.txt'
os.symlink(src, 'invalid-symlink.txt')
try:
self.init(testdir)
self.build()
self.install()
install_path = subdir_path.split(os.path.sep)[-1]
link = os.path.join(self.installdir, 'usr', 'share', install_path, 'invalid-symlink.txt')
self.assertTrue(os.path.islink(link), msg=link)
self.assertEqual(src, os.readlink(link))
self.assertFalse(os.path.isfile(link), msg=link)
finally:
os.remove(os.path.join(subdir, 'invalid-symlink.txt'))
def test_install_subdir_symlinks(self):
self.install_subdir_invalid_symlinks('59 install subdir', os.path.join('sub', 'sub1'))
def test_install_subdir_symlinks_with_default_umask(self):
self.install_subdir_invalid_symlinks('190 install_mode', 'sub2')
def test_install_subdir_symlinks_with_default_umask_and_mode(self):
self.install_subdir_invalid_symlinks('190 install_mode', 'sub1')
@skipIfNoPkgconfigDep('gmodule-2.0')
def test_ldflag_dedup(self):
testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
if is_cygwin() or is_osx():
raise SkipTest('Not applicable on Cygwin or OSX.')
env = get_fake_env()
cc = detect_c_compiler(env, MachineChoice.HOST)
linker = cc.linker
if not linker.export_dynamic_args(env):
raise SkipTest('Not applicable for linkers without --export-dynamic')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
max_count = 0
search_term = '-Wl,--export-dynamic'
with open(build_ninja, encoding='utf-8') as f:
for line in f:
max_count = max(max_count, line.count(search_term))
self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.')
def test_compiler_libs_static_dedup(self):
testdir = os.path.join(self.unit_test_dir, '56 dedup compiler libs')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
with open(build_ninja, encoding='utf-8') as f:
lines = f.readlines()
for lib in ('-ldl', '-lm', '-lc', '-lrt'):
for line in lines:
if lib not in line:
continue
# Assert that the library appears exactly once on this line (i.e. it was not duplicated).
self.assertEqual(len(line.split(lib)), 2, msg=(lib, line))
@skipIfNoPkgconfig
def test_noncross_options(self):
# C_std defined in project options must be in effect also when native compiling.
testdir = os.path.join(self.unit_test_dir, '51 noncross options')
self.init(testdir, extra_args=['-Dpkg_config_path=' + testdir])
compdb = self.get_compdb()
self.assertEqual(len(compdb), 2)
self.assertRegex(compdb[0]['command'], '-std=c99')
self.assertRegex(compdb[1]['command'], '-std=c99')
self.build()
def test_identity_cross(self):
testdir = os.path.join(self.unit_test_dir, '61 identity cross')
nativefile = tempfile.NamedTemporaryFile(mode='w')
nativefile.write(textwrap.dedent('''\
[binaries]
c = ['{}']
'''.format(os.path.join(testdir, 'build_wrapper.py'))))
nativefile.flush()
self.meson_native_file = nativefile.name
crossfile = tempfile.NamedTemporaryFile(mode='w')
crossfile.write(textwrap.dedent('''\
[binaries]
c = ['{}']
'''.format(os.path.join(testdir, 'host_wrapper.py'))))
crossfile.flush()
self.meson_cross_file = crossfile.name
# TODO should someday be explicit about build platform only here
self.init(testdir)
def test_identity_cross_env(self):
testdir = os.path.join(self.unit_test_dir, '61 identity cross')
env = {
'CC_FOR_BUILD': '"' + os.path.join(testdir, 'build_wrapper.py') + '"',
}
crossfile = tempfile.NamedTemporaryFile(mode='w')
crossfile.write(textwrap.dedent('''\
[binaries]
c = ['{}']
'''.format(os.path.join(testdir, 'host_wrapper.py'))))
crossfile.flush()
self.meson_cross_file = crossfile.name
# TODO should someday be explicit about build platform only here
self.init(testdir, override_envvars=env)
@skipIfNoPkgconfig
def test_static_link(self):
if is_cygwin():
raise SkipTest("Cygwin doesn't support LD_LIBRARY_PATH.")
# Build some libraries and install them
testdir = os.path.join(self.unit_test_dir, '67 static link/lib')
libdir = os.path.join(self.installdir, self.libdir)
oldprefix = self.prefix
self.prefix = self.installdir
self.init(testdir)
self.install(use_destdir=False)
# Test that installed libraries works
self.new_builddir()
self.prefix = oldprefix
meson_args = [f'-Dc_link_args=-L{libdir}',
'--fatal-meson-warnings']
testdir = os.path.join(self.unit_test_dir, '67 static link')
env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')}
self.init(testdir, extra_args=meson_args, override_envvars=env)
self.build()
self.run_tests()
def _check_ld(self, check: str, name: str, lang: str, expected: str) -> None:
if is_sunos():
raise SkipTest('Solaris currently cannot override the linker.')
if not shutil.which(check):
raise SkipTest(f'Could not find {check}.')
envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP[f'{lang}_ld']]
# Also test a deprecated variable if there is one.
if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP:
envvars.append(
mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP[f'{lang}_ld'])
for envvar in envvars:
with mock.patch.dict(os.environ, {envvar: name}):
env = get_fake_env()
comp = compiler_from_language(env, lang, MachineChoice.HOST)
if isinstance(comp, (AppleClangCCompiler, AppleClangCPPCompiler,
AppleClangObjCCompiler, AppleClangObjCPPCompiler)):
raise SkipTest('AppleClang is currently only supported with ld64')
if lang != 'rust' and comp.use_linker_args('bfd') == []:
raise SkipTest(
f'Compiler {comp.id} does not support using alternative linkers')
self.assertEqual(comp.linker.id, expected)
def test_ld_environment_variable_bfd(self):
self._check_ld('ld.bfd', 'bfd', 'c', 'ld.bfd')
def test_ld_environment_variable_gold(self):
self._check_ld('ld.gold', 'gold', 'c', 'ld.gold')
def test_ld_environment_variable_lld(self):
self._check_ld('ld.lld', 'lld', 'c', 'ld.lld')
@skip_if_not_language('rust')
@skipIfNoExecutable('ld.gold') # need an additional check here because _check_ld checks for gcc
def test_ld_environment_variable_rust(self):
self._check_ld('gcc', 'gcc -fuse-ld=gold', 'rust', 'ld.gold')
def test_ld_environment_variable_cpp(self):
self._check_ld('ld.gold', 'gold', 'cpp', 'ld.gold')
@skip_if_not_language('objc')
def test_ld_environment_variable_objc(self):
self._check_ld('ld.gold', 'gold', 'objc', 'ld.gold')
@skip_if_not_language('objcpp')
def test_ld_environment_variable_objcpp(self):
self._check_ld('ld.gold', 'gold', 'objcpp', 'ld.gold')
@skip_if_not_language('fortran')
def test_ld_environment_variable_fortran(self):
self._check_ld('ld.gold', 'gold', 'fortran', 'ld.gold')
@skip_if_not_language('d')
def test_ld_environment_variable_d(self):
# At least for me, ldc defaults to gold, and gdc defaults to bfd, so
# let's pick lld, which isn't the default for either (currently)
if is_osx():
expected = 'ld64'
else:
expected = 'ld.lld'
self._check_ld('ld.lld', 'lld', 'd', expected)
def compute_sha256(self, filename):
with open(filename, 'rb') as f:
return hashlib.sha256(f.read()).hexdigest()
def test_wrap_with_file_url(self):
testdir = os.path.join(self.unit_test_dir, '73 wrap file url')
source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz')
patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz')
wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap')
source_hash = self.compute_sha256(source_filename)
patch_hash = self.compute_sha256(patch_filename)
wrap = textwrap.dedent("""\
[wrap-file]
directory = foo
source_url = http://server.invalid/foo
source_fallback_url = file://{}
source_filename = foo.tar.xz
source_hash = {}
patch_url = http://server.invalid/foo
patch_fallback_url = file://{}
patch_filename = foo-patch.tar.xz
patch_hash = {}
""".format(source_filename, source_hash, patch_filename, patch_hash))
with open(wrap_filename, 'w', encoding='utf-8') as f:
f.write(wrap)
self.init(testdir)
self.build()
self.run_tests()
windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'packagecache'))
windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo'))
os.unlink(wrap_filename)
def test_no_rpath_for_static(self):
testdir = os.path.join(self.common_test_dir, '5 linkstatic')
self.init(testdir)
self.build()
build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
self.assertIsNone(build_rpath)
def test_lookup_system_after_broken_fallback(self):
# Just to generate libfoo.pc so we can test system dependency lookup.
testdir = os.path.join(self.common_test_dir, '44 pkgconfig-gen')
self.init(testdir)
privatedir = self.privatedir
# Write test project where the first dependency() returns not-found
# because the 'broken' subproject does not exist, but that should not prevent
# the 2nd dependency() from being looked up on the system.
self.new_builddir()
with tempfile.TemporaryDirectory() as d:
with open(os.path.join(d, 'meson.build'), 'w', encoding='utf-8') as f:
f.write(textwrap.dedent('''\
project('test')
dependency('notfound', fallback: 'broken', required: false)
dependency('libfoo', fallback: 'broken', required: true)
'''))
self.init(d, override_envvars={'PKG_CONFIG_LIBDIR': privatedir})
def test_as_link_whole(self):
testdir = os.path.join(self.unit_test_dir, '77 as link whole')
self.init(testdir)
with open(os.path.join(self.privatedir, 'bar1.pc'), encoding='utf-8') as f:
content = f.read()
self.assertIn('-lfoo', content)
with open(os.path.join(self.privatedir, 'bar2.pc'), encoding='utf-8') as f:
content = f.read()
self.assertNotIn('-lfoo', content)
def test_prelinking(self):
# Prelinking currently only works on recent GNU toolchains.
# Skip everything else. When support for other toolchains is added,
# remove limitations as necessary.
if is_osx():
raise SkipTest('Prelinking not supported on Darwin.')
if 'clang' in os.environ.get('CC', 'dummy'):
raise SkipTest('Prelinking not supported with Clang.')
gccver = subprocess.check_output(['cc', '--version'])
if b'7.5.0' in gccver:
raise SkipTest('GCC on Bionic is too old to be supported.')
testdir = os.path.join(self.unit_test_dir, '87 prelinking')
self.init(testdir)
self.build()
outlib = os.path.join(self.builddir, 'libprelinked.a')
ar = shutil.which('ar')
self.assertTrue(os.path.exists(outlib))
self.assertTrue(ar is not None)
p = subprocess.run([ar, 't', outlib],
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
universal_newlines=True, timeout=1)
obj_files = p.stdout.strip().split('\n')
self.assertEqual(len(obj_files), 1)
self.assertTrue(obj_files[0].endswith('-prelink.o'))
| 44.982901
| 141
| 0.604881
|
f8cb449ac01f4bcd9724e122a9a3376ee0c83dee
| 1,786
|
py
|
Python
|
object_detection/box_coders/mean_stddev_box_coder_test.py
|
travisyates81/object-detection
|
931bebfa54798c08d2c401e9c1bad39015d8c832
|
[
"MIT"
] | 1
|
2019-09-19T18:24:55.000Z
|
2019-09-19T18:24:55.000Z
|
object_detection/box_coders/mean_stddev_box_coder_test.py
|
travisyates81/object-detection
|
931bebfa54798c08d2c401e9c1bad39015d8c832
|
[
"MIT"
] | null | null | null |
object_detection/box_coders/mean_stddev_box_coder_test.py
|
travisyates81/object-detection
|
931bebfa54798c08d2c401e9c1bad39015d8c832
|
[
"MIT"
] | null | null | null |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Travis Yates
"""Tests for object_detection.box_coder.mean_stddev_boxcoder."""
import tensorflow as tf
from object_detection.box_coders import mean_stddev_box_coder
from object_detection.core import box_list
class MeanStddevBoxCoderTest(tf.test.TestCase):
def testGetCorrectRelativeCodesAfterEncoding(self):
box_corners = [[0.0, 0.0, 0.5, 0.5], [0.0, 0.0, 0.5, 0.5]]
boxes = box_list.BoxList(tf.constant(box_corners))
expected_rel_codes = [[0.0, 0.0, 0.0, 0.0], [-5.0, -5.0, -5.0, -3.0]]
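# Editorial note (not part of the original test): the expected codes follow
# from encoding each corner as (corner - prior_mean) / prior_stddev; e.g. for
# the second box, (0.0 - 0.5) / 0.1 = -5.0 and (0.5 - 0.8) / 0.1 = -3.0.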
prior_means = tf.constant([[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 1.0, 0.8]])
prior_stddevs = tf.constant(2 * [4 * [.1]])
priors = box_list.BoxList(prior_means)
priors.add_field('stddev', prior_stddevs)
coder = mean_stddev_box_coder.MeanStddevBoxCoder()
rel_codes = coder.encode(boxes, priors)
with self.test_session() as sess:
rel_codes_out = sess.run(rel_codes)
self.assertAllClose(rel_codes_out, expected_rel_codes)
def testGetCorrectBoxesAfterDecoding(self):
rel_codes = tf.constant([[0.0, 0.0, 0.0, 0.0], [-5.0, -5.0, -5.0, -3.0]])
expected_box_corners = [[0.0, 0.0, 0.5, 0.5], [0.0, 0.0, 0.5, 0.5]]
prior_means = tf.constant([[0.0, 0.0, 0.5, 0.5], [0.5, 0.5, 1.0, 0.8]])
prior_stddevs = tf.constant(2 * [4 * [.1]])
priors = box_list.BoxList(prior_means)
priors.add_field('stddev', prior_stddevs)
coder = mean_stddev_box_coder.MeanStddevBoxCoder()
decoded_boxes = coder.decode(rel_codes, priors)
decoded_box_corners = decoded_boxes.get()
with self.test_session() as sess:
decoded_out = sess.run(decoded_box_corners)
self.assertAllClose(decoded_out, expected_box_corners)
if __name__ == '__main__':
tf.test.main()
| 37.208333
| 77
| 0.68533
|
cabadcee5e81b002c527e9219aa4bae243ee6267
| 1,509
|
py
|
Python
|
sender.py
|
obrasier/tablets-of-stone-radio
|
8e79da16b03fe93d4cf1758a921b5c40a4f964c5
|
[
"MIT"
] | null | null | null |
sender.py
|
obrasier/tablets-of-stone-radio
|
8e79da16b03fe93d4cf1758a921b5c40a4f964c5
|
[
"MIT"
] | null | null | null |
sender.py
|
obrasier/tablets-of-stone-radio
|
8e79da16b03fe93d4cf1758a921b5c40a4f964c5
|
[
"MIT"
] | null | null | null |
from microbit import *
import radio
radio.config(channel=7)
radio.on()
PACKET_SIZE = 6
msg_size = PACKET_SIZE - 1
def generate_packets(message):
packets = []
packet_num = 0
while message:
if packet_num > 9:
return packets
packet = str(packet_num) + message[:msg_size]
packets.append(packet)
packet_num += 1
message = message[msg_size:]
return packets
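# Illustrative example (editorial, not part of the original script): with
# PACKET_SIZE = 6 each packet carries 5 payload characters plus a leading
# packet number, so
#     generate_packets("hello world") == ["0hello", "1 worl", "2d"]
# The receiver acknowledges packets by number, and send_packets() below only
# retransmits the packets whose numbers are still in acks_to_receive.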
def send_packets(acks_to_receive, packets):
for ack in acks_to_receive:
radio.send(packets[ack])
print(packets[ack])
acks_to_receive = []
TIMEOUT = 1000
msg_sent = False
while True:
current_time = running_time()
msg = "today is the first day of summer"
# button a to start and send all packets
if button_a.was_pressed():
packets = generate_packets(msg)
for i in range(len(packets)):
acks_to_receive.append(i)
send_packets(acks_to_receive, packets)
time_sent = current_time
msg_sent = True
# receive acks
msg = radio.receive()
if msg and 'ack00' in msg:
ack_num = int(msg[0])
if ack_num in acks_to_receive:
acks_to_receive.remove(ack_num)
# on timeout, send all which haven't been acked
if msg_sent and (current_time - time_sent) > TIMEOUT:
send_packets(acks_to_receive, packets)
time_sent = current_time
# have sent and received all acks
if msg_sent and len(acks_to_receive) == 0:
display.show(Image.HAPPY)
# button b resets
if button_b.was_pressed():
msg_sent = False
acks_to_receive = []
display.clear()
| 23.578125
| 55
| 0.6945
|
71c3d256540447d130560ac9efdd84ad55be2fad
| 970
|
py
|
Python
|
IceSpringMusicPlayer/plugins/IceSpringHelloWorldPlugin/helloWorldPlugin.py
|
baijifeilong/rawsteelp
|
425547e6e2395bf4acb62435b18b5b3a4b7ebef4
|
[
"MIT"
] | null | null | null |
IceSpringMusicPlayer/plugins/IceSpringHelloWorldPlugin/helloWorldPlugin.py
|
baijifeilong/rawsteelp
|
425547e6e2395bf4acb62435b18b5b3a4b7ebef4
|
[
"MIT"
] | null | null | null |
IceSpringMusicPlayer/plugins/IceSpringHelloWorldPlugin/helloWorldPlugin.py
|
baijifeilong/rawsteelp
|
425547e6e2395bf4acb62435b18b5b3a4b7ebef4
|
[
"MIT"
] | null | null | null |
# Created by BaiJiFeiLong@gmail.com at 2022/1/21 17:13
import typing
from IceSpringRealOptional.typingUtils import gg
from PySide2 import QtWidgets, QtCore
from IceSpringMusicPlayer import tt
from IceSpringMusicPlayer.common.pluginMixin import PluginMixin
from IceSpringMusicPlayer.common.pluginWidgetMixin import PluginWidgetMixin
from IceSpringMusicPlayer.tt import Text
class HelloWorldPlugin(QtWidgets.QWidget, PluginMixin, PluginWidgetMixin):
@classmethod
def getPluginName(cls) -> Text:
return tt.HelloWorldPlugin_Name
@classmethod
def getPluginReplacers(cls) -> typing.Dict[Text, typing.Callable[[], PluginWidgetMixin]]:
return {tt.HelloWorldWidget_Name: lambda: cls()}
def __init__(self):
super().__init__()
label = QtWidgets.QLabel("Hello World")
label.setAlignment(gg(QtCore.Qt.AlignmentFlag.AlignCenter))
self.setLayout(QtWidgets.QGridLayout())
self.layout().addWidget(label)
| 33.448276
| 93
| 0.760825
|
9ad55b9e0e93b3cc29f6243a0df2f2953e5469d2
| 5,468
|
py
|
Python
|
ryu/contrib/tinyrpc/protocols/__init__.py
|
umkcdcrg01/ryu_openflow
|
37ed5b88f7d119344e07c95314a7450235c037a8
|
[
"Apache-2.0"
] | 269
|
2015-03-08T11:32:45.000Z
|
2022-03-30T11:18:16.000Z
|
ryu/contrib/tinyrpc/protocols/__init__.py
|
umkcdcrg01/ryu_openflow
|
37ed5b88f7d119344e07c95314a7450235c037a8
|
[
"Apache-2.0"
] | 14
|
2015-05-01T04:45:45.000Z
|
2016-05-11T01:29:23.000Z
|
ryu/contrib/tinyrpc/protocols/__init__.py
|
umkcdcrg01/ryu_openflow
|
37ed5b88f7d119344e07c95314a7450235c037a8
|
[
"Apache-2.0"
] | 205
|
2015-01-13T04:52:25.000Z
|
2022-03-30T13:37:33.000Z
|
#!/usr/bin/env python
from ..exc import *
class RPCRequest(object):
unique_id = None
"""A unique ID to remember the request by. Protocol specific, may or
may not be set. This value should only be set by
:py:func:`~tinyrpc.RPCProtocol.create_request`.
The ID allows the client to receive responses out of order and still match
them to the correct request.
Only supported if the parent protocol has
:py:attr:`~tinyrpc.RPCProtocol.supports_out_of_order` set to ``True``.
"""
method = None
"""The name of the method to be called."""
args = []
"""The positional arguments of the method call."""
kwargs = {}
"""The keyword arguments of the method call."""
def error_respond(self, error):
"""Creates an error response.
Create a response indicating that the request was parsed correctly,
but an error occurred while trying to fulfill it.
:param error: An exception or a string describing the error.
:return: A response or ``None`` to indicate that no error should be sent
out.
"""
raise NotImplementedError()
def respond(self, result):
"""Create a response.
Call this to return the result of a successful method invocation.
This creates and returns an instance of a protocol-specific subclass of
:py:class:`~tinyrpc.RPCResponse`.
:param result: Passed on to new response instance.
:return: A response or ``None`` to indicate this request does not expect a
response.
"""
raise NotImplementedError()
def serialize(self):
"""Returns a serialization of the request.
:return: A string to be passed on to a transport.
"""
raise NotImplementedError()
class RPCBatchRequest(list):
"""Multiple requests batched together.
A batch request is a subclass of :py:class:`list`. Protocols that support
multiple requests in a single message use this to group them together.
Batch requests may be handled in any order; the responses must be gathered
in a batch response and appear in the same order as their respective requests.
Any item of a batch request is either a request or a subclass of
:py:class:`~tinyrpc.BadRequestError`, which indicates that there has been
an error in parsing the request.
"""
def create_batch_response(self):
"""Creates a response suitable for responding to this request.
:return: An :py:class:`~tinyrpc.RPCBatchResponse` or ``None``, if no
response is expected."""
raise NotImplementedError()
def serialize(self):
raise NotImplementedError()
class RPCResponse(object):
"""RPC call response class.
Base class for all deriving responses.
Has an attribute ``result`` containing the result of the RPC call, unless
an error occurred, in which case an attribute ``error`` will contain the
error message."""
unique_id = None
def serialize(self):
"""Returns a serialization of the response.
:return: A reply to be passed on to a transport.
"""
raise NotImplementedError()
class RPCErrorResponse(RPCResponse):
pass
class RPCBatchResponse(list):
"""Multiple response from a batch request. See
:py:class:`~tinyrpc.RPCBatchRequest` on how to handle.
Items in a batch response need to be
:py:class:`~tinyrpc.RPCResponse` instances or None, meaning no reply should
be generated for the request.
"""
def serialize(self):
"""Returns a serialization of the batch response."""
raise NotImplementedError()
class RPCProtocol(object):
"""Base class for all protocol implementations."""
supports_out_of_order = False
"""If true, this protocol can receive responses out of order correctly.
Note that this usually depends on the generation of unique_ids, which may or
may not be thread safe, depending on the protocol. Ideally, only one
instance of RPCProtocol should be used per client."""
def create_request(self, method, args=None, kwargs=None, one_way=False):
"""Creates a new RPCRequest object.
It is up to the implementing protocol whether ``args``, ``kwargs``, only one
of them, both at once, or neither are supported.
:param method: The method name to invoke.
:param args: The positional arguments to call the method with.
:param kwargs: The keyword arguments to call the method with.
:param one_way: The request is an update, i.e. it does not expect a
reply.
:return: A new :py:class:`~tinyrpc.RPCRequest` instance.
"""
raise NotImplementedError()
def parse_request(self, data):
"""Parses a request given as a string and returns an
:py:class:`RPCRequest` instance.
:return: An instanced request.
"""
raise NotImplementedError()
def parse_reply(self, data):
"""Parses a reply and returns an :py:class:`RPCResponse` instance.
:return: An instanced response.
"""
raise NotImplementedError()
class RPCBatchProtocol(RPCProtocol):
def create_batch_request(self, requests=None):
"""Create a new :py:class:`tinyrpc.RPCBatchRequest` object.
:param requests: A list of requests.
"""
raise NotImplementedError()
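# Illustrative sketch (editorial, not part of the upstream module): a concrete
# protocol is expected to subclass these bases and fill in the hooks, roughly
# along these lines (the names below are hypothetical):
#
#     class MyRequest(RPCRequest):
#         def respond(self, result):
#             response = MyResponse()
#             response.unique_id = self.unique_id
#             response.result = result
#             return response
#
#     class MyProtocol(RPCProtocol):
#         def create_request(self, method, args=None, kwargs=None, one_way=False):
#             request = MyRequest()
#             request.method = method
#             request.args = args or []
#             request.kwargs = kwargs or {}
#             return request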
| 31.425287
| 82
| 0.662765
|
f050cf243a9af8e9383ce2f6eb034a21eee2f4be
| 1,344
|
py
|
Python
|
prompt_toolkit/key_binding/bindings/utils.py
|
gigforks/python-prompt-toolkit
|
d12cdbb556bef84011792108b1027930b81c4813
|
[
"BSD-3-Clause"
] | 1
|
2016-10-01T20:28:31.000Z
|
2016-10-01T20:28:31.000Z
|
prompt_toolkit/key_binding/bindings/utils.py
|
gigforks/python-prompt-toolkit
|
d12cdbb556bef84011792108b1027930b81c4813
|
[
"BSD-3-Clause"
] | null | null | null |
prompt_toolkit/key_binding/bindings/utils.py
|
gigforks/python-prompt-toolkit
|
d12cdbb556bef84011792108b1027930b81c4813
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import unicode_literals
from functools import wraps
from prompt_toolkit.filters import CLIFilter, Always
__all__ = (
'create_handle_decorator',
)
def create_handle_decorator(registry, filter=Always()):
"""
Create a key handle decorator, which is compatible with `Registry.handle`
but has a `save_before` option, which will make sure that changes are saved
to the undo stack of the `Buffer` object before every key press event.
:param save_before: Callable that takes an `Event` and returns True if we
should save the current buffer, before handling the event. (That's the
default.)
"""
assert isinstance(filter, CLIFilter)
def handle(*keys, **kw):
save_before = kw.pop('save_before', lambda e: True)
# Chain the given filter to the filter of this specific binding.
if 'filter' in kw:
kw['filter'] = kw['filter'] & filter
else:
kw['filter'] = filter
def decorator(handler_func):
@registry.add_binding(*keys, **kw)
@wraps(handler_func)
def wrapper(event):
if save_before(event):
event.cli.current_buffer.save_to_undo_stack()
handler_func(event)
return handler_func
return decorator
return handle
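# Illustrative usage sketch (editorial; the key name and filter below are
# examples, not part of this module):
#
#     handle = create_handle_decorator(registry, filter=some_cli_filter)
#
#     @handle(Keys.ControlT, save_before=lambda event: True)
#     def _(event):
#         event.current_buffer.insert_text('!')
#
# Every binding registered through `handle` first saves the current buffer to
# the undo stack (when `save_before` returns True) and then runs the handler.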
| 33.6
| 79
| 0.644345
|
64ae34100ad6553fbdad05932a02f06a7d0baaa7
| 351
|
py
|
Python
|
hwt/hdl/entity.py
|
mgielda/hwt
|
e6c699fb154f93ac03523bfe40a3d4fc1912d28b
|
[
"MIT"
] | null | null | null |
hwt/hdl/entity.py
|
mgielda/hwt
|
e6c699fb154f93ac03523bfe40a3d4fc1912d28b
|
[
"MIT"
] | null | null | null |
hwt/hdl/entity.py
|
mgielda/hwt
|
e6c699fb154f93ac03523bfe40a3d4fc1912d28b
|
[
"MIT"
] | null | null | null |
from hwt.hdl.hdlObject import HdlObject
class Entity(HdlObject):
"""
Hdl container of hdl configuration and interfaces
"""
def __init__(self, name):
self.name = name
self.origin = None # creator of this object
self.generics = []
self.ports = []
self.ctx = {}
self.discovered = False
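# Illustrative sketch (editorial, not part of the original module): an Entity
# is a plain container that code elsewhere fills in, e.g.
#
#     e = Entity("adder")
#     e.generics.append(width_generic)   # hypothetical generic object
#     e.ports.append(a_port)             # hypothetical port object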
| 21.9375
| 53
| 0.592593
|
a7ccd767f495fc80ac86c346fd952d0ae1afc61d
| 12,208
|
py
|
Python
|
sdk/security/azure-mgmt-security/azure/mgmt/security/operations/_information_protection_policies_operations.py
|
ankitarorabit/azure-sdk-for-python
|
dd90281cbad9400f8080754a5ef2f56791a5a88f
|
[
"MIT"
] | 3
|
2020-06-23T02:25:27.000Z
|
2021-09-07T18:48:11.000Z
|
sdk/security/azure-mgmt-security/azure/mgmt/security/operations/_information_protection_policies_operations.py
|
ankitarorabit/azure-sdk-for-python
|
dd90281cbad9400f8080754a5ef2f56791a5a88f
|
[
"MIT"
] | 510
|
2019-07-17T16:11:19.000Z
|
2021-08-02T08:38:32.000Z
|
sdk/security/azure-mgmt-security/azure/mgmt/security/operations/_information_protection_policies_operations.py
|
ankitarorabit/azure-sdk-for-python
|
dd90281cbad9400f8080754a5ef2f56791a5a88f
|
[
"MIT"
] | 5
|
2019-09-04T12:51:37.000Z
|
2020-09-16T07:28:40.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class InformationProtectionPoliciesOperations(object):
"""InformationProtectionPoliciesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.security.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
scope, # type: str
information_protection_policy_name, # type: Union[str, "_models.Enum17"]
**kwargs # type: Any
):
# type: (...) -> "_models.InformationProtectionPolicy"
"""Details of the information protection policy.
:param scope: Scope of the query, can be subscription (/subscriptions/0b06d9ea-
afe6-4779-bd59-30e5c2d9d13f) or management group
(/providers/Microsoft.Management/managementGroups/mgName).
:type scope: str
:param information_protection_policy_name: Name of the information protection policy.
:type information_protection_policy_name: str or ~azure.mgmt.security.models.Enum17
:keyword callable cls: A custom type or function that will be passed the direct response
:return: InformationProtectionPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.security.models.InformationProtectionPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.InformationProtectionPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-08-01-preview"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'informationProtectionPolicyName': self._serialize.url("information_protection_policy_name", information_protection_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('InformationProtectionPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/{scope}/providers/Microsoft.Security/informationProtectionPolicies/{informationProtectionPolicyName}'} # type: ignore
def create_or_update(
self,
scope, # type: str
information_protection_policy_name, # type: Union[str, "_models.Enum17"]
information_protection_policy, # type: "_models.InformationProtectionPolicy"
**kwargs # type: Any
):
# type: (...) -> "_models.InformationProtectionPolicy"
"""Details of the information protection policy.
:param scope: Scope of the query, can be subscription (/subscriptions/0b06d9ea-
afe6-4779-bd59-30e5c2d9d13f) or management group
(/providers/Microsoft.Management/managementGroups/mgName).
:type scope: str
:param information_protection_policy_name: Name of the information protection policy.
:type information_protection_policy_name: str or ~azure.mgmt.security.models.Enum17
:param information_protection_policy: Information protection policy.
:type information_protection_policy: ~azure.mgmt.security.models.InformationProtectionPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: InformationProtectionPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.security.models.InformationProtectionPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.InformationProtectionPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-08-01-preview"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
'informationProtectionPolicyName': self._serialize.url("information_protection_policy_name", information_protection_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(information_protection_policy, 'InformationProtectionPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('InformationProtectionPolicy', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('InformationProtectionPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/{scope}/providers/Microsoft.Security/informationProtectionPolicies/{informationProtectionPolicyName}'} # type: ignore
def list(
self,
scope, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.InformationProtectionPolicyList"]
"""Information protection policies of a specific management group.
:param scope: Scope of the query, can be subscription (/subscriptions/0b06d9ea-
afe6-4779-bd59-30e5c2d9d13f) or management group
(/providers/Microsoft.Management/managementGroups/mgName).
:type scope: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either InformationProtectionPolicyList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.security.models.InformationProtectionPolicyList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.InformationProtectionPolicyList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-08-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('InformationProtectionPolicyList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/{scope}/providers/Microsoft.Security/informationProtectionPolicies'} # type: ignore
| 48.63745
| 160
| 0.67595
|
d2d9928df14acc5070172021238707ad1e25738d
| 724
|
py
|
Python
|
tests/reducers/res_test.py
|
beesperester/cinema4d-bootstrap
|
199971fd5d5568fd08370d95611dd9dcd9ae07e8
|
[
"MIT"
] | 1
|
2020-11-23T02:20:42.000Z
|
2020-11-23T02:20:42.000Z
|
tests/reducers/res_test.py
|
beesperester/cinema4d-bootstrap
|
199971fd5d5568fd08370d95611dd9dcd9ae07e8
|
[
"MIT"
] | null | null | null |
tests/reducers/res_test.py
|
beesperester/cinema4d-bootstrap
|
199971fd5d5568fd08370d95611dd9dcd9ae07e8
|
[
"MIT"
] | 1
|
2020-11-23T02:21:30.000Z
|
2020-11-23T02:21:30.000Z
|
"""Test res reducer module."""
import hashlib
import unittest
from bootstrap4c4d.classes.description import Description
from bootstrap4c4d.reducers.res import reduce_resource
class TestResourceReducer(unittest.TestCase):
def test_reduce_resource(self):
description = Description({
"id": "MY_DESCRIPTION",
"key": "CONTAINER",
"value": None,
"locales": {
"strings_us": "My Description"
}
})
result = reduce_resource(description)
expected_result = {
"id": "MY_DESCRIPTION",
"key": "CONTAINER",
"value": None
}
self.assertDictEqual(result, expected_result)
| 24.133333
| 57
| 0.59116
|
2452f10de848d200f47abe1368bc92144a489b1b
| 443
|
py
|
Python
|
setup.py
|
dawe/bbknn
|
202721961e3c422592e70e0596dba5b17ecddeb2
|
[
"MIT"
] | null | null | null |
setup.py
|
dawe/bbknn
|
202721961e3c422592e70e0596dba5b17ecddeb2
|
[
"MIT"
] | null | null | null |
setup.py
|
dawe/bbknn
|
202721961e3c422592e70e0596dba5b17ecddeb2
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
setup(
name='bbknn',
version='1.3.6',
description='Batch balanced KNN',
url='https://github.com/Teichlab/bbknn',
packages=find_packages(exclude=['docs', 'figures', 'examples']),
install_requires=['Cython','numpy','scipy','annoy','umap-learn','sklearn'],
extras_require=dict(
faiss=['faiss']
),
author='Krzysztof Polanski, Jongeun Park',
author_email='kp9@sanger.ac.uk',
license='MIT'
)
| 27.6875
| 76
| 0.713318
|
2912190685a1a33d508894e8d2152aaf1eb2480c
| 3,344
|
py
|
Python
|
PiezoMove/arduino.py
|
aquilesC/disperscripts
|
55afd510581b6d266cce18a2080031647501e9f4
|
[
"MIT"
] | 1
|
2020-04-16T05:27:20.000Z
|
2020-04-16T05:27:20.000Z
|
PiezoMove/arduino.py
|
aquilesC/disperscripts
|
55afd510581b6d266cce18a2080031647501e9f4
|
[
"MIT"
] | null | null | null |
PiezoMove/arduino.py
|
aquilesC/disperscripts
|
55afd510581b6d266cce18a2080031647501e9f4
|
[
"MIT"
] | 2
|
2021-02-16T12:07:36.000Z
|
2021-02-19T14:29:48.000Z
|
"""
Arduino Model
=============
This is an ad-hoc model for controlling an Arduino Due board, which will in turn control a piezo-mirror, a laser,
and some LED's.
"""
from multiprocessing import Event
import pyvisa
from pyvisa import VisaIOError
from threading import RLock
from time import sleep
from dispertech.controller.devices.arduino.arduino import Arduino
from experimentor.lib.log import get_logger
from experimentor.models import Feature
from experimentor.models.decorators import make_async_thread
from experimentor.models.devices.base_device import ModelDevice
rm = pyvisa.ResourceManager('@py')
class ArduinoModel(ModelDevice):
def __init__(self, port=None, device=0):
""" Use the port if you know where the Arduino is connected, or use the device number in the order shown by
pyvisa.
"""
super().__init__()
self._threads = []
self._stop_temperature = Event()
self.temp_electronics = 0
self.temp_sample = 0
self.query_lock = RLock()
self.driver = None
self.port = port
self.device = device
self.logger = get_logger()
self._laser_power = 0
self._laser_led = 0
self._fiber_led = 0
self._top_led = 0
self._side_led = 0
self._power_led = 0
self._measure_led = 0
self._servo_position = 0
@make_async_thread
def initialize(self):
""" This is a highly opinionated initialize method, in which the power of the laser is set to a minimum, the
servo shutter is closed, and LEDs are switched off.
"""
with self.query_lock:
# Fall back to auto-detection when no explicit port was given.
port = self.port if self.port else Arduino.list_devices()[self.device]
self.driver = rm.open_resource(port, baud_rate=115200)
sleep(2)
# This is very silly, but clears the buffer so that next messages are not broken
try:
self.driver.query("IDN")
except VisaIOError:
try:
self.driver.read()
except VisaIOError:
pass
# @make_async_thread
def move_piezo(self, speed: int, direction: int, axis: int):
""" Moves the mirror connected to the board
:param int speed: Speed, from 0 to 2**6 - 1 (it must fit in 6 bits).
:param direction: 0 or 1, depending on which direction to move the mirror
:param axis: 1 or 2, to select the axis
"""
with self.query_lock:
binary_speed = '{0:06b}'.format(speed)
binary_speed = str(direction) + str(1) + binary_speed
number = int(binary_speed, 2)
bytestring = number.to_bytes(1, 'big')
self.driver.query(f"mot{axis}")
self.driver.write_raw(bytestring)
ans = self.driver.read()
self.logger.info('Finished moving')
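# Editorial note (not part of the original driver): the command byte packs the
# direction into bit 7, a constant 1 into bit 6 and the speed into the low six
# bits; e.g. speed=60, direction=1 gives '1' + '1' + '111100' = 0b11111100.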
def finalize(self):
super().finalize()
self.clean_up_threads()
if len(self._threads):
self.logger.warning(f'There are {len(self._threads)} still alive in Arduino')
if __name__ == "__main__":
dev = Arduino.list_devices()[0]
ard = ArduinoModel(dev)
ard.laser_power = 50
ard.move_piezo(60, 1, 1)
sleep(2)
ard.move_piezo(60, 0, 1)
ard.laser_power = 100
sleep(2)
ard.laser_power = 1
| 31.54717
| 117
| 0.616926
|
fde3e27f3f79f3cd5f24ca8046f8fcf7dea2b95b
| 189,796
|
py
|
Python
|
tests/001_theoretical/test_010_ip_network_blueprint.py
|
vitlabuda/datalidator
|
539063a98990c6be165baeff6c2a74ac2fd7a130
|
[
"BSD-3-Clause"
] | null | null | null |
tests/001_theoretical/test_010_ip_network_blueprint.py
|
vitlabuda/datalidator
|
539063a98990c6be165baeff6c2a74ac2fd7a130
|
[
"BSD-3-Clause"
] | null | null | null |
tests/001_theoretical/test_010_ip_network_blueprint.py
|
vitlabuda/datalidator
|
539063a98990c6be165baeff6c2a74ac2fd7a130
|
[
"BSD-3-Clause"
] | null | null | null |
#!/bin/false
# Copyright (c) 2022 Vít Labuda. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import os.path
import sys
if "DATALIDATOR_TESTS_AUTOPATH" in os.environ:
__TESTS_DIR = os.path.dirname(os.path.realpath(__file__))
__MODULE_DIR = os.path.realpath(os.path.join(__TESTS_DIR, "../.."))
if __TESTS_DIR not in sys.path:
sys.path.insert(0, __TESTS_DIR)
if __MODULE_DIR not in sys.path:
sys.path.insert(0, __MODULE_DIR)
import theoretical_testutils
import pytest
import ipaddress
import datetime
import urllib.parse
import uuid
from test_002_bytes_blueprint import BytesableObject, ExceptionRaisingBytesableObject
from test_008_integer_blueprint import IntableObject, ExceptionRaisingIntableObject
from test_009_ip_address_blueprint import lzt
from test_014_string_blueprint import StringableObject, ExceptionRaisingStringableObject
from datalidator.blueprints.ParsingMode import ParsingMode
from datalidator.blueprints.impl.IPNetworkBlueprint import IPNetworkBlueprint
from datalidator.blueprints.exc.InvalidInputDataExc import InvalidInputDataExc
from datalidator.blueprints.exc.InputDataNotConvertibleExc import InputDataNotConvertibleExc
from datalidator.blueprints.exc.InputDataTypeNotInAllowlistExc import InputDataTypeNotInAllowlistExc
# NOTE: Some outputs might not seem "sane" when comparing them to input data.
# However, the outputs are produced by underlying Python standard library functions which might have some quirks caused by their internal implementation.
# As testing the standard library is obviously not the objective of this test, it does not matter - the important thing is that the tested blueprints, filters and validators themselves work fine.
__IP_NETWORK_BLUEPRINT_TEST_SUITE = (
(IPNetworkBlueprint(parsing_mode=ParsingMode.MODE_LOOSE, ignore_set_host_bits=False), (
(ipaddress.IPv4Network("127.0.0.0/8"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/8", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/255.0.0.0", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/0.255.255.255", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.12.34.56/32"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/32", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv6Network("2001:db8::/64"), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8::abcd/64", strict=False), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
("0.0.0.0/0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/8", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/24", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/24", InputDataNotConvertibleExc),
("8.525312/24", InputDataNotConvertibleExc),
("134743040/24", InputDataNotConvertibleExc),
("010.8.04.0/24", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/24", InputDataNotConvertibleExc),
("127.123.045.000/24", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/24", InputDataNotConvertibleExc),
("01111111000000000000000000000000/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/24", InputDataNotConvertibleExc),
("8.8.0x4.0/24", InputDataNotConvertibleExc),
("127..0/24", InputDataNotConvertibleExc),
("127.0..0/24", InputDataNotConvertibleExc),
("127.0.0..0/24", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/255.0.0.0", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/255.255.255.0", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/255.255.255.0", InputDataNotConvertibleExc),
("8.525312/255.255.255.0", InputDataNotConvertibleExc),
("134743040/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.0/255.255.255.0", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.000/255.255.255.0", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000000/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.0/255.255.255.0", InputDataNotConvertibleExc),
("127..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..0/255.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.255", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/0.0.0.255", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/0.0.0.255", InputDataNotConvertibleExc),
("8.525312/0.0.0.255", InputDataNotConvertibleExc),
("134743040/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.0/0.0.0.255", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.000/0.0.0.255", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000000/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.0/0.0.0.255", InputDataNotConvertibleExc),
("127..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..0/0.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/32", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/32", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/32", InputDataNotConvertibleExc),
("8.525316/32", InputDataNotConvertibleExc),
("134743044/32", InputDataNotConvertibleExc),
("010.8.04.4/32", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/32", InputDataNotConvertibleExc),
("127.123.045.001/32", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/32", InputDataNotConvertibleExc),
("01111111000000000000000000000001/32", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/32", InputDataNotConvertibleExc),
("8.8.0x4.4/32", InputDataNotConvertibleExc),
("127..1/32", InputDataNotConvertibleExc),
("127.0..1/32", InputDataNotConvertibleExc),
("127.0.0..1/32", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.255", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/255.255.255.255", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/255.255.255.255", InputDataNotConvertibleExc),
("8.525316/255.255.255.255", InputDataNotConvertibleExc),
("134743044/255.255.255.255", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.255", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/255.255.255.255", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.255", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/255.255.255.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.255", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.255", InputDataNotConvertibleExc),
("127..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.255", InputDataNotConvertibleExc),
("0.0.0.0", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028", InputDataNotConvertibleExc),
("8.525316", InputDataNotConvertibleExc),
("134743044", InputDataNotConvertibleExc),
("010.8.04.4", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1", InputDataNotConvertibleExc),
("127.123.045.001", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001", InputDataNotConvertibleExc),
("01111111000000000000000000000001", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4", InputDataNotConvertibleExc),
("8.8.0x4.4", InputDataNotConvertibleExc),
("127..1", InputDataNotConvertibleExc),
("127.0..1", InputDataNotConvertibleExc),
("127.0.0..1", InputDataNotConvertibleExc),
("127.0.0.0/0", InputDataNotConvertibleExc),
("0.1.0.0/8", InputDataNotConvertibleExc),
("8.8.4.4/24", InputDataNotConvertibleExc),
("8.8.1028/24", InputDataNotConvertibleExc),
("8.525316/24", InputDataNotConvertibleExc),
("134743044/24", InputDataNotConvertibleExc),
("010.8.04.4/24", InputDataNotConvertibleExc),
("127.1/24", InputDataNotConvertibleExc),
("127.123.045.001/24", InputDataNotConvertibleExc),
("0127.123.045.001/24", InputDataNotConvertibleExc),
("01111111000000000000000000000001/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/24", InputDataNotConvertibleExc),
("8.8.0x4.4/24", InputDataNotConvertibleExc),
("127..4/24", InputDataNotConvertibleExc),
("127.0..4/24", InputDataNotConvertibleExc),
("127.0.0..4/24", InputDataNotConvertibleExc),
("127.0.0.0/0.0.0.0", InputDataNotConvertibleExc),
("0.1.0.0/255.0.0.0", InputDataNotConvertibleExc),
("8.8.4.4/255.255.255.0", InputDataNotConvertibleExc),
("8.8.1028/255.255.255.0", InputDataNotConvertibleExc),
("8.525316/255.255.255.0", InputDataNotConvertibleExc),
("134743044/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.0", InputDataNotConvertibleExc),
("127.1/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.0", InputDataNotConvertibleExc),
("0127.123.045.001/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.0", InputDataNotConvertibleExc),
("127..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.0", InputDataNotConvertibleExc),
("0.1.0.0/0.255.255.255", InputDataNotConvertibleExc),
("8.8.4.4/0.0.0.255", InputDataNotConvertibleExc),
("8.8.1028/0.0.0.255", InputDataNotConvertibleExc),
("8.525316/0.0.0.255", InputDataNotConvertibleExc),
("134743044/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.4/0.0.0.255", InputDataNotConvertibleExc),
("127.1/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.001/0.0.0.255", InputDataNotConvertibleExc),
("0127.123.045.001/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.4/0.0.0.255", InputDataNotConvertibleExc),
("127..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0.0/08", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/00008", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/000_008", InputDataNotConvertibleExc), # "000_008" is convertible by int(), though
("127.0.0.0/8.0", InputDataNotConvertibleExc),
("127.0.0.0/8.", InputDataNotConvertibleExc),
("0.0.0.0/-1", InputDataNotConvertibleExc),
("0.0.0.0/33", InputDataNotConvertibleExc),
("0.0.0.0/80", InputDataNotConvertibleExc),
("0.0.0.0/127.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/8.8.4.4", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.0", InputDataNotConvertibleExc),
("0.0.0.0/255.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.0", InputDataNotConvertibleExc),
("0.0.0.0/255.255.254.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.253", InputDataNotConvertibleExc),
("0.0.0.0/000.000.000.000", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/0000.000.000.000", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.000", ipaddress.ip_network("0.0.0.0/24")),
("0.0.0.0/255.255.255.0000", InputDataNotConvertibleExc),
("0.0.0.0/0255.255.255.000", InputDataNotConvertibleExc),
("0.0.0.0/0.", InputDataNotConvertibleExc),
("0.0.0.0/0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.", InputDataNotConvertibleExc),
("::/0", ipaddress.ip_network("::/0")),
("::/32", ipaddress.ip_network("::/32")),
("fd12:3456:7890:abcd::/64", ipaddress.ip_network("fd12:3456:7890:abcd::/64")),
("1::/0", InputDataNotConvertibleExc),
("::abcd/32", InputDataNotConvertibleExc),
("fd12:3456:7890:abcd::7890/64", InputDataNotConvertibleExc),
("fd12:3456:7890::abcd::/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataNotConvertibleExc),
("2001:db00::/24", ipaddress.ip_network("2001:db00::/24")),
("2001:db00::/ffff:ff00::", InputDataNotConvertibleExc),
("fe80::/10", ipaddress.ip_network("fe80::/10")),
("FE80::/10", ipaddress.ip_network("fe80::/10")),
("Fe80::/10", ipaddress.ip_network("fe80::/10")),
("fE80::/10", ipaddress.ip_network("fe80::/10")),
("fe80::%enp4s0/64", ipaddress.ip_network("fe80::%enp4s0/64")),
("fe80::%Připojení k síti/64", ipaddress.ip_network("fe80::%Připojení k síti/64")),
("fe80::%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%abc/64", InputDataNotConvertibleExc),
("2001:db8::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0:0:0/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("2001:0db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("::/128", ipaddress.ip_network("::/128")),
("::", ipaddress.ip_network("::/128")),
(":/128", InputDataNotConvertibleExc),
(":::/128", InputDataNotConvertibleExc),
("::ffff:127.0.0.0/104", ipaddress.ip_network("::ffff:7f00:0/104")),
("::ffff:127.0.0.1/104", InputDataNotConvertibleExc),
("::ffff:127.0.0..0/104", InputDataNotConvertibleExc),
("::ffff:127.0.0..1/104", InputDataNotConvertibleExc),
("::ffff:127.0.0.0./104", InputDataNotConvertibleExc),
("::ffff:127.0.0.1./104", InputDataNotConvertibleExc),
("::/-1", InputDataNotConvertibleExc),
("::/129", InputDataNotConvertibleExc),
("::/000", ipaddress.ip_network("::/0")),
("::/032", ipaddress.ip_network("::/32")),
("::/320", InputDataNotConvertibleExc),
("::/000032", ipaddress.ip_network("::/32")),
("::/000_032", InputDataNotConvertibleExc),
("/", InputDataNotConvertibleExc),
("/32", InputDataNotConvertibleExc),
("/0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/", InputDataNotConvertibleExc),
("0.0.0.0:32", InputDataNotConvertibleExc),
("0.0.0.0\\32", InputDataNotConvertibleExc),
("0.0.0.0a32", InputDataNotConvertibleExc),
("0.0.0.0 32", InputDataNotConvertibleExc),
("0.0.0.0/\x00", InputDataNotConvertibleExc),
("\x00/32", InputDataNotConvertibleExc),
("127.0.0.1\x00/32", InputDataNotConvertibleExc),
("\x00127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.\x001/32", InputDataNotConvertibleExc),
("127.0.0.1/\x0032", InputDataNotConvertibleExc),
("127.0.0.1/3\x002", InputDataNotConvertibleExc),
("127.0.0.1/32\x00", InputDataNotConvertibleExc),
("0.0.0.0/t", InputDataNotConvertibleExc),
("t/32", InputDataNotConvertibleExc),
("127.0.0.1t/32", InputDataNotConvertibleExc),
("t127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.t1/32", InputDataNotConvertibleExc),
("127.0.0.1/t32", InputDataNotConvertibleExc),
("127.0.0.1/3t2", InputDataNotConvertibleExc),
("127.0.0.1/32t", InputDataNotConvertibleExc),
("0.0.0.0/ ", InputDataNotConvertibleExc),
(" /32", InputDataNotConvertibleExc),
("127.0.0.1 /32", InputDataNotConvertibleExc),
("127.0.0. 1/32", InputDataNotConvertibleExc),
("127.0.0.1/ 32", InputDataNotConvertibleExc),
("127.0.0.1/3 2", InputDataNotConvertibleExc),
("2001:db8::/", InputDataNotConvertibleExc),
("2001:db8::+128", InputDataNotConvertibleExc),
("2001:db8::\\128", InputDataNotConvertibleExc),
("2001:db8::x128", InputDataNotConvertibleExc),
("2001:db8:: 128", InputDataNotConvertibleExc),
("2001:db8::/\x00", InputDataNotConvertibleExc),
("\x00/128", InputDataNotConvertibleExc),
("2001:db8::\x00/128", InputDataNotConvertibleExc),
("\x002001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:\x00:/128", InputDataNotConvertibleExc),
("2001:db8::/\x00128", InputDataNotConvertibleExc),
("2001:db8::/12\x008", InputDataNotConvertibleExc),
("2001:db8::/128\x00", InputDataNotConvertibleExc),
("2001:db8::/t", InputDataNotConvertibleExc),
("t/128", InputDataNotConvertibleExc),
("2001:db8::t/128", InputDataNotConvertibleExc),
("t2001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:t:/128", InputDataNotConvertibleExc),
("2001:db8::/t128", InputDataNotConvertibleExc),
("2001:db8::/12t8", InputDataNotConvertibleExc),
("2001:db8::/128t", InputDataNotConvertibleExc),
("2001:db8::/ ", InputDataNotConvertibleExc),
(" /128", InputDataNotConvertibleExc),
("2001:db8:: /128", InputDataNotConvertibleExc),
("2001:db8: :/128", InputDataNotConvertibleExc),
("2001:db8::/ 128", InputDataNotConvertibleExc),
("2001:db8::/12 8", InputDataNotConvertibleExc),
("\r\n 127.0.0.1/32\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/32 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1/255.255.255.255\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/255.255.255.255 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 2001:db8::1def/128\t", ipaddress.ip_network("2001:db8::1def/128")),
("\v\f2001:db8::1def/128 ", ipaddress.ip_network("2001:db8::1def/128")),
("\r\n 2001:db8::1def\t", ipaddress.ip_network("2001:db8::1def/128")),
("\v\f2001:db8::1def ", ipaddress.ip_network("2001:db8::1def/128")),
("\x00", InputDataNotConvertibleExc),
("\x01", InputDataNotConvertibleExc),
("\x01" * 4, InputDataNotConvertibleExc),
("\x01" * 16, InputDataNotConvertibleExc),
("", InputDataNotConvertibleExc),
("hello", InputDataNotConvertibleExc),
(" \t\t Hello World! \r\v\n", InputDataNotConvertibleExc),
("\vf sadf asdf asd\r\rf as\ndfa sdfsa \t ", InputDataNotConvertibleExc),
(None, InputDataNotConvertibleExc),
# (True, ipaddress.ip_network("0.0.0.1/32")), # Raises DeprecationWarning!
# (False, ipaddress.ip_network("0.0.0.0/32")), # Raises DeprecationWarning!
(-(2 ** 16), InputDataNotConvertibleExc),
(-1, InputDataNotConvertibleExc),
(0, ipaddress.ip_network("0.0.0.0/32")),
(1, ipaddress.ip_network("0.0.0.1/32")),
(2131501624, ipaddress.ip_network("127.12.34.56/32")),
((2 ** 32) - 1, ipaddress.ip_network("255.255.255.255/32")),
((2 ** 32), ipaddress.ip_network("::1:0:0/128")),
((2 ** 32) + 1, ipaddress.ip_network("::1:0:1/128")),
(42541956123769884636017138956568135748, ipaddress.ip_network("2001:4860:4860::8844/128")),
((2 ** 128) - 1, ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
((2 ** 128), InputDataNotConvertibleExc),
((2 ** 128) + 1, InputDataNotConvertibleExc),
((2 ** 256), InputDataNotConvertibleExc),
(-0.0, InputDataNotConvertibleExc),
(0.0, InputDataNotConvertibleExc),
(0.5, InputDataNotConvertibleExc),
(1.0, InputDataNotConvertibleExc),
(1.5, InputDataNotConvertibleExc),
(2131501624.0, InputDataNotConvertibleExc),
(2131501624.5, InputDataNotConvertibleExc),
(42541956123769884636017138956568135748.0, InputDataNotConvertibleExc),
(42541956123769884636017138956568135748.5, InputDataNotConvertibleExc),
(float("inf"), InputDataNotConvertibleExc),
(float("-inf"), InputDataNotConvertibleExc),
(float("nan"), InputDataNotConvertibleExc),
(b'', InputDataNotConvertibleExc),
(b'8.8.4.4/32', InputDataNotConvertibleExc),
(b'2001:4860:4860::8844/128', InputDataNotConvertibleExc),
(b'8.8.4.4', InputDataNotConvertibleExc),
(b'2001:4860:4860::8844', InputDataNotConvertibleExc),
(b'\x00', InputDataNotConvertibleExc),
(b'\x01', InputDataNotConvertibleExc),
(b'\x01' * 3, InputDataNotConvertibleExc),
(b'\x01' * 4, ipaddress.ip_network("1.1.1.1/32")),
(b'\x01' * 5, InputDataNotConvertibleExc),
(b'\x01' * 15, InputDataNotConvertibleExc),
(b'\x01' * 16, ipaddress.ip_network("101:101:101:101:101:101:101:101/128")),
(b'\x01' * 17, InputDataNotConvertibleExc),
(bytearray(b'\x01' * 4), InputDataNotConvertibleExc),
(bytearray(b'\x01' * 16), InputDataNotConvertibleExc),
([], InputDataNotConvertibleExc),
([b'\x00'], InputDataNotConvertibleExc),
([b'\x01'] * 4, InputDataNotConvertibleExc),
([b'\x01'] * 16, InputDataNotConvertibleExc),
({}, InputDataNotConvertibleExc),
({b'\x01': b'\x01'}, InputDataNotConvertibleExc),
(ipaddress.ip_address("127.0.0.1"), ipaddress.ip_network("127.0.0.1/32")),
(ipaddress.ip_address("2001:db8::abcd"), ipaddress.ip_network("2001:db8::abcd/128")),
(int, InputDataNotConvertibleExc),
(theoretical_testutils.EmptyObject, InputDataNotConvertibleExc),
(datetime.datetime.now(), InputDataNotConvertibleExc),
(datetime.datetime.now().date(), InputDataNotConvertibleExc),
(datetime.datetime.now().time(), InputDataNotConvertibleExc),
(urllib.parse.urlparse("https://www.google.com/test"), InputDataNotConvertibleExc),
(urllib.parse.urlparse("https://127.0.0.1/test"), InputDataNotConvertibleExc),
(urllib.parse.urlparse("https://[::1]/test"), InputDataNotConvertibleExc),
(uuid.UUID('{12345678-1234-5678-1234-567812345678}'), InputDataNotConvertibleExc),
(theoretical_testutils.EmptyObject(), InputDataNotConvertibleExc),
(StringableObject(theoretical_testutils.EmptyObject()), InputDataNotConvertibleExc),
(StringableObject("8.8.4.4/32"), ipaddress.ip_network("8.8.4.4/32")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844/128"), ipaddress.ip_network("2001:4860:4860::8844/128")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("8.8.4.4"), ipaddress.ip_network("8.8.4.4/32")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844"), ipaddress.ip_network("2001:4860:4860::8844/128")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject(2131501624), InputDataNotConvertibleExc),
(StringableObject(42541956123769884636017138956568135748), InputDataNotConvertibleExc),
(StringableObject(b'\x01' * 4), InputDataNotConvertibleExc),
(StringableObject(b'\x01' * 16), InputDataNotConvertibleExc),
(ExceptionRaisingStringableObject(), InputDataNotConvertibleExc),
(IntableObject(theoretical_testutils.EmptyObject()), InputDataNotConvertibleExc),
(IntableObject("8.8.4.4/32"), InputDataNotConvertibleExc),
(IntableObject("2001:4860:4860::8844/128"), InputDataNotConvertibleExc),
(IntableObject("8.8.4.4"), InputDataNotConvertibleExc),
(IntableObject("2001:4860:4860::8844"), InputDataNotConvertibleExc),
(IntableObject(2131501624), InputDataNotConvertibleExc),
(IntableObject(42541956123769884636017138956568135748), InputDataNotConvertibleExc),
(IntableObject(b'\x01' * 4), InputDataNotConvertibleExc),
(IntableObject(b'\x01' * 16), InputDataNotConvertibleExc),
(ExceptionRaisingIntableObject(), InputDataNotConvertibleExc),
(BytesableObject(theoretical_testutils.EmptyObject()), InputDataNotConvertibleExc),
(BytesableObject("8.8.4.4/32"), InputDataNotConvertibleExc),
(BytesableObject("2001:4860:4860::8844/128"), InputDataNotConvertibleExc),
(BytesableObject("8.8.4.4"), InputDataNotConvertibleExc),
(BytesableObject("2001:4860:4860::8844"), InputDataNotConvertibleExc),
(BytesableObject(2131501624), InputDataNotConvertibleExc),
(BytesableObject(42541956123769884636017138956568135748), InputDataNotConvertibleExc),
(BytesableObject(b'\x01' * 4), InputDataNotConvertibleExc),
(BytesableObject(b'\x01' * 16), InputDataNotConvertibleExc),
(ExceptionRaisingBytesableObject(), InputDataNotConvertibleExc),
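# Tuple inputs. Two-element (address, prefix-or-netmask) tuples behave much like the equivalent
# "address/prefix" strings: in this group, address parts with host bits set are not convertible, and
# textual bytes address parts (e.g. b"127.0.0.0", b"::") are rejected while packed 4-/16-byte values work.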
# (("0.0.0.0", False), ipaddress.ip_network("0.0.0.0/0")), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
# (("0.0.0.0", True), ipaddress.ip_network("0.0.0.0/1")), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
(("127.0.0.0", 8), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", "8"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", "008"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", b"8"), ipaddress.ip_network("127.0.0.0/8")), # ???
(("127.0.0.0", b"008"), ipaddress.ip_network("127.0.0.0/8")), # ???
(("127.0.0.0", "255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", b"255.0.0.0"), InputDataNotConvertibleExc),
(("127.0.0.0", "0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", b"0.255.255.255"), InputDataNotConvertibleExc),
(("127.0.0.1", 8), InputDataNotConvertibleExc),
(("127.0.0.1", "8"), InputDataNotConvertibleExc),
(("127.0.0.1", "008"), InputDataNotConvertibleExc),
(("127.0.0.1", b"8"), InputDataNotConvertibleExc), # ???
(("127.0.0.1", b"008"), InputDataNotConvertibleExc), # ???
(("127.0.0.1", "255.0.0.0"), InputDataNotConvertibleExc),
(("127.0.0.1", b"255.0.0.0"), InputDataNotConvertibleExc),
(("127.0.0.1", "0.255.255.255"), InputDataNotConvertibleExc),
(("127.0.0.1", b"0.255.255.255"), InputDataNotConvertibleExc),
((b"127.0.0.0", 8), InputDataNotConvertibleExc),
((b"127.0.0.0", "8"), InputDataNotConvertibleExc),
((b"127.0.0.0", "008"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"8"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"008"), InputDataNotConvertibleExc),
((b"127.0.0.0", "255.0.0.0"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"255.0.0.0"), InputDataNotConvertibleExc),
((b"127.0.0.0", "0.255.255.255"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"0.255.255.255"), InputDataNotConvertibleExc),
((2131501624, 32), ipaddress.ip_network("127.12.34.56/32")),
((b"\x01" * 4, 32), ipaddress.ip_network("1.1.1.1/32")),
(("::", 8), ipaddress.ip_network("::/8")),
(("::", "8"), ipaddress.ip_network("::/8")),
(("::", "008"), ipaddress.ip_network("::/8")),
(("::", b"8"), ipaddress.ip_network("::/8")),
(("::", b"008"), ipaddress.ip_network("::/8")),
(("::1", 8), InputDataNotConvertibleExc),
(("::1", "8"), InputDataNotConvertibleExc),
(("::1", "008"), InputDataNotConvertibleExc),
(("::1", b"8"), InputDataNotConvertibleExc),
(("::1", b"008"), InputDataNotConvertibleExc),
((b"::", 8), InputDataNotConvertibleExc),
((b"::", "8"), InputDataNotConvertibleExc),
((b"::", "008"), InputDataNotConvertibleExc),
((b"::", b"8"), InputDataNotConvertibleExc),
((b"::", b"008"), InputDataNotConvertibleExc),
((42541956123769884636017138956568135748, 128), ipaddress.ip_network("2001:4860:4860::8844/128")),
((b"\x01" * 16, 128), ipaddress.ip_network("101:101:101:101:101:101:101:101/128")),
((), InputDataNotConvertibleExc),
(("127.0.0.0",), ipaddress.ip_network("127.0.0.0/32")), # ?????
(("127.1.2.3",), ipaddress.ip_network("127.1.2.3/32")), # ?????
(("127.0.0.0", 8, 8), InputDataNotConvertibleExc),
(("::",), ipaddress.ip_network("::/128")), # ?????
(("::abcd",), ipaddress.ip_network("::abcd/128")), # ?????
(("::", 8, 8), InputDataNotConvertibleExc),
)),
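# MODE_LOOSE with ignore_set_host_bits=True: inputs whose host bits are set are expected to be
# masked down to the network address (e.g. "8.8.4.4/24" -> 8.8.4.0/24) instead of being rejected.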
(IPNetworkBlueprint(parsing_mode=ParsingMode.MODE_LOOSE, ignore_set_host_bits=True), (
(ipaddress.IPv4Network("127.0.0.0/8"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/8", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/255.0.0.0", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/0.255.255.255", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.12.34.56/32"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/32", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv6Network("2001:db8::/64"), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8::abcd/64", strict=False), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
("0.0.0.0/0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/8", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/24", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/24", InputDataNotConvertibleExc),
("8.525312/24", InputDataNotConvertibleExc),
("134743040/24", InputDataNotConvertibleExc),
("010.8.04.0/24", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/24", InputDataNotConvertibleExc),
("127.123.045.000/24", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/24", InputDataNotConvertibleExc),
("01111111000000000000000000000000/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/24", InputDataNotConvertibleExc),
("8.8.0x4.0/24", InputDataNotConvertibleExc),
("127..0/24", InputDataNotConvertibleExc),
("127.0..0/24", InputDataNotConvertibleExc),
("127.0.0..0/24", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/255.0.0.0", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/255.255.255.0", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/255.255.255.0", InputDataNotConvertibleExc),
("8.525312/255.255.255.0", InputDataNotConvertibleExc),
("134743040/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.0/255.255.255.0", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.000/255.255.255.0", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000000/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.0/255.255.255.0", InputDataNotConvertibleExc),
("127..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..0/255.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.255", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/0.0.0.255", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/0.0.0.255", InputDataNotConvertibleExc),
("8.525312/0.0.0.255", InputDataNotConvertibleExc),
("134743040/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.0/0.0.0.255", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.000/0.0.0.255", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000000/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.0/0.0.0.255", InputDataNotConvertibleExc),
("127..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..0/0.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/32", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/32", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/32", InputDataNotConvertibleExc),
("8.525316/32", InputDataNotConvertibleExc),
("134743044/32", InputDataNotConvertibleExc),
("010.8.04.4/32", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/32", InputDataNotConvertibleExc),
("127.123.045.001/32", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/32", InputDataNotConvertibleExc),
("01111111000000000000000000000001/32", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/32", InputDataNotConvertibleExc),
("8.8.0x4.4/32", InputDataNotConvertibleExc),
("127..1/32", InputDataNotConvertibleExc),
("127.0..1/32", InputDataNotConvertibleExc),
("127.0.0..1/32", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.255", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/255.255.255.255", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/255.255.255.255", InputDataNotConvertibleExc),
("8.525316/255.255.255.255", InputDataNotConvertibleExc),
("134743044/255.255.255.255", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.255", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/255.255.255.255", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.255", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/255.255.255.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.255", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.255", InputDataNotConvertibleExc),
("127..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.255", InputDataNotConvertibleExc),
("0.0.0.0", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028", InputDataNotConvertibleExc),
("8.525316", InputDataNotConvertibleExc),
("134743044", InputDataNotConvertibleExc),
("010.8.04.4", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1", InputDataNotConvertibleExc),
("127.123.045.001", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001", InputDataNotConvertibleExc),
("01111111000000000000000000000001", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4", InputDataNotConvertibleExc),
("8.8.0x4.4", InputDataNotConvertibleExc),
("127..1", InputDataNotConvertibleExc),
("127.0..1", InputDataNotConvertibleExc),
("127.0.0..1", InputDataNotConvertibleExc),
("127.0.0.0/0", ipaddress.ip_network("0.0.0.0/0")),
("0.1.0.0/8", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.4/24", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1028/24", InputDataNotConvertibleExc),
("8.525316/24", InputDataNotConvertibleExc),
("134743044/24", InputDataNotConvertibleExc),
("010.8.04.4/24", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.1/24", InputDataNotConvertibleExc),
("127.123.045.001/24", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/24", InputDataNotConvertibleExc),
("01111111000000000000000000000001/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/24", InputDataNotConvertibleExc),
("8.8.0x4.4/24", InputDataNotConvertibleExc),
("127..4/24", InputDataNotConvertibleExc),
("127.0..4/24", InputDataNotConvertibleExc),
("127.0.0..4/24", InputDataNotConvertibleExc),
("127.0.0.0/0.0.0.0", ipaddress.ip_network("0.0.0.0/0")),
("0.1.0.0/255.0.0.0", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.4/255.255.255.0", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1028/255.255.255.0", InputDataNotConvertibleExc),
("8.525316/255.255.255.0", InputDataNotConvertibleExc),
("134743044/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.0", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.1/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.0", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.0", InputDataNotConvertibleExc),
("127..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.0", InputDataNotConvertibleExc),
("0.1.0.0/0.255.255.255", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.4/0.0.0.255", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1028/0.0.0.255", InputDataNotConvertibleExc),
("8.525316/0.0.0.255", InputDataNotConvertibleExc),
("134743044/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.4/0.0.0.255", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.1/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.001/0.0.0.255", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.4/0.0.0.255", InputDataNotConvertibleExc),
("127..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0.0/08", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/00008", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/000_008", InputDataNotConvertibleExc), # "000_008" is convertible by int(), though
("127.0.0.0/8.0", InputDataNotConvertibleExc),
("127.0.0.0/8.", InputDataNotConvertibleExc),
("0.0.0.0/-1", InputDataNotConvertibleExc),
("0.0.0.0/33", InputDataNotConvertibleExc),
("0.0.0.0/80", InputDataNotConvertibleExc),
("0.0.0.0/127.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/8.8.4.4", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.0", InputDataNotConvertibleExc),
("0.0.0.0/255.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.0", InputDataNotConvertibleExc),
("0.0.0.0/255.255.254.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.253", InputDataNotConvertibleExc),
("0.0.0.0/000.000.000.000", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/0000.000.000.000", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.000", ipaddress.ip_network("0.0.0.0/24")),
("0.0.0.0/255.255.255.0000", InputDataNotConvertibleExc),
("0.0.0.0/0255.255.255.000", InputDataNotConvertibleExc),
("0.0.0.0/0.", InputDataNotConvertibleExc),
("0.0.0.0/0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.", InputDataNotConvertibleExc),
("::/0", ipaddress.ip_network("::/0")),
("::/32", ipaddress.ip_network("::/32")),
("fd12:3456:7890:abcd::/64", ipaddress.ip_network("fd12:3456:7890:abcd::/64")),
("1::/0", ipaddress.ip_network("::/0")),
("::abcd/32", ipaddress.ip_network("::/32")),
("fd12:3456:7890:abcd::7890/64", ipaddress.ip_network("fd12:3456:7890:abcd::/64")),
("fd12:3456:7890::abcd::/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64", ipaddress.ip_network("ffff:ffff:ffff:ffff::/64")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataNotConvertibleExc),
("2001:db00::/24", ipaddress.ip_network("2001:db00::/24")),
("2001:db00::/ffff:ff00::", InputDataNotConvertibleExc),
("fe80::/10", ipaddress.ip_network("fe80::/10")),
("FE80::/10", ipaddress.ip_network("fe80::/10")),
("Fe80::/10", ipaddress.ip_network("fe80::/10")),
("fE80::/10", ipaddress.ip_network("fe80::/10")),
("fe80::%enp4s0/64", ipaddress.ip_network("fe80::%enp4s0/64")),
("fe80::%Připojení k síti/64", ipaddress.ip_network("fe80::%Připojení k síti/64")),
("fe80::%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%abc/64", InputDataNotConvertibleExc),
("2001:db8::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0:0:0/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("2001:0db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("::/128", ipaddress.ip_network("::/128")),
("::", ipaddress.ip_network("::/128")),
(":/128", InputDataNotConvertibleExc),
(":::/128", InputDataNotConvertibleExc),
("::ffff:127.0.0.0/104", ipaddress.ip_network("::ffff:7f00:0/104")),
("::ffff:127.0.0.1/104", ipaddress.ip_network("::ffff:7f00:0/104")),
("::ffff:127.0.0..0/104", InputDataNotConvertibleExc),
("::ffff:127.0.0..1/104", InputDataNotConvertibleExc),
("::ffff:127.0.0.0./104", InputDataNotConvertibleExc),
("::ffff:127.0.0.1./104", InputDataNotConvertibleExc),
("::/-1", InputDataNotConvertibleExc),
("::/129", InputDataNotConvertibleExc),
("::/000", ipaddress.ip_network("::/0")),
("::/032", ipaddress.ip_network("::/32")),
("::/320", InputDataNotConvertibleExc),
("::/000032", ipaddress.ip_network("::/32")),
("::/000_032", InputDataNotConvertibleExc),
("/", InputDataNotConvertibleExc),
("/32", InputDataNotConvertibleExc),
("/0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/", InputDataNotConvertibleExc),
("0.0.0.0:32", InputDataNotConvertibleExc),
("0.0.0.0\\32", InputDataNotConvertibleExc),
("0.0.0.0a32", InputDataNotConvertibleExc),
("0.0.0.0 32", InputDataNotConvertibleExc),
("0.0.0.0/\x00", InputDataNotConvertibleExc),
("\x00/32", InputDataNotConvertibleExc),
("127.0.0.1\x00/32", InputDataNotConvertibleExc),
("\x00127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.\x001/32", InputDataNotConvertibleExc),
("127.0.0.1/\x0032", InputDataNotConvertibleExc),
("127.0.0.1/3\x002", InputDataNotConvertibleExc),
("127.0.0.1/32\x00", InputDataNotConvertibleExc),
("0.0.0.0/t", InputDataNotConvertibleExc),
("t/32", InputDataNotConvertibleExc),
("127.0.0.1t/32", InputDataNotConvertibleExc),
("t127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.t1/32", InputDataNotConvertibleExc),
("127.0.0.1/t32", InputDataNotConvertibleExc),
("127.0.0.1/3t2", InputDataNotConvertibleExc),
("127.0.0.1/32t", InputDataNotConvertibleExc),
("0.0.0.0/ ", InputDataNotConvertibleExc),
(" /32", InputDataNotConvertibleExc),
("127.0.0.1 /32", InputDataNotConvertibleExc),
("127.0.0. 1/32", InputDataNotConvertibleExc),
("127.0.0.1/ 32", InputDataNotConvertibleExc),
("127.0.0.1/3 2", InputDataNotConvertibleExc),
("2001:db8::/", InputDataNotConvertibleExc),
("2001:db8::+128", InputDataNotConvertibleExc),
("2001:db8::\\128", InputDataNotConvertibleExc),
("2001:db8::x128", InputDataNotConvertibleExc),
("2001:db8:: 128", InputDataNotConvertibleExc),
("2001:db8::/\x00", InputDataNotConvertibleExc),
("\x00/128", InputDataNotConvertibleExc),
("2001:db8::\x00/128", InputDataNotConvertibleExc),
("\x002001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:\x00:/128", InputDataNotConvertibleExc),
("2001:db8::/\x00128", InputDataNotConvertibleExc),
("2001:db8::/12\x008", InputDataNotConvertibleExc),
("2001:db8::/128\x00", InputDataNotConvertibleExc),
("2001:db8::/t", InputDataNotConvertibleExc),
("t/128", InputDataNotConvertibleExc),
("2001:db8::t/128", InputDataNotConvertibleExc),
("t2001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:t:/128", InputDataNotConvertibleExc),
("2001:db8::/t128", InputDataNotConvertibleExc),
("2001:db8::/12t8", InputDataNotConvertibleExc),
("2001:db8::/128t", InputDataNotConvertibleExc),
("2001:db8::/ ", InputDataNotConvertibleExc),
(" /128", InputDataNotConvertibleExc),
("2001:db8:: /128", InputDataNotConvertibleExc),
("2001:db8: :/128", InputDataNotConvertibleExc),
("2001:db8::/ 128", InputDataNotConvertibleExc),
("2001:db8::/12 8", InputDataNotConvertibleExc),
("\r\n 127.0.0.1/32\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/32 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1/255.255.255.255\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/255.255.255.255 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 2001:db8::1def/128\t", ipaddress.ip_network("2001:db8::1def/128")),
("\v\f2001:db8::1def/128 ", ipaddress.ip_network("2001:db8::1def/128")),
("\r\n 2001:db8::1def\t", ipaddress.ip_network("2001:db8::1def/128")),
("\v\f2001:db8::1def ", ipaddress.ip_network("2001:db8::1def/128")),
("\x00", InputDataNotConvertibleExc),
("\x01", InputDataNotConvertibleExc),
("\x01" * 4, InputDataNotConvertibleExc),
("\x01" * 16, InputDataNotConvertibleExc),
("", InputDataNotConvertibleExc),
("hello", InputDataNotConvertibleExc),
(" \t\t Hello World! \r\v\n", InputDataNotConvertibleExc),
("\vf sadf asdf asd\r\rf as\ndfa sdfsa \t ", InputDataNotConvertibleExc),
(None, InputDataNotConvertibleExc),
# (True, ipaddress.ip_network("0.0.0.1/32")), # Raises DeprecationWarning!
# (False, ipaddress.ip_network("0.0.0.0/32")), # Raises DeprecationWarning!
(-(2 ** 16), InputDataNotConvertibleExc),
(-1, InputDataNotConvertibleExc),
(0, ipaddress.ip_network("0.0.0.0/32")),
(1, ipaddress.ip_network("0.0.0.1/32")),
(2131501624, ipaddress.ip_network("127.12.34.56/32")),
((2 ** 32) - 1, ipaddress.ip_network("255.255.255.255/32")),
((2 ** 32), ipaddress.ip_network("::1:0:0/128")),
((2 ** 32) + 1, ipaddress.ip_network("::1:0:1/128")),
(42541956123769884636017138956568135748, ipaddress.ip_network("2001:4860:4860::8844/128")),
((2 ** 128) - 1, ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
((2 ** 128), InputDataNotConvertibleExc),
((2 ** 128) + 1, InputDataNotConvertibleExc),
((2 ** 256), InputDataNotConvertibleExc),
(-0.0, InputDataNotConvertibleExc),
(0.0, InputDataNotConvertibleExc),
(0.5, InputDataNotConvertibleExc),
(1.0, InputDataNotConvertibleExc),
(1.5, InputDataNotConvertibleExc),
(2131501624.0, InputDataNotConvertibleExc),
(2131501624.5, InputDataNotConvertibleExc),
(42541956123769884636017138956568135748.0, InputDataNotConvertibleExc),
(42541956123769884636017138956568135748.5, InputDataNotConvertibleExc),
(float("inf"), InputDataNotConvertibleExc),
(float("-inf"), InputDataNotConvertibleExc),
(float("nan"), InputDataNotConvertibleExc),
(b'', InputDataNotConvertibleExc),
(b'8.8.4.4/32', InputDataNotConvertibleExc),
(b'2001:4860:4860::8844/128', InputDataNotConvertibleExc),
(b'8.8.4.4', InputDataNotConvertibleExc),
(b'2001:4860:4860::8844', InputDataNotConvertibleExc),
(b'\x00', InputDataNotConvertibleExc),
(b'\x01', InputDataNotConvertibleExc),
(b'\x01' * 3, InputDataNotConvertibleExc),
(b'\x01' * 4, ipaddress.ip_network("1.1.1.1/32")),
(b'\x01' * 5, InputDataNotConvertibleExc),
(b'\x01' * 15, InputDataNotConvertibleExc),
(b'\x01' * 16, ipaddress.ip_network("101:101:101:101:101:101:101:101/128")),
(b'\x01' * 17, InputDataNotConvertibleExc),
(bytearray(b'\x01' * 4), InputDataNotConvertibleExc),
(bytearray(b'\x01' * 16), InputDataNotConvertibleExc),
([], InputDataNotConvertibleExc),
([b'\x00'], InputDataNotConvertibleExc),
([b'\x01'] * 4, InputDataNotConvertibleExc),
([b'\x01'] * 16, InputDataNotConvertibleExc),
({}, InputDataNotConvertibleExc),
({b'\x01': b'\x01'}, InputDataNotConvertibleExc),
(ipaddress.ip_address("127.0.0.1"), ipaddress.ip_network("127.0.0.1/32")),
(ipaddress.ip_address("2001:db8::abcd"), ipaddress.ip_network("2001:db8::abcd/128")),
(int, InputDataNotConvertibleExc),
(theoretical_testutils.EmptyObject, InputDataNotConvertibleExc),
(datetime.datetime.now(), InputDataNotConvertibleExc),
(datetime.datetime.now().date(), InputDataNotConvertibleExc),
(datetime.datetime.now().time(), InputDataNotConvertibleExc),
(urllib.parse.urlparse("https://www.google.com/test"), InputDataNotConvertibleExc),
(urllib.parse.urlparse("https://127.0.0.1/test"), InputDataNotConvertibleExc),
(urllib.parse.urlparse("https://[::1]/test"), InputDataNotConvertibleExc),
(uuid.UUID('{12345678-1234-5678-1234-567812345678}'), InputDataNotConvertibleExc),
(theoretical_testutils.EmptyObject(), InputDataNotConvertibleExc),
(StringableObject(theoretical_testutils.EmptyObject()), InputDataNotConvertibleExc),
(StringableObject("8.8.4.4/32"), ipaddress.ip_network("8.8.4.4/32")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844/128"), ipaddress.ip_network("2001:4860:4860::8844/128")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("8.8.4.4"), ipaddress.ip_network("8.8.4.4/32")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844"), ipaddress.ip_network("2001:4860:4860::8844/128")), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject(2131501624), InputDataNotConvertibleExc),
(StringableObject(42541956123769884636017138956568135748), InputDataNotConvertibleExc),
(StringableObject(b'\x01' * 4), InputDataNotConvertibleExc),
(StringableObject(b'\x01' * 16), InputDataNotConvertibleExc),
(ExceptionRaisingStringableObject(), InputDataNotConvertibleExc),
(IntableObject(theoretical_testutils.EmptyObject()), InputDataNotConvertibleExc),
(IntableObject("8.8.4.4/32"), InputDataNotConvertibleExc),
(IntableObject("2001:4860:4860::8844/128"), InputDataNotConvertibleExc),
(IntableObject("8.8.4.4"), InputDataNotConvertibleExc),
(IntableObject("2001:4860:4860::8844"), InputDataNotConvertibleExc),
(IntableObject(2131501624), InputDataNotConvertibleExc),
(IntableObject(42541956123769884636017138956568135748), InputDataNotConvertibleExc),
(IntableObject(b'\x01' * 4), InputDataNotConvertibleExc),
(IntableObject(b'\x01' * 16), InputDataNotConvertibleExc),
(ExceptionRaisingIntableObject(), InputDataNotConvertibleExc),
(BytesableObject(theoretical_testutils.EmptyObject()), InputDataNotConvertibleExc),
(BytesableObject("8.8.4.4/32"), InputDataNotConvertibleExc),
(BytesableObject("2001:4860:4860::8844/128"), InputDataNotConvertibleExc),
(BytesableObject("8.8.4.4"), InputDataNotConvertibleExc),
(BytesableObject("2001:4860:4860::8844"), InputDataNotConvertibleExc),
(BytesableObject(2131501624), InputDataNotConvertibleExc),
(BytesableObject(42541956123769884636017138956568135748), InputDataNotConvertibleExc),
(BytesableObject(b'\x01' * 4), InputDataNotConvertibleExc),
(BytesableObject(b'\x01' * 16), InputDataNotConvertibleExc),
(ExceptionRaisingBytesableObject(), InputDataNotConvertibleExc),
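# Tuple inputs: with ignore_set_host_bits=True, (address, prefix) tuples whose address part has
# host bits set are masked as well (e.g. ("127.0.0.1", 8) -> 127.0.0.0/8).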
# (("0.0.0.0", False), ipaddress.ip_network("0.0.0.0/0")), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
# (("0.0.0.0", True), ipaddress.ip_network("0.0.0.0/1")), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
(("127.0.0.0", 8), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", "8"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", "008"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", b"8"), ipaddress.ip_network("127.0.0.0/8")), # ???
(("127.0.0.0", b"008"), ipaddress.ip_network("127.0.0.0/8")), # ???
(("127.0.0.0", "255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", b"255.0.0.0"), InputDataNotConvertibleExc),
(("127.0.0.0", "0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.0", b"0.255.255.255"), InputDataNotConvertibleExc),
(("127.0.0.1", 8), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.1", "8"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.1", "008"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.1", b"8"), ipaddress.ip_network("127.0.0.0/8")), # ???
(("127.0.0.1", b"008"), ipaddress.ip_network("127.0.0.0/8")), # ???
(("127.0.0.1", "255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.1", b"255.0.0.0"), InputDataNotConvertibleExc),
(("127.0.0.1", "0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(("127.0.0.1", b"0.255.255.255"), InputDataNotConvertibleExc),
((b"127.0.0.0", 8), InputDataNotConvertibleExc),
((b"127.0.0.0", "8"), InputDataNotConvertibleExc),
((b"127.0.0.0", "008"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"8"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"008"), InputDataNotConvertibleExc),
((b"127.0.0.0", "255.0.0.0"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"255.0.0.0"), InputDataNotConvertibleExc),
((b"127.0.0.0", "0.255.255.255"), InputDataNotConvertibleExc),
((b"127.0.0.0", b"0.255.255.255"), InputDataNotConvertibleExc),
((2131501624, 32), ipaddress.ip_network("127.12.34.56/32")),
((b"\x01" * 4, 32), ipaddress.ip_network("1.1.1.1/32")),
(("::", 8), ipaddress.ip_network("::/8")),
(("::", "8"), ipaddress.ip_network("::/8")),
(("::", "008"), ipaddress.ip_network("::/8")),
(("::", b"8"), ipaddress.ip_network("::/8")),
(("::", b"008"), ipaddress.ip_network("::/8")),
(("::1", 8), ipaddress.ip_network("::/8")),
(("::1", "8"), ipaddress.ip_network("::/8")),
(("::1", "008"), ipaddress.ip_network("::/8")),
(("::1", b"8"), ipaddress.ip_network("::/8")),
(("::1", b"008"), ipaddress.ip_network("::/8")),
((b"::", 8), InputDataNotConvertibleExc),
((b"::", "8"), InputDataNotConvertibleExc),
((b"::", "008"), InputDataNotConvertibleExc),
((b"::", b"8"), InputDataNotConvertibleExc),
((b"::", b"008"), InputDataNotConvertibleExc),
((42541956123769884636017138956568135748, 128), ipaddress.ip_network("2001:4860:4860::8844/128")),
((b"\x01" * 16, 128), ipaddress.ip_network("101:101:101:101:101:101:101:101/128")),
((), InputDataNotConvertibleExc),
(("127.0.0.0",), ipaddress.ip_network("127.0.0.0/32")), # ?????
(("127.1.2.3",), ipaddress.ip_network("127.1.2.3/32")), # ?????
(("127.0.0.0", 8, 8), InputDataNotConvertibleExc),
(("::",), ipaddress.ip_network("::/128")), # ?????
(("::abcd",), ipaddress.ip_network("::abcd/128")), # ?????
(("::", 8, 8), InputDataNotConvertibleExc),
)),
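# MODE_RATIONAL with ignore_set_host_bits=False: the stricter mode below additionally refuses bare
# addresses without an explicit prefix length or netmask (InvalidInputDataExc), while inputs with
# host bits set remain not convertible.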
(IPNetworkBlueprint(parsing_mode=ParsingMode.MODE_RATIONAL, ignore_set_host_bits=False), (
(ipaddress.IPv4Network("127.0.0.0/8"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/8", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/255.0.0.0", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/0.255.255.255", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.12.34.56/32"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/32", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv6Network("2001:db8::/64"), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8::abcd/64", strict=False), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
("0.0.0.0/0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/8", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/24", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/24", InputDataNotConvertibleExc),
("8.525312/24", InputDataNotConvertibleExc),
("134743040/24", InputDataNotConvertibleExc),
("010.8.04.0/24", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/24", InputDataNotConvertibleExc),
("127.123.045.000/24", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/24", InputDataNotConvertibleExc),
("01111111000000000000000000000000/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/24", InputDataNotConvertibleExc),
("8.8.0x4.0/24", InputDataNotConvertibleExc),
("127..0/24", InputDataNotConvertibleExc),
("127.0..0/24", InputDataNotConvertibleExc),
("127.0.0..0/24", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/255.0.0.0", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/255.255.255.0", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/255.255.255.0", InputDataNotConvertibleExc),
("8.525312/255.255.255.0", InputDataNotConvertibleExc),
("134743040/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.0/255.255.255.0", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.000/255.255.255.0", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000000/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.0/255.255.255.0", InputDataNotConvertibleExc),
("127..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..0/255.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.255", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/0.0.0.255", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/0.0.0.255", InputDataNotConvertibleExc),
("8.525312/0.0.0.255", InputDataNotConvertibleExc),
("134743040/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.0/0.0.0.255", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.000/0.0.0.255", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000000/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.0/0.0.0.255", InputDataNotConvertibleExc),
("127..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..0/0.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/32", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/32", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/32", InputDataNotConvertibleExc),
("8.525316/32", InputDataNotConvertibleExc),
("134743044/32", InputDataNotConvertibleExc),
("010.8.04.4/32", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/32", InputDataNotConvertibleExc),
("127.123.045.001/32", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/32", InputDataNotConvertibleExc),
("01111111000000000000000000000001/32", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/32", InputDataNotConvertibleExc),
("8.8.0x4.4/32", InputDataNotConvertibleExc),
("127..1/32", InputDataNotConvertibleExc),
("127.0..1/32", InputDataNotConvertibleExc),
("127.0.0..1/32", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.255", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/255.255.255.255", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/255.255.255.255", InputDataNotConvertibleExc),
("8.525316/255.255.255.255", InputDataNotConvertibleExc),
("134743044/255.255.255.255", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.255", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/255.255.255.255", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.255", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/255.255.255.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.255", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.255", InputDataNotConvertibleExc),
("127..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.255", InputDataNotConvertibleExc),
("0.0.0.0", InvalidInputDataExc),
("8.8.4.4", InvalidInputDataExc),
("8.8.1028", InvalidInputDataExc),
("8.525316", InvalidInputDataExc),
("134743044", InvalidInputDataExc),
("010.8.04.4", InvalidInputDataExc),
("127.1", InvalidInputDataExc),
("127.123.045.001", InvalidInputDataExc),
("0127.123.045.001", InvalidInputDataExc),
("01111111000000000000000000000001", InvalidInputDataExc),
("0x8.0x8.0x4.0x4", InvalidInputDataExc),
("8.8.0x4.4", InvalidInputDataExc),
("127..1", InvalidInputDataExc),
("127.0..1", InvalidInputDataExc),
("127.0.0..1", InvalidInputDataExc),
("127.0.0.0/0", InputDataNotConvertibleExc),
("0.1.0.0/8", InputDataNotConvertibleExc),
("8.8.4.4/24", InputDataNotConvertibleExc),
("8.8.1028/24", InputDataNotConvertibleExc),
("8.525316/24", InputDataNotConvertibleExc),
("134743044/24", InputDataNotConvertibleExc),
("010.8.04.4/24", InputDataNotConvertibleExc),
("127.1/24", InputDataNotConvertibleExc),
("127.123.045.001/24", InputDataNotConvertibleExc),
("0127.123.045.001/24", InputDataNotConvertibleExc),
("01111111000000000000000000000001/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/24", InputDataNotConvertibleExc),
("8.8.0x4.4/24", InputDataNotConvertibleExc),
("127..4/24", InputDataNotConvertibleExc),
("127.0..4/24", InputDataNotConvertibleExc),
("127.0.0..4/24", InputDataNotConvertibleExc),
("127.0.0.0/0.0.0.0", InputDataNotConvertibleExc),
("0.1.0.0/255.0.0.0", InputDataNotConvertibleExc),
("8.8.4.4/255.255.255.0", InputDataNotConvertibleExc),
("8.8.1028/255.255.255.0", InputDataNotConvertibleExc),
("8.525316/255.255.255.0", InputDataNotConvertibleExc),
("134743044/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.0", InputDataNotConvertibleExc),
("127.1/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.0", InputDataNotConvertibleExc),
("0127.123.045.001/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.0", InputDataNotConvertibleExc),
("127..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.0", InputDataNotConvertibleExc),
("0.1.0.0/0.255.255.255", InputDataNotConvertibleExc),
("8.8.4.4/0.0.0.255", InputDataNotConvertibleExc),
("8.8.1028/0.0.0.255", InputDataNotConvertibleExc),
("8.525316/0.0.0.255", InputDataNotConvertibleExc),
("134743044/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.4/0.0.0.255", InputDataNotConvertibleExc),
("127.1/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.001/0.0.0.255", InputDataNotConvertibleExc),
("0127.123.045.001/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.4/0.0.0.255", InputDataNotConvertibleExc),
("127..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0.0/08", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/00008", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/000_008", InputDataNotConvertibleExc), # "000_008" is convertible by int(), though
("127.0.0.0/8.0", InputDataNotConvertibleExc),
("127.0.0.0/8.", InputDataNotConvertibleExc),
("0.0.0.0/-1", InputDataNotConvertibleExc),
("0.0.0.0/33", InputDataNotConvertibleExc),
("0.0.0.0/80", InputDataNotConvertibleExc),
("0.0.0.0/127.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/8.8.4.4", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.0", InputDataNotConvertibleExc),
("0.0.0.0/255.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.0", InputDataNotConvertibleExc),
("0.0.0.0/255.255.254.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.253", InputDataNotConvertibleExc),
("0.0.0.0/000.000.000.000", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/0000.000.000.000", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.000", ipaddress.ip_network("0.0.0.0/24")),
("0.0.0.0/255.255.255.0000", InputDataNotConvertibleExc),
("0.0.0.0/0255.255.255.000", InputDataNotConvertibleExc),
("0.0.0.0/0.", InputDataNotConvertibleExc),
("0.0.0.0/0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.", InputDataNotConvertibleExc),
("::/0", ipaddress.ip_network("::/0")),
("::/32", ipaddress.ip_network("::/32")),
("fd12:3456:7890:abcd::/64", ipaddress.ip_network("fd12:3456:7890:abcd::/64")),
("1::/0", InputDataNotConvertibleExc),
("::abcd/32", InputDataNotConvertibleExc),
("fd12:3456:7890:abcd::7890/64", InputDataNotConvertibleExc),
("fd12:3456:7890::abcd::/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InvalidInputDataExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff", InvalidInputDataExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InvalidInputDataExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff", InvalidInputDataExc),
("2001:db00::/24", ipaddress.ip_network("2001:db00::/24")),
("2001:db00::/ffff:ff00::", InputDataNotConvertibleExc),
("fe80::/10", ipaddress.ip_network("fe80::/10")),
("FE80::/10", ipaddress.ip_network("fe80::/10")),
("Fe80::/10", ipaddress.ip_network("fe80::/10")),
("fE80::/10", ipaddress.ip_network("fe80::/10")),
("fe80::%enp4s0/64", ipaddress.ip_network("fe80::%enp4s0/64")),
("fe80::%Připojení k síti/64", ipaddress.ip_network("fe80::%Připojení k síti/64")),
("fe80::%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%abc/64", InputDataNotConvertibleExc),
("2001:db8::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0:0:0/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("2001:0db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("::/128", ipaddress.ip_network("::/128")),
("::", InvalidInputDataExc),
(":/128", InputDataNotConvertibleExc),
(":::/128", InputDataNotConvertibleExc),
("::ffff:127.0.0.0/104", ipaddress.ip_network("::ffff:7f00:0/104")),
("::ffff:127.0.0.1/104", InputDataNotConvertibleExc),
("::ffff:127.0.0..0/104", InputDataNotConvertibleExc),
("::ffff:127.0.0..1/104", InputDataNotConvertibleExc),
("::ffff:127.0.0.0./104", InputDataNotConvertibleExc),
("::ffff:127.0.0.1./104", InputDataNotConvertibleExc),
("::/-1", InputDataNotConvertibleExc),
("::/129", InputDataNotConvertibleExc),
("::/000", ipaddress.ip_network("::/0")),
("::/032", ipaddress.ip_network("::/32")),
("::/320", InputDataNotConvertibleExc),
("::/000032", ipaddress.ip_network("::/32")),
("::/000_032", InputDataNotConvertibleExc),
("/", InputDataNotConvertibleExc),
("/32", InputDataNotConvertibleExc),
("/0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/", InputDataNotConvertibleExc),
("0.0.0.0:32", InvalidInputDataExc),
("0.0.0.0\\32", InvalidInputDataExc),
("0.0.0.0a32", InvalidInputDataExc),
("0.0.0.0 32", InvalidInputDataExc),
("0.0.0.0/\x00", InputDataNotConvertibleExc),
("\x00/32", InputDataNotConvertibleExc),
("127.0.0.1\x00/32", InputDataNotConvertibleExc),
("\x00127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.\x001/32", InputDataNotConvertibleExc),
("127.0.0.1/\x0032", InputDataNotConvertibleExc),
("127.0.0.1/3\x002", InputDataNotConvertibleExc),
("127.0.0.1/32\x00", InputDataNotConvertibleExc),
("0.0.0.0/t", InputDataNotConvertibleExc),
("t/32", InputDataNotConvertibleExc),
("127.0.0.1t/32", InputDataNotConvertibleExc),
("t127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.t1/32", InputDataNotConvertibleExc),
("127.0.0.1/t32", InputDataNotConvertibleExc),
("127.0.0.1/3t2", InputDataNotConvertibleExc),
("127.0.0.1/32t", InputDataNotConvertibleExc),
("0.0.0.0/ ", InputDataNotConvertibleExc),
(" /32", InputDataNotConvertibleExc),
("127.0.0.1 /32", InputDataNotConvertibleExc),
("127.0.0. 1/32", InputDataNotConvertibleExc),
("127.0.0.1/ 32", InputDataNotConvertibleExc),
("127.0.0.1/3 2", InputDataNotConvertibleExc),
("2001:db8::/", InputDataNotConvertibleExc),
("2001:db8::+128", InvalidInputDataExc),
("2001:db8::\\128", InvalidInputDataExc),
("2001:db8::x128", InvalidInputDataExc),
("2001:db8:: 128", InvalidInputDataExc),
("2001:db8::/\x00", InputDataNotConvertibleExc),
("\x00/128", InputDataNotConvertibleExc),
("2001:db8::\x00/128", InputDataNotConvertibleExc),
("\x002001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:\x00:/128", InputDataNotConvertibleExc),
("2001:db8::/\x00128", InputDataNotConvertibleExc),
("2001:db8::/12\x008", InputDataNotConvertibleExc),
("2001:db8::/128\x00", InputDataNotConvertibleExc),
("2001:db8::/t", InputDataNotConvertibleExc),
("t/128", InputDataNotConvertibleExc),
("2001:db8::t/128", InputDataNotConvertibleExc),
("t2001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:t:/128", InputDataNotConvertibleExc),
("2001:db8::/t128", InputDataNotConvertibleExc),
("2001:db8::/12t8", InputDataNotConvertibleExc),
("2001:db8::/128t", InputDataNotConvertibleExc),
("2001:db8::/ ", InputDataNotConvertibleExc),
(" /128", InputDataNotConvertibleExc),
("2001:db8:: /128", InputDataNotConvertibleExc),
("2001:db8: :/128", InputDataNotConvertibleExc),
("2001:db8::/ 128", InputDataNotConvertibleExc),
("2001:db8::/12 8", InputDataNotConvertibleExc),
("\r\n 127.0.0.1/32\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/32 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1/255.255.255.255\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/255.255.255.255 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1\t", InvalidInputDataExc),
("\v\f127.0.0.1 ", InvalidInputDataExc),
("\r\n 2001:db8::1def/128\t", ipaddress.ip_network("2001:db8::1def/128")),
("\v\f2001:db8::1def/128 ", ipaddress.ip_network("2001:db8::1def/128")),
("\r\n 2001:db8::1def\t", InvalidInputDataExc),
("\v\f2001:db8::1def ", InvalidInputDataExc),
("\x00", InvalidInputDataExc),
("\x01", InvalidInputDataExc),
("\x01" * 4, InvalidInputDataExc),
("\x01" * 16, InvalidInputDataExc),
("", InvalidInputDataExc),
("hello", InvalidInputDataExc),
(" \t\t Hello World! \r\v\n", InvalidInputDataExc),
("\vf sadf asdf asd\r\rf as\ndfa sdfsa \t ", InvalidInputDataExc),
(None, InputDataTypeNotInAllowlistExc),
# (True, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
# (False, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
(-(2 ** 16), InputDataTypeNotInAllowlistExc),
(-1, InputDataTypeNotInAllowlistExc),
(0, InputDataTypeNotInAllowlistExc),
(1, InputDataTypeNotInAllowlistExc),
(2131501624, InputDataTypeNotInAllowlistExc),
((2 ** 32) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 32), InputDataTypeNotInAllowlistExc),
((2 ** 32) + 1, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748, InputDataTypeNotInAllowlistExc),
((2 ** 128) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 128), InputDataTypeNotInAllowlistExc),
((2 ** 128) + 1, InputDataTypeNotInAllowlistExc),
((2 ** 256), InputDataTypeNotInAllowlistExc),
(-0.0, InputDataTypeNotInAllowlistExc),
(0.0, InputDataTypeNotInAllowlistExc),
(0.5, InputDataTypeNotInAllowlistExc),
(1.0, InputDataTypeNotInAllowlistExc),
(1.5, InputDataTypeNotInAllowlistExc),
(2131501624.0, InputDataTypeNotInAllowlistExc),
(2131501624.5, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.0, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.5, InputDataTypeNotInAllowlistExc),
(float("inf"), InputDataTypeNotInAllowlistExc),
(float("-inf"), InputDataTypeNotInAllowlistExc),
(float("nan"), InputDataTypeNotInAllowlistExc),
(b'', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4/32', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844/128', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844', InputDataTypeNotInAllowlistExc),
(b'\x00', InputDataTypeNotInAllowlistExc),
(b'\x01', InputDataTypeNotInAllowlistExc),
(b'\x01' * 3, InputDataTypeNotInAllowlistExc),
(b'\x01' * 4, InputDataTypeNotInAllowlistExc),
(b'\x01' * 5, InputDataTypeNotInAllowlistExc),
(b'\x01' * 15, InputDataTypeNotInAllowlistExc),
(b'\x01' * 16, InputDataTypeNotInAllowlistExc),
(b'\x01' * 17, InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
([], InputDataTypeNotInAllowlistExc),
([b'\x00'], InputDataTypeNotInAllowlistExc),
([b'\x01'] * 4, InputDataTypeNotInAllowlistExc),
([b'\x01'] * 16, InputDataTypeNotInAllowlistExc),
({}, InputDataTypeNotInAllowlistExc),
({b'\x01': b'\x01'}, InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("127.0.0.1"), InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("2001:db8::abcd"), InputDataTypeNotInAllowlistExc),
(int, InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject, InputDataTypeNotInAllowlistExc),
(datetime.datetime.now(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().date(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().time(), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://www.google.com/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://127.0.0.1/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://[::1]/test"), InputDataTypeNotInAllowlistExc),
(uuid.UUID('{12345678-1234-5678-1234-567812345678}'), InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject(), InputDataTypeNotInAllowlistExc),
(StringableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(StringableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject(2131501624), InputDataTypeNotInAllowlistExc),
(StringableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingStringableObject(), InputDataTypeNotInAllowlistExc),
(IntableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(IntableObject(2131501624), InputDataTypeNotInAllowlistExc),
(IntableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingIntableObject(), InputDataTypeNotInAllowlistExc),
(BytesableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(BytesableObject(2131501624), InputDataTypeNotInAllowlistExc),
(BytesableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingBytesableObject(), InputDataTypeNotInAllowlistExc),
# (("0.0.0.0", False), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
# (("0.0.0.0", True), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
(("127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((2131501624, 32), InputDataTypeNotInAllowlistExc),
((b"\x01" * 4, 32), InputDataTypeNotInAllowlistExc),
(("::", 8), InputDataTypeNotInAllowlistExc),
(("::", "8"), InputDataTypeNotInAllowlistExc),
(("::", "008"), InputDataTypeNotInAllowlistExc),
(("::", b"8"), InputDataTypeNotInAllowlistExc),
(("::", b"008"), InputDataTypeNotInAllowlistExc),
(("::1", 8), InputDataTypeNotInAllowlistExc),
(("::1", "8"), InputDataTypeNotInAllowlistExc),
(("::1", "008"), InputDataTypeNotInAllowlistExc),
(("::1", b"8"), InputDataTypeNotInAllowlistExc),
(("::1", b"008"), InputDataTypeNotInAllowlistExc),
((b"::", 8), InputDataTypeNotInAllowlistExc),
((b"::", "8"), InputDataTypeNotInAllowlistExc),
((b"::", "008"), InputDataTypeNotInAllowlistExc),
((b"::", b"8"), InputDataTypeNotInAllowlistExc),
((b"::", b"008"), InputDataTypeNotInAllowlistExc),
((42541956123769884636017138956568135748, 128), InputDataTypeNotInAllowlistExc),
((b"\x01" * 16, 128), InputDataTypeNotInAllowlistExc),
((), InputDataTypeNotInAllowlistExc),
(("127.0.0.0",), InputDataTypeNotInAllowlistExc), # ?????
(("127.1.2.3",), InputDataTypeNotInAllowlistExc), # ?????
(("127.0.0.0", 8, 8), InputDataTypeNotInAllowlistExc),
(("::",), InputDataTypeNotInAllowlistExc), # ?????
(("::abcd",), InputDataTypeNotInAllowlistExc), # ?????
(("::", 8, 8), InputDataTypeNotInAllowlistExc),
)),
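# MODE_RATIONAL with ignore_set_host_bits=True: string and ipaddress.IPv4Network/IPv6Network
# inputs are accepted, and networks given with set host bits (e.g. "8.8.4.4/24") are normalized
# to their network address ("8.8.4.0/24") instead of raising InputDataNotConvertibleExc. Bare
# addresses without a prefix still raise InvalidInputDataExc; cases guarded by 'lzt' presumably
# depend on whether the running Python's 'ipaddress' module tolerates leading zeros in IPv4 octets.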
(IPNetworkBlueprint(parsing_mode=ParsingMode.MODE_RATIONAL, ignore_set_host_bits=True), (
(ipaddress.IPv4Network("127.0.0.0/8"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/8", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/255.0.0.0", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/0.255.255.255", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.12.34.56/32"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/32", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv6Network("2001:db8::/64"), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8::abcd/64", strict=False), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
("0.0.0.0/0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/8", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/24", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/24", InputDataNotConvertibleExc),
("8.525312/24", InputDataNotConvertibleExc),
("134743040/24", InputDataNotConvertibleExc),
("010.8.04.0/24", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/24", InputDataNotConvertibleExc),
("127.123.045.000/24", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/24", InputDataNotConvertibleExc),
("01111111000000000000000000000000/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/24", InputDataNotConvertibleExc),
("8.8.0x4.0/24", InputDataNotConvertibleExc),
("127..0/24", InputDataNotConvertibleExc),
("127.0..0/24", InputDataNotConvertibleExc),
("127.0.0..0/24", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/255.0.0.0", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/255.255.255.0", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/255.255.255.0", InputDataNotConvertibleExc),
("8.525312/255.255.255.0", InputDataNotConvertibleExc),
("134743040/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.0/255.255.255.0", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.000/255.255.255.0", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000000/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.0/255.255.255.0", InputDataNotConvertibleExc),
("127..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0..0/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..0/255.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.255", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.0/0.0.0.255", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1024/0.0.0.255", InputDataNotConvertibleExc),
("8.525312/0.0.0.255", InputDataNotConvertibleExc),
("134743040/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.0/0.0.0.255", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.0/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.000/0.0.0.255", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.000/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000000/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x0/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.0/0.0.0.255", InputDataNotConvertibleExc),
("127..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0..0/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..0/0.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/32", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/32", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/32", InputDataNotConvertibleExc),
("8.525316/32", InputDataNotConvertibleExc),
("134743044/32", InputDataNotConvertibleExc),
("010.8.04.4/32", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/32", InputDataNotConvertibleExc),
("127.123.045.001/32", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/32", InputDataNotConvertibleExc),
("01111111000000000000000000000001/32", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/32", InputDataNotConvertibleExc),
("8.8.0x4.4/32", InputDataNotConvertibleExc),
("127..1/32", InputDataNotConvertibleExc),
("127.0..1/32", InputDataNotConvertibleExc),
("127.0.0..1/32", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.255", ipaddress.ip_network("0.0.0.0/32")),
("8.8.4.4/255.255.255.255", ipaddress.ip_network("8.8.4.4/32")),
("8.8.1028/255.255.255.255", InputDataNotConvertibleExc),
("8.525316/255.255.255.255", InputDataNotConvertibleExc),
("134743044/255.255.255.255", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.255", (ipaddress.ip_network("10.8.4.4/32") if lzt else InputDataNotConvertibleExc)),
("127.1/255.255.255.255", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.255", (ipaddress.ip_network("127.123.45.1/32") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/255.255.255.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.255", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.255", InputDataNotConvertibleExc),
("127..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0..1/255.255.255.255", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.255", InputDataNotConvertibleExc),
("0.0.0.0", InvalidInputDataExc),
("8.8.4.4", InvalidInputDataExc),
("8.8.1028", InvalidInputDataExc),
("8.525316", InvalidInputDataExc),
("134743044", InvalidInputDataExc),
("010.8.04.4", InvalidInputDataExc),
("127.1", InvalidInputDataExc),
("127.123.045.001", InvalidInputDataExc),
("0127.123.045.001", InvalidInputDataExc),
("01111111000000000000000000000001", InvalidInputDataExc),
("0x8.0x8.0x4.0x4", InvalidInputDataExc),
("8.8.0x4.4", InvalidInputDataExc),
("127..1", InvalidInputDataExc),
("127.0..1", InvalidInputDataExc),
("127.0.0..1", InvalidInputDataExc),
("127.0.0.0/0", ipaddress.ip_network("0.0.0.0/0")),
("0.1.0.0/8", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.4/24", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1028/24", InputDataNotConvertibleExc),
("8.525316/24", InputDataNotConvertibleExc),
("134743044/24", InputDataNotConvertibleExc),
("010.8.04.4/24", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.1/24", InputDataNotConvertibleExc),
("127.123.045.001/24", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/24", InputDataNotConvertibleExc),
("01111111000000000000000000000001/24", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/24", InputDataNotConvertibleExc),
("8.8.0x4.4/24", InputDataNotConvertibleExc),
("127..4/24", InputDataNotConvertibleExc),
("127.0..4/24", InputDataNotConvertibleExc),
("127.0.0..4/24", InputDataNotConvertibleExc),
("127.0.0.0/0.0.0.0", ipaddress.ip_network("0.0.0.0/0")),
("0.1.0.0/255.0.0.0", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.4/255.255.255.0", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1028/255.255.255.0", InputDataNotConvertibleExc),
("8.525316/255.255.255.0", InputDataNotConvertibleExc),
("134743044/255.255.255.0", InputDataNotConvertibleExc),
("010.8.04.4/255.255.255.0", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.1/255.255.255.0", InputDataNotConvertibleExc),
("127.123.045.001/255.255.255.0", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/255.255.255.0", InputDataNotConvertibleExc),
("01111111000000000000000000000001/255.255.255.0", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/255.255.255.0", InputDataNotConvertibleExc),
("8.8.0x4.4/255.255.255.0", InputDataNotConvertibleExc),
("127..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0..1/255.255.255.0", InputDataNotConvertibleExc),
("127.0.0..1/255.255.255.0", InputDataNotConvertibleExc),
("0.1.0.0/0.255.255.255", ipaddress.ip_network("0.0.0.0/8")),
("8.8.4.4/0.0.0.255", ipaddress.ip_network("8.8.4.0/24")),
("8.8.1028/0.0.0.255", InputDataNotConvertibleExc),
("8.525316/0.0.0.255", InputDataNotConvertibleExc),
("134743044/0.0.0.255", InputDataNotConvertibleExc),
("010.8.04.4/0.0.0.255", (ipaddress.ip_network("10.8.4.0/24") if lzt else InputDataNotConvertibleExc)),
("127.1/0.0.0.255", InputDataNotConvertibleExc),
("127.123.045.001/0.0.0.255", (ipaddress.ip_network("127.123.45.0/24") if lzt else InputDataNotConvertibleExc)),
("0127.123.045.001/0.0.0.255", InputDataNotConvertibleExc),
("01111111000000000000000000000001/0.0.0.255", InputDataNotConvertibleExc),
("0x8.0x8.0x4.0x4/0.0.0.255", InputDataNotConvertibleExc),
("8.8.0x4.4/0.0.0.255", InputDataNotConvertibleExc),
("127..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0..1/0.0.0.255", InputDataNotConvertibleExc),
("127.0.0.0/08", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/00008", ipaddress.ip_network("127.0.0.0/8")),
("127.0.0.0/000_008", InputDataNotConvertibleExc), # "000_008" is convertible by int(), though
("127.0.0.0/8.0", InputDataNotConvertibleExc),
("127.0.0.0/8.", InputDataNotConvertibleExc),
("0.0.0.0/-1", InputDataNotConvertibleExc),
("0.0.0.0/33", InputDataNotConvertibleExc),
("0.0.0.0/80", InputDataNotConvertibleExc),
("0.0.0.0/127.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/8.8.4.4", InputDataNotConvertibleExc),
("0.0.0.0/0.255.255.0", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/0.255.0.0", InputDataNotConvertibleExc),
("0.0.0.0/255.0.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.0.255", InputDataNotConvertibleExc),
("0.0.0.0/255.0.255.0", InputDataNotConvertibleExc),
("0.0.0.0/255.255.254.255", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.253", InputDataNotConvertibleExc),
("0.0.0.0/000.000.000.000", ipaddress.ip_network("0.0.0.0/0")),
("0.0.0.0/0000.000.000.000", InputDataNotConvertibleExc),
("0.0.0.0/255.255.255.000", ipaddress.ip_network("0.0.0.0/24")),
("0.0.0.0/255.255.255.0000", InputDataNotConvertibleExc),
("0.0.0.0/0255.255.255.000", InputDataNotConvertibleExc),
("0.0.0.0/0.", InputDataNotConvertibleExc),
("0.0.0.0/0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0..0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/0.0.0.0.", InputDataNotConvertibleExc),
("::/0", ipaddress.ip_network("::/0")),
("::/32", ipaddress.ip_network("::/32")),
("fd12:3456:7890:abcd::/64", ipaddress.ip_network("fd12:3456:7890:abcd::/64")),
("1::/0", ipaddress.ip_network("::/0")),
("::abcd/32", ipaddress.ip_network("::/32")),
("fd12:3456:7890:abcd::7890/64", ipaddress.ip_network("fd12:3456:7890:abcd::/64")),
("fd12:3456:7890::abcd::/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64", ipaddress.ip_network("ffff:ffff:ffff:ffff::/64")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", ipaddress.ip_network("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128")),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InvalidInputDataExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff", InvalidInputDataExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InvalidInputDataExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataNotConvertibleExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff", InvalidInputDataExc),
("2001:db00::/24", ipaddress.ip_network("2001:db00::/24")),
("2001:db00::/ffff:ff00::", InputDataNotConvertibleExc),
("fe80::/10", ipaddress.ip_network("fe80::/10")),
("FE80::/10", ipaddress.ip_network("fe80::/10")),
("Fe80::/10", ipaddress.ip_network("fe80::/10")),
("fE80::/10", ipaddress.ip_network("fe80::/10")),
("fe80::%enp4s0/64", ipaddress.ip_network("fe80::%enp4s0/64")),
("fe80::%Připojení k síti/64", ipaddress.ip_network("fe80::%Připojení k síti/64")),
("fe80::%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%/64", InputDataNotConvertibleExc),
("fe80::%enp4s0%abc/64", InputDataNotConvertibleExc),
("2001:db8::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0:0:0/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0:0:0:0::/48", ipaddress.ip_network("2001:db8::/48")),
("2001:db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("2001:0db8:0000:0000:0000:0000:0000:0000/48", ipaddress.ip_network("2001:db8::/48")),
("::/128", ipaddress.ip_network("::/128")),
("::", InvalidInputDataExc),
(":/128", InputDataNotConvertibleExc),
(":::/128", InputDataNotConvertibleExc),
("::ffff:127.0.0.0/104", ipaddress.ip_network("::ffff:7f00:0/104")),
("::ffff:127.0.0.1/104", ipaddress.ip_network("::ffff:7f00:0/104")),
("::ffff:127.0.0..0/104", InputDataNotConvertibleExc),
("::ffff:127.0.0..1/104", InputDataNotConvertibleExc),
("::ffff:127.0.0.0./104", InputDataNotConvertibleExc),
("::ffff:127.0.0.1./104", InputDataNotConvertibleExc),
("::/-1", InputDataNotConvertibleExc),
("::/129", InputDataNotConvertibleExc),
("::/000", ipaddress.ip_network("::/0")),
("::/032", ipaddress.ip_network("::/32")),
("::/320", InputDataNotConvertibleExc),
("::/000032", ipaddress.ip_network("::/32")),
("::/000_032", InputDataNotConvertibleExc),
("/", InputDataNotConvertibleExc),
("/32", InputDataNotConvertibleExc),
("/0.0.0.0", InputDataNotConvertibleExc),
("0.0.0.0/", InputDataNotConvertibleExc),
("0.0.0.0:32", InvalidInputDataExc),
("0.0.0.0\\32", InvalidInputDataExc),
("0.0.0.0a32", InvalidInputDataExc),
("0.0.0.0 32", InvalidInputDataExc),
("0.0.0.0/\x00", InputDataNotConvertibleExc),
("\x00/32", InputDataNotConvertibleExc),
("127.0.0.1\x00/32", InputDataNotConvertibleExc),
("\x00127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.\x001/32", InputDataNotConvertibleExc),
("127.0.0.1/\x0032", InputDataNotConvertibleExc),
("127.0.0.1/3\x002", InputDataNotConvertibleExc),
("127.0.0.1/32\x00", InputDataNotConvertibleExc),
("0.0.0.0/t", InputDataNotConvertibleExc),
("t/32", InputDataNotConvertibleExc),
("127.0.0.1t/32", InputDataNotConvertibleExc),
("t127.0.0.1/32", InputDataNotConvertibleExc),
("127.0.0.t1/32", InputDataNotConvertibleExc),
("127.0.0.1/t32", InputDataNotConvertibleExc),
("127.0.0.1/3t2", InputDataNotConvertibleExc),
("127.0.0.1/32t", InputDataNotConvertibleExc),
("0.0.0.0/ ", InputDataNotConvertibleExc),
(" /32", InputDataNotConvertibleExc),
("127.0.0.1 /32", InputDataNotConvertibleExc),
("127.0.0. 1/32", InputDataNotConvertibleExc),
("127.0.0.1/ 32", InputDataNotConvertibleExc),
("127.0.0.1/3 2", InputDataNotConvertibleExc),
("2001:db8::/", InputDataNotConvertibleExc),
("2001:db8::+128", InvalidInputDataExc),
("2001:db8::\\128", InvalidInputDataExc),
("2001:db8::x128", InvalidInputDataExc),
("2001:db8:: 128", InvalidInputDataExc),
("2001:db8::/\x00", InputDataNotConvertibleExc),
("\x00/128", InputDataNotConvertibleExc),
("2001:db8::\x00/128", InputDataNotConvertibleExc),
("\x002001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:\x00:/128", InputDataNotConvertibleExc),
("2001:db8::/\x00128", InputDataNotConvertibleExc),
("2001:db8::/12\x008", InputDataNotConvertibleExc),
("2001:db8::/128\x00", InputDataNotConvertibleExc),
("2001:db8::/t", InputDataNotConvertibleExc),
("t/128", InputDataNotConvertibleExc),
("2001:db8::t/128", InputDataNotConvertibleExc),
("t2001:db8::/128", InputDataNotConvertibleExc),
("2001:db8:t:/128", InputDataNotConvertibleExc),
("2001:db8::/t128", InputDataNotConvertibleExc),
("2001:db8::/12t8", InputDataNotConvertibleExc),
("2001:db8::/128t", InputDataNotConvertibleExc),
("2001:db8::/ ", InputDataNotConvertibleExc),
(" /128", InputDataNotConvertibleExc),
("2001:db8:: /128", InputDataNotConvertibleExc),
("2001:db8: :/128", InputDataNotConvertibleExc),
("2001:db8::/ 128", InputDataNotConvertibleExc),
("2001:db8::/12 8", InputDataNotConvertibleExc),
("\r\n 127.0.0.1/32\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/32 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1/255.255.255.255\t", ipaddress.ip_network("127.0.0.1/32")),
("\v\f127.0.0.1/255.255.255.255 ", ipaddress.ip_network("127.0.0.1/32")),
("\r\n 127.0.0.1\t", InvalidInputDataExc),
("\v\f127.0.0.1 ", InvalidInputDataExc),
("\r\n 2001:db8::1def/128\t", ipaddress.ip_network("2001:db8::1def/128")),
("\v\f2001:db8::1def/128 ", ipaddress.ip_network("2001:db8::1def/128")),
("\r\n 2001:db8::1def\t", InvalidInputDataExc),
("\v\f2001:db8::1def ", InvalidInputDataExc),
("\x00", InvalidInputDataExc),
("\x01", InvalidInputDataExc),
("\x01" * 4, InvalidInputDataExc),
("\x01" * 16, InvalidInputDataExc),
("", InvalidInputDataExc),
("hello", InvalidInputDataExc),
(" \t\t Hello World! \r\v\n", InvalidInputDataExc),
("\vf sadf asdf asd\r\rf as\ndfa sdfsa \t ", InvalidInputDataExc),
(None, InputDataTypeNotInAllowlistExc),
# (True, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
# (False, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
(-(2 ** 16), InputDataTypeNotInAllowlistExc),
(-1, InputDataTypeNotInAllowlistExc),
(0, InputDataTypeNotInAllowlistExc),
(1, InputDataTypeNotInAllowlistExc),
(2131501624, InputDataTypeNotInAllowlistExc),
((2 ** 32) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 32), InputDataTypeNotInAllowlistExc),
((2 ** 32) + 1, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748, InputDataTypeNotInAllowlistExc),
((2 ** 128) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 128), InputDataTypeNotInAllowlistExc),
((2 ** 128) + 1, InputDataTypeNotInAllowlistExc),
((2 ** 256), InputDataTypeNotInAllowlistExc),
(-0.0, InputDataTypeNotInAllowlistExc),
(0.0, InputDataTypeNotInAllowlistExc),
(0.5, InputDataTypeNotInAllowlistExc),
(1.0, InputDataTypeNotInAllowlistExc),
(1.5, InputDataTypeNotInAllowlistExc),
(2131501624.0, InputDataTypeNotInAllowlistExc),
(2131501624.5, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.0, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.5, InputDataTypeNotInAllowlistExc),
(float("inf"), InputDataTypeNotInAllowlistExc),
(float("-inf"), InputDataTypeNotInAllowlistExc),
(float("nan"), InputDataTypeNotInAllowlistExc),
(b'', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4/32', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844/128', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844', InputDataTypeNotInAllowlistExc),
(b'\x00', InputDataTypeNotInAllowlistExc),
(b'\x01', InputDataTypeNotInAllowlistExc),
(b'\x01' * 3, InputDataTypeNotInAllowlistExc),
(b'\x01' * 4, InputDataTypeNotInAllowlistExc),
(b'\x01' * 5, InputDataTypeNotInAllowlistExc),
(b'\x01' * 15, InputDataTypeNotInAllowlistExc),
(b'\x01' * 16, InputDataTypeNotInAllowlistExc),
(b'\x01' * 17, InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
([], InputDataTypeNotInAllowlistExc),
([b'\x00'], InputDataTypeNotInAllowlistExc),
([b'\x01'] * 4, InputDataTypeNotInAllowlistExc),
([b'\x01'] * 16, InputDataTypeNotInAllowlistExc),
({}, InputDataTypeNotInAllowlistExc),
({b'\x01': b'\x01'}, InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("127.0.0.1"), InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("2001:db8::abcd"), InputDataTypeNotInAllowlistExc),
(int, InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject, InputDataTypeNotInAllowlistExc),
(datetime.datetime.now(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().date(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().time(), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://www.google.com/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://127.0.0.1/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://[::1]/test"), InputDataTypeNotInAllowlistExc),
(uuid.UUID('{12345678-1234-5678-1234-567812345678}'), InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject(), InputDataTypeNotInAllowlistExc),
(StringableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(StringableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject(2131501624), InputDataTypeNotInAllowlistExc),
(StringableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingStringableObject(), InputDataTypeNotInAllowlistExc),
(IntableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(IntableObject(2131501624), InputDataTypeNotInAllowlistExc),
(IntableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingIntableObject(), InputDataTypeNotInAllowlistExc),
(BytesableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(BytesableObject(2131501624), InputDataTypeNotInAllowlistExc),
(BytesableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingBytesableObject(), InputDataTypeNotInAllowlistExc),
# (("0.0.0.0", False), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
# (("0.0.0.0", True), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
(("127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((2131501624, 32), InputDataTypeNotInAllowlistExc),
((b"\x01" * 4, 32), InputDataTypeNotInAllowlistExc),
(("::", 8), InputDataTypeNotInAllowlistExc),
(("::", "8"), InputDataTypeNotInAllowlistExc),
(("::", "008"), InputDataTypeNotInAllowlistExc),
(("::", b"8"), InputDataTypeNotInAllowlistExc),
(("::", b"008"), InputDataTypeNotInAllowlistExc),
(("::1", 8), InputDataTypeNotInAllowlistExc),
(("::1", "8"), InputDataTypeNotInAllowlistExc),
(("::1", "008"), InputDataTypeNotInAllowlistExc),
(("::1", b"8"), InputDataTypeNotInAllowlistExc),
(("::1", b"008"), InputDataTypeNotInAllowlistExc),
((b"::", 8), InputDataTypeNotInAllowlistExc),
((b"::", "8"), InputDataTypeNotInAllowlistExc),
((b"::", "008"), InputDataTypeNotInAllowlistExc),
((b"::", b"8"), InputDataTypeNotInAllowlistExc),
((b"::", b"008"), InputDataTypeNotInAllowlistExc),
((42541956123769884636017138956568135748, 128), InputDataTypeNotInAllowlistExc),
((b"\x01" * 16, 128), InputDataTypeNotInAllowlistExc),
((), InputDataTypeNotInAllowlistExc),
(("127.0.0.0",), InputDataTypeNotInAllowlistExc), # ?????
(("127.1.2.3",), InputDataTypeNotInAllowlistExc), # ?????
(("127.0.0.0", 8, 8), InputDataTypeNotInAllowlistExc),
(("::",), InputDataTypeNotInAllowlistExc), # ?????
(("::abcd",), InputDataTypeNotInAllowlistExc), # ?????
(("::", 8, 8), InputDataTypeNotInAllowlistExc),
)),
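# MODE_STRICT with ignore_set_host_bits=False: only ipaddress.IPv4Network/IPv6Network instances
# pass the type allowlist; every other input type (including strings that the looser modes would
# parse) is expected to raise InputDataTypeNotInAllowlistExc.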
(IPNetworkBlueprint(parsing_mode=ParsingMode.MODE_STRICT, ignore_set_host_bits=False), (
(ipaddress.IPv4Network("127.0.0.0/8"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/8", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/255.0.0.0", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/0.255.255.255", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.12.34.56/32"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/32", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv6Network("2001:db8::/64"), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8::abcd/64", strict=False), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
("0.0.0.0/0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/8", InputDataTypeNotInAllowlistExc),
("8.8.4.0/24", InputDataTypeNotInAllowlistExc),
("8.8.1024/24", InputDataTypeNotInAllowlistExc),
("8.525312/24", InputDataTypeNotInAllowlistExc),
("134743040/24", InputDataTypeNotInAllowlistExc),
("010.8.04.0/24", InputDataTypeNotInAllowlistExc),
("127.0/24", InputDataTypeNotInAllowlistExc),
("127.123.045.000/24", InputDataTypeNotInAllowlistExc),
("0127.123.045.000/24", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000000/24", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x0/24", InputDataTypeNotInAllowlistExc),
("8.8.0x4.0/24", InputDataTypeNotInAllowlistExc),
("127..0/24", InputDataTypeNotInAllowlistExc),
("127.0..0/24", InputDataTypeNotInAllowlistExc),
("127.0.0..0/24", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.0.0", InputDataTypeNotInAllowlistExc),
("8.8.4.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.1024/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.525312/255.255.255.0", InputDataTypeNotInAllowlistExc),
("134743040/255.255.255.0", InputDataTypeNotInAllowlistExc),
("010.8.04.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.123.045.000/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0127.123.045.000/255.255.255.0", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000000/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.0x4.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127..0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0..0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0.0..0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.4.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.1024/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.525312/0.0.0.255", InputDataTypeNotInAllowlistExc),
("134743040/0.0.0.255", InputDataTypeNotInAllowlistExc),
("010.8.04.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.123.045.000/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0127.123.045.000/0.0.0.255", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000000/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.0x4.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127..0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0..0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0.0..0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/32", InputDataTypeNotInAllowlistExc),
("8.8.4.4/32", InputDataTypeNotInAllowlistExc),
("8.8.1028/32", InputDataTypeNotInAllowlistExc),
("8.525316/32", InputDataTypeNotInAllowlistExc),
("134743044/32", InputDataTypeNotInAllowlistExc),
("010.8.04.4/32", InputDataTypeNotInAllowlistExc),
("127.1/32", InputDataTypeNotInAllowlistExc),
("127.123.045.001/32", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/32", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/32", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/32", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/32", InputDataTypeNotInAllowlistExc),
("127..1/32", InputDataTypeNotInAllowlistExc),
("127.0..1/32", InputDataTypeNotInAllowlistExc),
("127.0.0..1/32", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.4.4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.1028/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.525316/255.255.255.255", InputDataTypeNotInAllowlistExc),
("134743044/255.255.255.255", InputDataTypeNotInAllowlistExc),
("010.8.04.4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.123.045.001/255.255.255.255", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/255.255.255.255", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/255.255.255.255", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127..1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.0..1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.0.0..1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0", InputDataTypeNotInAllowlistExc),
("8.8.4.4", InputDataTypeNotInAllowlistExc),
("8.8.1028", InputDataTypeNotInAllowlistExc),
("8.525316", InputDataTypeNotInAllowlistExc),
("134743044", InputDataTypeNotInAllowlistExc),
("010.8.04.4", InputDataTypeNotInAllowlistExc),
("127.1", InputDataTypeNotInAllowlistExc),
("127.123.045.001", InputDataTypeNotInAllowlistExc),
("0127.123.045.001", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4", InputDataTypeNotInAllowlistExc),
("127..1", InputDataTypeNotInAllowlistExc),
("127.0..1", InputDataTypeNotInAllowlistExc),
("127.0.0..1", InputDataTypeNotInAllowlistExc),
("127.0.0.0/0", InputDataTypeNotInAllowlistExc),
("0.1.0.0/8", InputDataTypeNotInAllowlistExc),
("8.8.4.4/24", InputDataTypeNotInAllowlistExc),
("8.8.1028/24", InputDataTypeNotInAllowlistExc),
("8.525316/24", InputDataTypeNotInAllowlistExc),
("134743044/24", InputDataTypeNotInAllowlistExc),
("010.8.04.4/24", InputDataTypeNotInAllowlistExc),
("127.1/24", InputDataTypeNotInAllowlistExc),
("127.123.045.001/24", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/24", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/24", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/24", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/24", InputDataTypeNotInAllowlistExc),
("127..4/24", InputDataTypeNotInAllowlistExc),
("127.0..4/24", InputDataTypeNotInAllowlistExc),
("127.0.0..4/24", InputDataTypeNotInAllowlistExc),
("127.0.0.0/0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.1.0.0/255.0.0.0", InputDataTypeNotInAllowlistExc),
("8.8.4.4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.1028/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.525316/255.255.255.0", InputDataTypeNotInAllowlistExc),
("134743044/255.255.255.0", InputDataTypeNotInAllowlistExc),
("010.8.04.4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.123.045.001/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/255.255.255.0", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127..1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0..1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0.0..1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0.1.0.0/0.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.4.4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.1028/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.525316/0.0.0.255", InputDataTypeNotInAllowlistExc),
("134743044/0.0.0.255", InputDataTypeNotInAllowlistExc),
("010.8.04.4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.123.045.001/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/0.0.0.255", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127..1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0..1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0.0..1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0.0.0/08", InputDataTypeNotInAllowlistExc),
("127.0.0.0/00008", InputDataTypeNotInAllowlistExc),
("127.0.0.0/000_008", InputDataTypeNotInAllowlistExc), # "000_008" is convertible by int(), though
("127.0.0.0/8.0", InputDataTypeNotInAllowlistExc),
("127.0.0.0/8.", InputDataTypeNotInAllowlistExc),
("0.0.0.0/-1", InputDataTypeNotInAllowlistExc),
("0.0.0.0/33", InputDataTypeNotInAllowlistExc),
("0.0.0.0/80", InputDataTypeNotInAllowlistExc),
("0.0.0.0/127.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/8.8.4.4", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.255.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.255.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.255.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.254.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.253", InputDataTypeNotInAllowlistExc),
("0.0.0.0/000.000.000.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0000.000.000.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.0000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0255.255.255.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0..0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0..0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0.0.", InputDataTypeNotInAllowlistExc),
("::/0", InputDataTypeNotInAllowlistExc),
("::/32", InputDataTypeNotInAllowlistExc),
("fd12:3456:7890:abcd::/64", InputDataTypeNotInAllowlistExc),
("1::/0", InputDataTypeNotInAllowlistExc),
("::abcd/32", InputDataTypeNotInAllowlistExc),
("fd12:3456:7890:abcd::7890/64", InputDataTypeNotInAllowlistExc),
("fd12:3456:7890::abcd::/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataTypeNotInAllowlistExc),
("2001:db00::/24", InputDataTypeNotInAllowlistExc),
("2001:db00::/ffff:ff00::", InputDataTypeNotInAllowlistExc),
("fe80::/10", InputDataTypeNotInAllowlistExc),
("FE80::/10", InputDataTypeNotInAllowlistExc),
("Fe80::/10", InputDataTypeNotInAllowlistExc),
("fE80::/10", InputDataTypeNotInAllowlistExc),
("fe80::%enp4s0/64", InputDataTypeNotInAllowlistExc),
("fe80::%Připojení k síti/64", InputDataTypeNotInAllowlistExc),
("fe80::%/64", InputDataTypeNotInAllowlistExc),
("fe80::%enp4s0%/64", InputDataTypeNotInAllowlistExc),
("fe80::%enp4s0%abc/64", InputDataTypeNotInAllowlistExc),
("2001:db8::/48", InputDataTypeNotInAllowlistExc),
("2001:db8:0:0:0:0:0:0/48", InputDataTypeNotInAllowlistExc),
("2001:db8:0:0:0:0::/48", InputDataTypeNotInAllowlistExc),
("2001:db8:0000:0000:0000:0000:0000:0000/48", InputDataTypeNotInAllowlistExc),
("2001:0db8:0000:0000:0000:0000:0000:0000/48", InputDataTypeNotInAllowlistExc),
("::/128", InputDataTypeNotInAllowlistExc),
("::", InputDataTypeNotInAllowlistExc),
(":/128", InputDataTypeNotInAllowlistExc),
(":::/128", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.0/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.1/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0..0/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0..1/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.0./104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.1./104", InputDataTypeNotInAllowlistExc),
("::/-1", InputDataTypeNotInAllowlistExc),
("::/129", InputDataTypeNotInAllowlistExc),
("::/000", InputDataTypeNotInAllowlistExc),
("::/032", InputDataTypeNotInAllowlistExc),
("::/320", InputDataTypeNotInAllowlistExc),
("::/000032", InputDataTypeNotInAllowlistExc),
("::/000_032", InputDataTypeNotInAllowlistExc),
("/", InputDataTypeNotInAllowlistExc),
("/32", InputDataTypeNotInAllowlistExc),
("/0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/", InputDataTypeNotInAllowlistExc),
("0.0.0.0:32", InputDataTypeNotInAllowlistExc),
("0.0.0.0\\32", InputDataTypeNotInAllowlistExc),
("0.0.0.0a32", InputDataTypeNotInAllowlistExc),
("0.0.0.0 32", InputDataTypeNotInAllowlistExc),
("0.0.0.0/\x00", InputDataTypeNotInAllowlistExc),
("\x00/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1\x00/32", InputDataTypeNotInAllowlistExc),
("\x00127.0.0.1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.\x001/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/\x0032", InputDataTypeNotInAllowlistExc),
("127.0.0.1/3\x002", InputDataTypeNotInAllowlistExc),
("127.0.0.1/32\x00", InputDataTypeNotInAllowlistExc),
("0.0.0.0/t", InputDataTypeNotInAllowlistExc),
("t/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1t/32", InputDataTypeNotInAllowlistExc),
("t127.0.0.1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.t1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/t32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/3t2", InputDataTypeNotInAllowlistExc),
("127.0.0.1/32t", InputDataTypeNotInAllowlistExc),
("0.0.0.0/ ", InputDataTypeNotInAllowlistExc),
(" /32", InputDataTypeNotInAllowlistExc),
("127.0.0.1 /32", InputDataTypeNotInAllowlistExc),
("127.0.0. 1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/ 32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/3 2", InputDataTypeNotInAllowlistExc),
("2001:db8::/", InputDataTypeNotInAllowlistExc),
("2001:db8::+128", InputDataTypeNotInAllowlistExc),
("2001:db8::\\128", InputDataTypeNotInAllowlistExc),
("2001:db8::x128", InputDataTypeNotInAllowlistExc),
("2001:db8:: 128", InputDataTypeNotInAllowlistExc),
("2001:db8::/\x00", InputDataTypeNotInAllowlistExc),
("\x00/128", InputDataTypeNotInAllowlistExc),
("2001:db8::\x00/128", InputDataTypeNotInAllowlistExc),
("\x002001:db8::/128", InputDataTypeNotInAllowlistExc),
("2001:db8:\x00:/128", InputDataTypeNotInAllowlistExc),
("2001:db8::/\x00128", InputDataTypeNotInAllowlistExc),
("2001:db8::/12\x008", InputDataTypeNotInAllowlistExc),
("2001:db8::/128\x00", InputDataTypeNotInAllowlistExc),
("2001:db8::/t", InputDataTypeNotInAllowlistExc),
("t/128", InputDataTypeNotInAllowlistExc),
("2001:db8::t/128", InputDataTypeNotInAllowlistExc),
("t2001:db8::/128", InputDataTypeNotInAllowlistExc),
("2001:db8:t:/128", InputDataTypeNotInAllowlistExc),
("2001:db8::/t128", InputDataTypeNotInAllowlistExc),
("2001:db8::/12t8", InputDataTypeNotInAllowlistExc),
("2001:db8::/128t", InputDataTypeNotInAllowlistExc),
("2001:db8::/ ", InputDataTypeNotInAllowlistExc),
(" /128", InputDataTypeNotInAllowlistExc),
("2001:db8:: /128", InputDataTypeNotInAllowlistExc),
("2001:db8: :/128", InputDataTypeNotInAllowlistExc),
("2001:db8::/ 128", InputDataTypeNotInAllowlistExc),
("2001:db8::/12 8", InputDataTypeNotInAllowlistExc),
("\r\n 127.0.0.1/32\t", InputDataTypeNotInAllowlistExc),
("\v\f127.0.0.1/32 ", InputDataTypeNotInAllowlistExc),
("\r\n 127.0.0.1/255.255.255.255\t", InputDataTypeNotInAllowlistExc),
("\v\f127.0.0.1/255.255.255.255 ", InputDataTypeNotInAllowlistExc),
("\r\n 127.0.0.1\t", InputDataTypeNotInAllowlistExc),
("\v\f127.0.0.1 ", InputDataTypeNotInAllowlistExc),
("\r\n 2001:db8::1def/128\t", InputDataTypeNotInAllowlistExc),
("\v\f2001:db8::1def/128 ", InputDataTypeNotInAllowlistExc),
("\r\n 2001:db8::1def\t", InputDataTypeNotInAllowlistExc),
("\v\f2001:db8::1def ", InputDataTypeNotInAllowlistExc),
("\x00", InputDataTypeNotInAllowlistExc),
("\x01", InputDataTypeNotInAllowlistExc),
("\x01" * 4, InputDataTypeNotInAllowlistExc),
("\x01" * 16, InputDataTypeNotInAllowlistExc),
("", InputDataTypeNotInAllowlistExc),
("hello", InputDataTypeNotInAllowlistExc),
(" \t\t Hello World! \r\v\n", InputDataTypeNotInAllowlistExc),
("\vf sadf asdf asd\r\rf as\ndfa sdfsa \t ", InputDataTypeNotInAllowlistExc),
(None, InputDataTypeNotInAllowlistExc),
# (True, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
# (False, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
(-(2 ** 16), InputDataTypeNotInAllowlistExc),
(-1, InputDataTypeNotInAllowlistExc),
(0, InputDataTypeNotInAllowlistExc),
(1, InputDataTypeNotInAllowlistExc),
(2131501624, InputDataTypeNotInAllowlistExc),
((2 ** 32) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 32), InputDataTypeNotInAllowlistExc),
((2 ** 32) + 1, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748, InputDataTypeNotInAllowlistExc),
((2 ** 128) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 128), InputDataTypeNotInAllowlistExc),
((2 ** 128) + 1, InputDataTypeNotInAllowlistExc),
((2 ** 256), InputDataTypeNotInAllowlistExc),
(-0.0, InputDataTypeNotInAllowlistExc),
(0.0, InputDataTypeNotInAllowlistExc),
(0.5, InputDataTypeNotInAllowlistExc),
(1.0, InputDataTypeNotInAllowlistExc),
(1.5, InputDataTypeNotInAllowlistExc),
(2131501624.0, InputDataTypeNotInAllowlistExc),
(2131501624.5, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.0, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.5, InputDataTypeNotInAllowlistExc),
(float("inf"), InputDataTypeNotInAllowlistExc),
(float("-inf"), InputDataTypeNotInAllowlistExc),
(float("nan"), InputDataTypeNotInAllowlistExc),
(b'', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4/32', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844/128', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844', InputDataTypeNotInAllowlistExc),
(b'\x00', InputDataTypeNotInAllowlistExc),
(b'\x01', InputDataTypeNotInAllowlistExc),
(b'\x01' * 3, InputDataTypeNotInAllowlistExc),
(b'\x01' * 4, InputDataTypeNotInAllowlistExc),
(b'\x01' * 5, InputDataTypeNotInAllowlistExc),
(b'\x01' * 15, InputDataTypeNotInAllowlistExc),
(b'\x01' * 16, InputDataTypeNotInAllowlistExc),
(b'\x01' * 17, InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
([], InputDataTypeNotInAllowlistExc),
([b'\x00'], InputDataTypeNotInAllowlistExc),
([b'\x01'] * 4, InputDataTypeNotInAllowlistExc),
([b'\x01'] * 16, InputDataTypeNotInAllowlistExc),
({}, InputDataTypeNotInAllowlistExc),
({b'\x01': b'\x01'}, InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("127.0.0.1"), InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("2001:db8::abcd"), InputDataTypeNotInAllowlistExc),
(int, InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject, InputDataTypeNotInAllowlistExc),
(datetime.datetime.now(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().date(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().time(), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://www.google.com/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://127.0.0.1/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://[::1]/test"), InputDataTypeNotInAllowlistExc),
(uuid.UUID('{12345678-1234-5678-1234-567812345678}'), InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject(), InputDataTypeNotInAllowlistExc),
(StringableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(StringableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject(2131501624), InputDataTypeNotInAllowlistExc),
(StringableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingStringableObject(), InputDataTypeNotInAllowlistExc),
(IntableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(IntableObject(2131501624), InputDataTypeNotInAllowlistExc),
(IntableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingIntableObject(), InputDataTypeNotInAllowlistExc),
(BytesableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(BytesableObject(2131501624), InputDataTypeNotInAllowlistExc),
(BytesableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingBytesableObject(), InputDataTypeNotInAllowlistExc),
# (("0.0.0.0", False), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
# (("0.0.0.0", True), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
(("127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((2131501624, 32), InputDataTypeNotInAllowlistExc),
((b"\x01" * 4, 32), InputDataTypeNotInAllowlistExc),
(("::", 8), InputDataTypeNotInAllowlistExc),
(("::", "8"), InputDataTypeNotInAllowlistExc),
(("::", "008"), InputDataTypeNotInAllowlistExc),
(("::", b"8"), InputDataTypeNotInAllowlistExc),
(("::", b"008"), InputDataTypeNotInAllowlistExc),
(("::1", 8), InputDataTypeNotInAllowlistExc),
(("::1", "8"), InputDataTypeNotInAllowlistExc),
(("::1", "008"), InputDataTypeNotInAllowlistExc),
(("::1", b"8"), InputDataTypeNotInAllowlistExc),
(("::1", b"008"), InputDataTypeNotInAllowlistExc),
((b"::", 8), InputDataTypeNotInAllowlistExc),
((b"::", "8"), InputDataTypeNotInAllowlistExc),
((b"::", "008"), InputDataTypeNotInAllowlistExc),
((b"::", b"8"), InputDataTypeNotInAllowlistExc),
((b"::", b"008"), InputDataTypeNotInAllowlistExc),
((42541956123769884636017138956568135748, 128), InputDataTypeNotInAllowlistExc),
((b"\x01" * 16, 128), InputDataTypeNotInAllowlistExc),
((), InputDataTypeNotInAllowlistExc),
(("127.0.0.0",), InputDataTypeNotInAllowlistExc), # ?????
(("127.1.2.3",), InputDataTypeNotInAllowlistExc), # ?????
(("127.0.0.0", 8, 8), InputDataTypeNotInAllowlistExc),
(("::",), InputDataTypeNotInAllowlistExc), # ?????
(("::abcd",), InputDataTypeNotInAllowlistExc), # ?????
(("::", 8, 8), InputDataTypeNotInAllowlistExc),
)),
(IPNetworkBlueprint(parsing_mode=ParsingMode.MODE_STRICT, ignore_set_host_bits=True), (
(ipaddress.IPv4Network("127.0.0.0/8"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/255.0.0.0"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.0.0.0/0.255.255.255"), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/8", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/255.0.0.0", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.1.2.3/0.255.255.255", strict=False), ipaddress.ip_network("127.0.0.0/8")),
(ipaddress.IPv4Network("127.12.34.56/32"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255"), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/32", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv4Network("127.12.34.56/255.255.255.255", strict=False), ipaddress.ip_network("127.12.34.56/32")),
(ipaddress.IPv6Network("2001:db8::/64"), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8::abcd/64", strict=False), ipaddress.ip_network("2001:db8::/64")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6"), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6/128", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
(ipaddress.IPv6Network("2001:db8:1:2:3:4:5:6", strict=False), ipaddress.ip_network("2001:db8:1:2:3:4:5:6/128")),
("0.0.0.0/0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/8", InputDataTypeNotInAllowlistExc),
("8.8.4.0/24", InputDataTypeNotInAllowlistExc),
("8.8.1024/24", InputDataTypeNotInAllowlistExc),
("8.525312/24", InputDataTypeNotInAllowlistExc),
("134743040/24", InputDataTypeNotInAllowlistExc),
("010.8.04.0/24", InputDataTypeNotInAllowlistExc),
("127.0/24", InputDataTypeNotInAllowlistExc),
("127.123.045.000/24", InputDataTypeNotInAllowlistExc),
("0127.123.045.000/24", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000000/24", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x0/24", InputDataTypeNotInAllowlistExc),
("8.8.0x4.0/24", InputDataTypeNotInAllowlistExc),
("127..0/24", InputDataTypeNotInAllowlistExc),
("127.0..0/24", InputDataTypeNotInAllowlistExc),
("127.0.0..0/24", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.0.0", InputDataTypeNotInAllowlistExc),
("8.8.4.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.1024/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.525312/255.255.255.0", InputDataTypeNotInAllowlistExc),
("134743040/255.255.255.0", InputDataTypeNotInAllowlistExc),
("010.8.04.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.123.045.000/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0127.123.045.000/255.255.255.0", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000000/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.0x4.0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127..0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0..0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0.0..0/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.4.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.1024/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.525312/0.0.0.255", InputDataTypeNotInAllowlistExc),
("134743040/0.0.0.255", InputDataTypeNotInAllowlistExc),
("010.8.04.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.123.045.000/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0127.123.045.000/0.0.0.255", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000000/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.0x4.0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127..0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0..0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0.0..0/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/32", InputDataTypeNotInAllowlistExc),
("8.8.4.4/32", InputDataTypeNotInAllowlistExc),
("8.8.1028/32", InputDataTypeNotInAllowlistExc),
("8.525316/32", InputDataTypeNotInAllowlistExc),
("134743044/32", InputDataTypeNotInAllowlistExc),
("010.8.04.4/32", InputDataTypeNotInAllowlistExc),
("127.1/32", InputDataTypeNotInAllowlistExc),
("127.123.045.001/32", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/32", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/32", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/32", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/32", InputDataTypeNotInAllowlistExc),
("127..1/32", InputDataTypeNotInAllowlistExc),
("127.0..1/32", InputDataTypeNotInAllowlistExc),
("127.0.0..1/32", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.4.4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.1028/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.525316/255.255.255.255", InputDataTypeNotInAllowlistExc),
("134743044/255.255.255.255", InputDataTypeNotInAllowlistExc),
("010.8.04.4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.123.045.001/255.255.255.255", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/255.255.255.255", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/255.255.255.255", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127..1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.0..1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("127.0.0..1/255.255.255.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0", InputDataTypeNotInAllowlistExc),
("8.8.4.4", InputDataTypeNotInAllowlistExc),
("8.8.1028", InputDataTypeNotInAllowlistExc),
("8.525316", InputDataTypeNotInAllowlistExc),
("134743044", InputDataTypeNotInAllowlistExc),
("010.8.04.4", InputDataTypeNotInAllowlistExc),
("127.1", InputDataTypeNotInAllowlistExc),
("127.123.045.001", InputDataTypeNotInAllowlistExc),
("0127.123.045.001", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4", InputDataTypeNotInAllowlistExc),
("127..1", InputDataTypeNotInAllowlistExc),
("127.0..1", InputDataTypeNotInAllowlistExc),
("127.0.0..1", InputDataTypeNotInAllowlistExc),
("127.0.0.0/0", InputDataTypeNotInAllowlistExc),
("0.1.0.0/8", InputDataTypeNotInAllowlistExc),
("8.8.4.4/24", InputDataTypeNotInAllowlistExc),
("8.8.1028/24", InputDataTypeNotInAllowlistExc),
("8.525316/24", InputDataTypeNotInAllowlistExc),
("134743044/24", InputDataTypeNotInAllowlistExc),
("010.8.04.4/24", InputDataTypeNotInAllowlistExc),
("127.1/24", InputDataTypeNotInAllowlistExc),
("127.123.045.001/24", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/24", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/24", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/24", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/24", InputDataTypeNotInAllowlistExc),
("127..4/24", InputDataTypeNotInAllowlistExc),
("127.0..4/24", InputDataTypeNotInAllowlistExc),
("127.0.0..4/24", InputDataTypeNotInAllowlistExc),
("127.0.0.0/0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.1.0.0/255.0.0.0", InputDataTypeNotInAllowlistExc),
("8.8.4.4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.1028/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.525316/255.255.255.0", InputDataTypeNotInAllowlistExc),
("134743044/255.255.255.0", InputDataTypeNotInAllowlistExc),
("010.8.04.4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.123.045.001/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/255.255.255.0", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127..1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0..1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("127.0.0..1/255.255.255.0", InputDataTypeNotInAllowlistExc),
("0.1.0.0/0.255.255.255", InputDataTypeNotInAllowlistExc),
("8.8.4.4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.1028/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.525316/0.0.0.255", InputDataTypeNotInAllowlistExc),
("134743044/0.0.0.255", InputDataTypeNotInAllowlistExc),
("010.8.04.4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.123.045.001/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0127.123.045.001/0.0.0.255", InputDataTypeNotInAllowlistExc),
("01111111000000000000000000000001/0.0.0.255", InputDataTypeNotInAllowlistExc),
("0x8.0x8.0x4.0x4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("8.8.0x4.4/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127..1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0..1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0.0..1/0.0.0.255", InputDataTypeNotInAllowlistExc),
("127.0.0.0/08", InputDataTypeNotInAllowlistExc),
("127.0.0.0/00008", InputDataTypeNotInAllowlistExc),
("127.0.0.0/000_008", InputDataTypeNotInAllowlistExc), # "000_008" is convertible by int(), though
("127.0.0.0/8.0", InputDataTypeNotInAllowlistExc),
("127.0.0.0/8.", InputDataTypeNotInAllowlistExc),
("0.0.0.0/-1", InputDataTypeNotInAllowlistExc),
("0.0.0.0/33", InputDataTypeNotInAllowlistExc),
("0.0.0.0/80", InputDataTypeNotInAllowlistExc),
("0.0.0.0/127.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/8.8.4.4", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.255.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.255.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.255.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.0.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.0.255.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.254.255", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.253", InputDataTypeNotInAllowlistExc),
("0.0.0.0/000.000.000.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0000.000.000.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/255.255.255.0000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0255.255.255.000", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0..0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0..0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/0.0.0.0.", InputDataTypeNotInAllowlistExc),
("::/0", InputDataTypeNotInAllowlistExc),
("::/32", InputDataTypeNotInAllowlistExc),
("fd12:3456:7890:abcd::/64", InputDataTypeNotInAllowlistExc),
("1::/0", InputDataTypeNotInAllowlistExc),
("::abcd/32", InputDataTypeNotInAllowlistExc),
("fd12:3456:7890:abcd::7890/64", InputDataTypeNotInAllowlistExc),
("fd12:3456:7890::abcd::/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff::ffff", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff/128", InputDataTypeNotInAllowlistExc),
("ffff:ffff:ffff:ffff:ffff:ffff:ffff", InputDataTypeNotInAllowlistExc),
("2001:db00::/24", InputDataTypeNotInAllowlistExc),
("2001:db00::/ffff:ff00::", InputDataTypeNotInAllowlistExc),
("fe80::/10", InputDataTypeNotInAllowlistExc),
("FE80::/10", InputDataTypeNotInAllowlistExc),
("Fe80::/10", InputDataTypeNotInAllowlistExc),
("fE80::/10", InputDataTypeNotInAllowlistExc),
("fe80::%enp4s0/64", InputDataTypeNotInAllowlistExc),
("fe80::%Připojení k síti/64", InputDataTypeNotInAllowlistExc),
("fe80::%/64", InputDataTypeNotInAllowlistExc),
("fe80::%enp4s0%/64", InputDataTypeNotInAllowlistExc),
("fe80::%enp4s0%abc/64", InputDataTypeNotInAllowlistExc),
("2001:db8::/48", InputDataTypeNotInAllowlistExc),
("2001:db8:0:0:0:0:0:0/48", InputDataTypeNotInAllowlistExc),
("2001:db8:0:0:0:0::/48", InputDataTypeNotInAllowlistExc),
("2001:db8:0000:0000:0000:0000:0000:0000/48", InputDataTypeNotInAllowlistExc),
("2001:0db8:0000:0000:0000:0000:0000:0000/48", InputDataTypeNotInAllowlistExc),
("::/128", InputDataTypeNotInAllowlistExc),
("::", InputDataTypeNotInAllowlistExc),
(":/128", InputDataTypeNotInAllowlistExc),
(":::/128", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.0/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.1/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0..0/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0..1/104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.0./104", InputDataTypeNotInAllowlistExc),
("::ffff:127.0.0.1./104", InputDataTypeNotInAllowlistExc),
("::/-1", InputDataTypeNotInAllowlistExc),
("::/129", InputDataTypeNotInAllowlistExc),
("::/000", InputDataTypeNotInAllowlistExc),
("::/032", InputDataTypeNotInAllowlistExc),
("::/320", InputDataTypeNotInAllowlistExc),
("::/000032", InputDataTypeNotInAllowlistExc),
("::/000_032", InputDataTypeNotInAllowlistExc),
("/", InputDataTypeNotInAllowlistExc),
("/32", InputDataTypeNotInAllowlistExc),
("/0.0.0.0", InputDataTypeNotInAllowlistExc),
("0.0.0.0/", InputDataTypeNotInAllowlistExc),
("0.0.0.0:32", InputDataTypeNotInAllowlistExc),
("0.0.0.0\\32", InputDataTypeNotInAllowlistExc),
("0.0.0.0a32", InputDataTypeNotInAllowlistExc),
("0.0.0.0 32", InputDataTypeNotInAllowlistExc),
("0.0.0.0/\x00", InputDataTypeNotInAllowlistExc),
("\x00/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1\x00/32", InputDataTypeNotInAllowlistExc),
("\x00127.0.0.1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.\x001/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/\x0032", InputDataTypeNotInAllowlistExc),
("127.0.0.1/3\x002", InputDataTypeNotInAllowlistExc),
("127.0.0.1/32\x00", InputDataTypeNotInAllowlistExc),
("0.0.0.0/t", InputDataTypeNotInAllowlistExc),
("t/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1t/32", InputDataTypeNotInAllowlistExc),
("t127.0.0.1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.t1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/t32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/3t2", InputDataTypeNotInAllowlistExc),
("127.0.0.1/32t", InputDataTypeNotInAllowlistExc),
("0.0.0.0/ ", InputDataTypeNotInAllowlistExc),
(" /32", InputDataTypeNotInAllowlistExc),
("127.0.0.1 /32", InputDataTypeNotInAllowlistExc),
("127.0.0. 1/32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/ 32", InputDataTypeNotInAllowlistExc),
("127.0.0.1/3 2", InputDataTypeNotInAllowlistExc),
("2001:db8::/", InputDataTypeNotInAllowlistExc),
("2001:db8::+128", InputDataTypeNotInAllowlistExc),
("2001:db8::\\128", InputDataTypeNotInAllowlistExc),
("2001:db8::x128", InputDataTypeNotInAllowlistExc),
("2001:db8:: 128", InputDataTypeNotInAllowlistExc),
("2001:db8::/\x00", InputDataTypeNotInAllowlistExc),
("\x00/128", InputDataTypeNotInAllowlistExc),
("2001:db8::\x00/128", InputDataTypeNotInAllowlistExc),
("\x002001:db8::/128", InputDataTypeNotInAllowlistExc),
("2001:db8:\x00:/128", InputDataTypeNotInAllowlistExc),
("2001:db8::/\x00128", InputDataTypeNotInAllowlistExc),
("2001:db8::/12\x008", InputDataTypeNotInAllowlistExc),
("2001:db8::/128\x00", InputDataTypeNotInAllowlistExc),
("2001:db8::/t", InputDataTypeNotInAllowlistExc),
("t/128", InputDataTypeNotInAllowlistExc),
("2001:db8::t/128", InputDataTypeNotInAllowlistExc),
("t2001:db8::/128", InputDataTypeNotInAllowlistExc),
("2001:db8:t:/128", InputDataTypeNotInAllowlistExc),
("2001:db8::/t128", InputDataTypeNotInAllowlistExc),
("2001:db8::/12t8", InputDataTypeNotInAllowlistExc),
("2001:db8::/128t", InputDataTypeNotInAllowlistExc),
("2001:db8::/ ", InputDataTypeNotInAllowlistExc),
(" /128", InputDataTypeNotInAllowlistExc),
("2001:db8:: /128", InputDataTypeNotInAllowlistExc),
("2001:db8: :/128", InputDataTypeNotInAllowlistExc),
("2001:db8::/ 128", InputDataTypeNotInAllowlistExc),
("2001:db8::/12 8", InputDataTypeNotInAllowlistExc),
("\r\n 127.0.0.1/32\t", InputDataTypeNotInAllowlistExc),
("\v\f127.0.0.1/32 ", InputDataTypeNotInAllowlistExc),
("\r\n 127.0.0.1/255.255.255.255\t", InputDataTypeNotInAllowlistExc),
("\v\f127.0.0.1/255.255.255.255 ", InputDataTypeNotInAllowlistExc),
("\r\n 127.0.0.1\t", InputDataTypeNotInAllowlistExc),
("\v\f127.0.0.1 ", InputDataTypeNotInAllowlistExc),
("\r\n 2001:db8::1def/128\t", InputDataTypeNotInAllowlistExc),
("\v\f2001:db8::1def/128 ", InputDataTypeNotInAllowlistExc),
("\r\n 2001:db8::1def\t", InputDataTypeNotInAllowlistExc),
("\v\f2001:db8::1def ", InputDataTypeNotInAllowlistExc),
("\x00", InputDataTypeNotInAllowlistExc),
("\x01", InputDataTypeNotInAllowlistExc),
("\x01" * 4, InputDataTypeNotInAllowlistExc),
("\x01" * 16, InputDataTypeNotInAllowlistExc),
("", InputDataTypeNotInAllowlistExc),
("hello", InputDataTypeNotInAllowlistExc),
(" \t\t Hello World! \r\v\n", InputDataTypeNotInAllowlistExc),
("\vf sadf asdf asd\r\rf as\ndfa sdfsa \t ", InputDataTypeNotInAllowlistExc),
(None, InputDataTypeNotInAllowlistExc),
# (True, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
# (False, InputDataTypeNotInAllowlistExc), # Raises DeprecationWarning!
(-(2 ** 16), InputDataTypeNotInAllowlistExc),
(-1, InputDataTypeNotInAllowlistExc),
(0, InputDataTypeNotInAllowlistExc),
(1, InputDataTypeNotInAllowlistExc),
(2131501624, InputDataTypeNotInAllowlistExc),
((2 ** 32) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 32), InputDataTypeNotInAllowlistExc),
((2 ** 32) + 1, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748, InputDataTypeNotInAllowlistExc),
((2 ** 128) - 1, InputDataTypeNotInAllowlistExc),
((2 ** 128), InputDataTypeNotInAllowlistExc),
((2 ** 128) + 1, InputDataTypeNotInAllowlistExc),
((2 ** 256), InputDataTypeNotInAllowlistExc),
(-0.0, InputDataTypeNotInAllowlistExc),
(0.0, InputDataTypeNotInAllowlistExc),
(0.5, InputDataTypeNotInAllowlistExc),
(1.0, InputDataTypeNotInAllowlistExc),
(1.5, InputDataTypeNotInAllowlistExc),
(2131501624.0, InputDataTypeNotInAllowlistExc),
(2131501624.5, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.0, InputDataTypeNotInAllowlistExc),
(42541956123769884636017138956568135748.5, InputDataTypeNotInAllowlistExc),
(float("inf"), InputDataTypeNotInAllowlistExc),
(float("-inf"), InputDataTypeNotInAllowlistExc),
(float("nan"), InputDataTypeNotInAllowlistExc),
(b'', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4/32', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844/128', InputDataTypeNotInAllowlistExc),
(b'8.8.4.4', InputDataTypeNotInAllowlistExc),
(b'2001:4860:4860::8844', InputDataTypeNotInAllowlistExc),
(b'\x00', InputDataTypeNotInAllowlistExc),
(b'\x01', InputDataTypeNotInAllowlistExc),
(b'\x01' * 3, InputDataTypeNotInAllowlistExc),
(b'\x01' * 4, InputDataTypeNotInAllowlistExc),
(b'\x01' * 5, InputDataTypeNotInAllowlistExc),
(b'\x01' * 15, InputDataTypeNotInAllowlistExc),
(b'\x01' * 16, InputDataTypeNotInAllowlistExc),
(b'\x01' * 17, InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(bytearray(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
([], InputDataTypeNotInAllowlistExc),
([b'\x00'], InputDataTypeNotInAllowlistExc),
([b'\x01'] * 4, InputDataTypeNotInAllowlistExc),
([b'\x01'] * 16, InputDataTypeNotInAllowlistExc),
({}, InputDataTypeNotInAllowlistExc),
({b'\x01': b'\x01'}, InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("127.0.0.1"), InputDataTypeNotInAllowlistExc),
(ipaddress.ip_address("2001:db8::abcd"), InputDataTypeNotInAllowlistExc),
(int, InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject, InputDataTypeNotInAllowlistExc),
(datetime.datetime.now(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().date(), InputDataTypeNotInAllowlistExc),
(datetime.datetime.now().time(), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://www.google.com/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://127.0.0.1/test"), InputDataTypeNotInAllowlistExc),
(urllib.parse.urlparse("https://[::1]/test"), InputDataTypeNotInAllowlistExc),
(uuid.UUID('{12345678-1234-5678-1234-567812345678}'), InputDataTypeNotInAllowlistExc),
(theoretical_testutils.EmptyObject(), InputDataTypeNotInAllowlistExc),
(StringableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(StringableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc), # This works only due to an undocumented implementation detail of the 'ipaddress' module and might stop working in the future!
(StringableObject(2131501624), InputDataTypeNotInAllowlistExc),
(StringableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(StringableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingStringableObject(), InputDataTypeNotInAllowlistExc),
(IntableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(IntableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(IntableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(IntableObject(2131501624), InputDataTypeNotInAllowlistExc),
(IntableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(IntableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingIntableObject(), InputDataTypeNotInAllowlistExc),
(BytesableObject(theoretical_testutils.EmptyObject()), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4/32"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844/128"), InputDataTypeNotInAllowlistExc),
(BytesableObject("8.8.4.4"), InputDataTypeNotInAllowlistExc),
(BytesableObject("2001:4860:4860::8844"), InputDataTypeNotInAllowlistExc),
(BytesableObject(2131501624), InputDataTypeNotInAllowlistExc),
(BytesableObject(42541956123769884636017138956568135748), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 4), InputDataTypeNotInAllowlistExc),
(BytesableObject(b'\x01' * 16), InputDataTypeNotInAllowlistExc),
(ExceptionRaisingBytesableObject(), InputDataTypeNotInAllowlistExc),
# (("0.0.0.0", False), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
# (("0.0.0.0", True), InputDataTypeNotInAllowlistExc), # The 'ipaddress' module has inconsistent behaviour when it comes to booleans!
(("127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", 8), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "8"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "008"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"8"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", b"008"), InputDataTypeNotInAllowlistExc), # ???
(("127.0.0.1", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
(("127.0.0.1", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", 8), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"8"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"008"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"255.0.0.0"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", "0.255.255.255"), InputDataTypeNotInAllowlistExc),
((b"127.0.0.0", b"0.255.255.255"), InputDataTypeNotInAllowlistExc),
((2131501624, 32), InputDataTypeNotInAllowlistExc),
((b"\x01" * 4, 32), InputDataTypeNotInAllowlistExc),
(("::", 8), InputDataTypeNotInAllowlistExc),
(("::", "8"), InputDataTypeNotInAllowlistExc),
(("::", "008"), InputDataTypeNotInAllowlistExc),
(("::", b"8"), InputDataTypeNotInAllowlistExc),
(("::", b"008"), InputDataTypeNotInAllowlistExc),
(("::1", 8), InputDataTypeNotInAllowlistExc),
(("::1", "8"), InputDataTypeNotInAllowlistExc),
(("::1", "008"), InputDataTypeNotInAllowlistExc),
(("::1", b"8"), InputDataTypeNotInAllowlistExc),
(("::1", b"008"), InputDataTypeNotInAllowlistExc),
((b"::", 8), InputDataTypeNotInAllowlistExc),
((b"::", "8"), InputDataTypeNotInAllowlistExc),
((b"::", "008"), InputDataTypeNotInAllowlistExc),
((b"::", b"8"), InputDataTypeNotInAllowlistExc),
((b"::", b"008"), InputDataTypeNotInAllowlistExc),
((42541956123769884636017138956568135748, 128), InputDataTypeNotInAllowlistExc),
((b"\x01" * 16, 128), InputDataTypeNotInAllowlistExc),
((), InputDataTypeNotInAllowlistExc),
(("127.0.0.0",), InputDataTypeNotInAllowlistExc), # ?????
(("127.1.2.3",), InputDataTypeNotInAllowlistExc), # ?????
(("127.0.0.0", 8, 8), InputDataTypeNotInAllowlistExc),
(("::",), InputDataTypeNotInAllowlistExc), # ?????
(("::abcd",), InputDataTypeNotInAllowlistExc), # ?????
(("::", 8, 8), InputDataTypeNotInAllowlistExc),
)),
)
@pytest.mark.parametrize(("blueprint", "input_", "output"), theoretical_testutils.test_function_parameter_generator(__IP_NETWORK_BLUEPRINT_TEST_SUITE))
def test_ip_network_blueprint(blueprint, input_, output):
theoretical_testutils.perform_test(blueprint, input_, output)
def test_ip_network_blueprint_default_parsing_mode():
assert IPNetworkBlueprint().get_parsing_mode() == ParsingMode.MODE_RATIONAL
def test_ip_network_blueprint_default_ignore_set_host_bits():
assert IPNetworkBlueprint().are_set_host_bits_ignored() is False
def test_ip_network_blueprint_ignore_set_host_bits():
assert IPNetworkBlueprint(ignore_set_host_bits=True).are_set_host_bits_ignored() is True
| 66.947443
| 233
| 0.65513
|
7c0dd74068d95732f2124b322bdc6e4983a8e69c
| 1,336
|
py
|
Python
|
tests/kyu_7_tests/test_mumbling.py
|
the-zebulan/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 40
|
2016-03-09T12:26:20.000Z
|
2022-03-23T08:44:51.000Z
|
tests/kyu_7_tests/test_mumbling.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | null | null | null |
tests/kyu_7_tests/test_mumbling.py
|
akalynych/CodeWars
|
1eafd1247d60955a5dfb63e4882e8ce86019f43a
|
[
"MIT"
] | 36
|
2016-11-07T19:59:58.000Z
|
2022-03-31T11:18:27.000Z
|
import unittest
from katas.kyu_7.mumbling import accum
class MumblingTestCase(unittest.TestCase):
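    """Tests for the accum function from the kyu-7 'mumbling' kata."""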
def test_equals(self):
self.assertEqual(accum('ZpglnRxqenU'),
'Z-Pp-Ggg-Llll-Nnnnn-Rrrrrr-Xxxxxxx-Qqqqqq'
'qq-Eeeeeeeee-Nnnnnnnnnn-Uuuuuuuuuuu')
def test_equals_2(self):
self.assertEqual(accum('NyffsGeyylB'), 'N-Yy-Fff-Ffff-Sssss-Gggggg-E'
'eeeeee-Yyyyyyyy-Yyyyyyyyy-Ll'
'llllllll-Bbbbbbbbbbb')
def test_equals_3(self):
self.assertEqual(accum('MjtkuBovqrU'), 'M-Jj-Ttt-Kkkk-Uuuuu-Bbbbbb-O'
'oooooo-Vvvvvvvv-Qqqqqqqqq-Rr'
'rrrrrrrr-Uuuuuuuuuuu')
def test_equals_4(self):
self.assertEqual(accum('EvidjUnokmM'), 'E-Vv-Iii-Dddd-Jjjjj-Uuuuuu-N'
'nnnnnn-Oooooooo-Kkkkkkkkk-Mm'
'mmmmmmmm-Mmmmmmmmmmm')
def test_equals_5(self):
self.assertEqual(accum('HbideVbxncC'), 'H-Bb-Iii-Dddd-Eeeee-Vvvvvv-B'
'bbbbbb-Xxxxxxxx-Nnnnnnnnn-Cc'
'cccccccc-Ccccccccccc')
| 43.096774
| 77
| 0.499251
|
43a85e56add3222a196d2983d0f49d6c5c6a9244
| 2,559
|
py
|
Python
|
models/ops/setup.py
|
sunggukcha/QAHOI
|
43e3292f96495431c1dd03ec11597ebec555b6c4
|
[
"Apache-2.0"
] | 84
|
2021-06-22T08:38:17.000Z
|
2022-03-31T14:19:48.000Z
|
mctrans/models/ops/setup.py
|
eshasadia/MCTrans
|
9b8b5677eef584b423d5e1630680a4b667cbe823
|
[
"Apache-2.0"
] | 16
|
2021-06-29T07:18:47.000Z
|
2022-03-31T08:12:15.000Z
|
mctrans/models/ops/setup.py
|
eshasadia/MCTrans
|
9b8b5677eef584b423d5e1630680a4b667cbe823
|
[
"Apache-2.0"
] | 26
|
2021-07-05T08:41:34.000Z
|
2022-03-22T02:57:30.000Z
|
# ------------------------------------------------------------------------------------------------
# Deformable DETR
# Copyright (c) 2020 SenseTime. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 [see LICENSE for details]
# ------------------------------------------------------------------------------------------------
# Modified from https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/tree/pytorch_1.0.0
# ------------------------------------------------------------------------------------------------
import os
import glob
import torch
from torch.utils.cpp_extension import CUDA_HOME
from torch.utils.cpp_extension import CppExtension
from torch.utils.cpp_extension import CUDAExtension
from setuptools import find_packages
from setuptools import setup
requirements = ["torch", "torchvision"]
def get_extensions():
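    # Collect the C++ sources (and the CUDA .cu sources when a CUDA toolchain is
    # available) under src/ and return the MultiScaleDeformableAttention extension;
    # building without CUDA raises NotImplementedError below.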
this_dir = os.path.dirname(os.path.abspath(__file__))
extensions_dir = os.path.join(this_dir, "src")
main_file = glob.glob(os.path.join(extensions_dir, "*.cpp"))
source_cpu = glob.glob(os.path.join(extensions_dir, "cpu", "*.cpp"))
source_cuda = glob.glob(os.path.join(extensions_dir, "cuda", "*.cu"))
sources = main_file + source_cpu
extension = CppExtension
extra_compile_args = {"cxx": []}
define_macros = []
if torch.cuda.is_available() and CUDA_HOME is not None:
extension = CUDAExtension
sources += source_cuda
define_macros += [("WITH_CUDA", None)]
extra_compile_args["nvcc"] = [
"-DCUDA_HAS_FP16=0",
"-D__CUDA_NO_HALF_OPERATORS__",
"-D__CUDA_NO_HALF_CONVERSIONS__",
"-D__CUDA_NO_HALF2_OPERATORS__",
]
else:
        raise NotImplementedError('CUDA is not available')
sources = [os.path.join(extensions_dir, s) for s in sources]
include_dirs = [extensions_dir]
ext_modules = [
extension(
"MultiScaleDeformableAttention",
sources,
include_dirs=include_dirs,
define_macros=define_macros,
extra_compile_args=extra_compile_args,
)
]
return ext_modules
setup(
name="MultiScaleDeformableAttention",
version="1.0",
author="Weijie Su",
url="https://github.com/fundamentalvision/Deformable-DETR",
description="PyTorch Wrapper for CUDA Functions of Multi-Scale Deformable Attention",
packages=find_packages(exclude=("configs", "tests",)),
ext_modules=get_extensions(),
cmdclass={"build_ext": torch.utils.cpp_extension.BuildExtension},
)
| 35.541667
| 98
| 0.611176
|
238b955141f389a7ec46bceee2215f439721a25f
| 1,931
|
py
|
Python
|
setup.py
|
icemac/icemac.ab.document
|
c5e9a68ca509b5ea59a84cf8a8c50f570a83a9eb
|
[
"BSD-2-Clause"
] | 1
|
2020-02-25T17:04:39.000Z
|
2020-02-25T17:04:39.000Z
|
setup.py
|
icemac/icemac.ab.document
|
c5e9a68ca509b5ea59a84cf8a8c50f570a83a9eb
|
[
"BSD-2-Clause"
] | null | null | null |
setup.py
|
icemac/icemac.ab.document
|
c5e9a68ca509b5ea59a84cf8a8c50f570a83a9eb
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import os.path
import setuptools
def read(*path_elements):
"""Read file."""
return open(os.path.join(*path_elements)).read()
version = '0.1.dev0'
long_description = '\n\n'.join([
read('README.rst'),
read('CHANGES.rst'),
])
setuptools.setup(
name='icemac.ab.document',
version=version,
description="Document storage feature for icemac.addressbook",
long_description=long_description,
keywords='icemac addressbook document storage download groups',
author='Michael Howitz',
author_email='icemac@gmx.net',
download_url='https://pypi.org/project/icemac.ab.document',
url='https://github.com/icemac/icemac.ab.document',
license='MIT',
classifiers=[
        'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Paste',
'Framework :: Zope3',
'License :: OSI Approved',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Natural Language :: German',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2 :: Only',
'Programming Language :: Python :: Implementation',
'Programming Language :: Python :: Implementation :: CPython',
],
packages=setuptools.find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['icemac', 'icemac.ab'],
include_package_data=True,
zip_safe=False,
install_requires=[
'grokcore.annotation',
'icemac.addressbook >= 9.0.dev0',
'setuptools',
],
extras_require=dict(
test=[
'icemac.addressbook [test]',
]),
entry_points="""
[fanstatic.libraries]
document = icemac.ab.document.browser.resource:lib
""",
)
| 30.171875
| 70
| 0.615225
|
9d4143e1e4ce698d9c32ded8fbad3cd16aa2b1c0
| 547
|
py
|
Python
|
WebMirror/management/rss_parser_funcs/feed_parse_extractBuzyhoneybeeBlog.py
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 193
|
2016-08-02T22:04:35.000Z
|
2022-03-09T20:45:41.000Z
|
WebMirror/management/rss_parser_funcs/feed_parse_extractBuzyhoneybeeBlog.py
|
fake-name/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 533
|
2016-08-23T20:48:23.000Z
|
2022-03-28T15:55:13.000Z
|
WebMirror/management/rss_parser_funcs/feed_parse_extractBuzyhoneybeeBlog.py
|
rrosajp/ReadableWebProxy
|
ed5c7abe38706acc2684a1e6cd80242a03c5f010
|
[
"BSD-3-Clause"
] | 19
|
2015-08-13T18:01:08.000Z
|
2021-07-12T17:13:09.000Z
|
def extractBuzyhoneybeeBlog(item):
'''
Parser for 'buzyhoneybee.blog'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| 24.863636
| 104
| 0.632541
|
9447a2c2be2d8c655ffc30ca6c2f9c4a7e6a74e8
| 652
|
py
|
Python
|
.vim/bundle/ultisnips.vim/pythonx/UltiSnips/snippet/definition/snipmate.py
|
marshnmedia/dotfiles
|
d1a965ec81c9ed4e2f8addf9a07c588f2cea46db
|
[
"MIT"
] | null | null | null |
.vim/bundle/ultisnips.vim/pythonx/UltiSnips/snippet/definition/snipmate.py
|
marshnmedia/dotfiles
|
d1a965ec81c9ed4e2f8addf9a07c588f2cea46db
|
[
"MIT"
] | null | null | null |
.vim/bundle/ultisnips.vim/pythonx/UltiSnips/snippet/definition/snipmate.py
|
marshnmedia/dotfiles
|
d1a965ec81c9ed4e2f8addf9a07c588f2cea46db
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf-8
"""A snipMate snippet after parsing."""
from UltiSnips.snippet.definition._base import SnippetDefinition
from UltiSnips.snippet.parsing.snipmate import parse_and_instantiate
class SnipMateSnippetDefinition(SnippetDefinition):
"""See module doc."""
SNIPMATE_SNIPPET_PRIORITY = -1000
def __init__(self, trigger, value, description):
SnippetDefinition.__init__(self, self.SNIPMATE_SNIPPET_PRIORITY,
trigger, value, description, "", {})
def instantiate(self, snippet_instance, initial_text, indent):
parse_and_instantiate(snippet_instance, initial_text, indent)
| 32.6
| 72
| 0.75
|
d63a773c98d1a741db8f318c09c6f472ca9b4d2c
| 5,009
|
py
|
Python
|
robots/LoCoBot/locobot_riss/scripts/dev/riss.py
|
HIRO-group/pyrobot
|
bc54608657f51f3691905a5b2a2d3abba1e3aa68
|
[
"MIT"
] | null | null | null |
robots/LoCoBot/locobot_riss/scripts/dev/riss.py
|
HIRO-group/pyrobot
|
bc54608657f51f3691905a5b2a2d3abba1e3aa68
|
[
"MIT"
] | 1
|
2020-09-17T01:33:19.000Z
|
2020-09-17T01:33:19.000Z
|
robots/LoCoBot/locobot_riss/scripts/dev/riss.py
|
HIRO-group/pyrobot
|
bc54608657f51f3691905a5b2a2d3abba1e3aa68
|
[
"MIT"
] | 1
|
2020-08-26T18:53:25.000Z
|
2020-08-26T18:53:25.000Z
|
#!/home/locobot/pyenvs/pyro3_ws/bin/python
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import time
import numpy as np
import matplotlib.pyplot as plt
try:
from image_processor import Image_Processor
except ImportError:
from dev.image_processor import Image_Processor
def get_time_str():
return time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
class simple_planner():
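    """Keeps bounded histories of observed states and commanded trajectories and
    signals when the caller should perform a recovery manoeuvre."""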
def __init__(self, start_point, max_steps=200, max_trace=10, reset_threshold=1e-3):
self.origin = start_point
self.memory = np.array([])
self.tracer = np.array([])
self.max_steps = max_steps
self.max_trace = max_trace
self.reset_threshold=reset_threshold
def observe(self, state, trajectory):
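        """Append the latest state and trajectory to the bounded FIFO buffers and
        return True when a heuristic over the most recent entries exceeds
        ``reset_threshold`` (the caller then backs the robot off)."""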
if self.memory.shape[0] >= self.max_steps:
self.memory = np.concatenate([self.memory[1:], state])
else:
self.memory = np.concatenate([self.memory, state])
if self.tracer.shape[0] >= self.max_trace:
self.tracer = np.concatenate([self.tracer[1:], trajectory])
else:
self.tracer = np.concatenate([self.tracer, trajectory])
if self.memory.shape[0] > 4:
if self.memory[-1] - self.memory[-2] - self.memory[-3] > self.reset_threshold:
return True
if self.tracer.shape[0] > 4:
if self.tracer[-1] - self.tracer[-2] - self.tracer[-3] > self.reset_threshold:
return True
return False
def display_path(self):
fig = plt.figure()
ax = fig.add_subplot()
ax.plot(self.memory[:][:2])
ax.set_title('Pose over time')
plt.show()
class RISS():
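    """Reactive steering from two horizontal slices of a depth image: picks a heading
    towards open space and falls back to a reversing manoeuvre when the planner
    reports trouble."""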
def __init__(self, start_point, image_resolution, high, low, gains=(1e-1,1e-3), memory_length=200, verbose=False, visualize=False):
self.gain = gains # Mostly to prevent overflow of the accumulator
self.planner = simple_planner(start_point) # simple path tracking and adjustments
self.resolution = image_resolution
self.processor = Image_Processor()
self.high = high
self.low = low
self.verbose = verbose
self.visualize = visualize
def get_alpha_from_image_slice(self, u_slice, l_slice, name='[INFO]'):
""" This function takes two slices of an image and returns a
heading towards the most open space.
@params:
- u_slice : the upper slice of the image (usually from px:275)
- l_slice : the lower slice of the image (usually from px:375)
- name : String to make print log more readable
@returns:
- vector : normal vector (forward speed, turn_speed)
- exe_time : duration to execute the trajectory
"""
exe_time = 0.25
# meta-data
mp_actual = int(u_slice.shape[0]/2)
weight = np.zeros(2)
init_point = 0
        # compute interest of sections
for x,d in enumerate(u_slice[1:]):
# if found section
if u_slice[init_point] != d or x == len(u_slice) - 1:
mp = (x + init_point) / 2 # midpoint of section
depth_relative = int(u_slice[x]) - int(l_slice[x]) # difference between slices
depth_actual = int(u_slice[x]) + int(l_slice[x]) / 2 # average depth of two slices
# weight of sections
left_gain = (1 - (mp / mp_actual)**2) * self.gain[0]
center_gain = mp_actual - np.abs(mp - mp_actual) * self.gain[1]
# get Interest
edge_intrest = left_gain * (x - init_point) * (depth_relative + depth_actual - 1.5)
center_intrest = center_gain * (x - init_point) * (depth_relative + depth_actual - 1.75)
#update
weight[0] += edge_intrest
weight[1] += center_intrest
init_point = x + 1
        # when no section is of interest, turn around
if np.sum(weight) == 0:
weight = [100, -1]
exe_time = 2
vector = [weight[1], weight[0]]
if self.verbose:
print(f'{name} Accumulated weights: {weight}')
print(f'{name} Non-normal vector: {vector}')
return vector / np.linalg.norm(vector), exe_time
def get_heading(self, image_d, image_rgb, state, name='[INFO]'):
""" Wrapper for getting alpha from image slices """
u_slice = np.array(image_d[self.high])
l_slice = np.array(image_d[self.low])
(fwd_speed, turn_speed), exe_time = self.get_alpha_from_image_slice(u_slice, l_slice, name=name)
if self.planner.observe(state, (fwd_speed, turn_speed)):
fwd_speed = -0.1
turn_speed = -0.7
exe_time = 1
if self.visualize:
cp = np.array([self.resolution[1] / 2, self.resolution[0] / 2]).astype(np.int)
slice_max = np.max([np.max(u_slice), np.max(l_slice)])
protocol=[
{'draw-line' : [[[cp[0],cp[1],cp[0]-(turn_speed * 100) ,cp[1]-(fwd_speed * 100)]],(1,0,0),3]},
{'crop' : [[0,0,1,1]],
'plot' : [[list(zip(np.arange(0,image_d.shape[1]), (slice_max - u_slice) * 45)), list(zip(np.arange(0,image_d.shape[1]), (slice_max - l_slice) * 45))],[(0,0,1),(1,0,0)], slice_max * 45, len(u_slice)]}]
_ = self.processor.process(image_rgb.copy(), protocol=protocol[0])
_ = self.processor.process(image_d.copy(), protocol=protocol[1], new_sequence=False, sequence=False)
self.processor.display()#save=f'scripts/dev/gen/img-processor-{get_time_str()}.png')
return (fwd_speed, turn_speed), exe_time
| 34.544828
| 205
| 0.684967
|
de8d42f1a7162a6a3efa177d1b54cee4c59d0b23
| 1,285
|
py
|
Python
|
05-image_denoising/03_hdr.py
|
myghact-1/complete-opencv
|
f89f7edce9d8ad1adde882e12d2e4eea91ea9155
|
[
"MIT"
] | null | null | null |
05-image_denoising/03_hdr.py
|
myghact-1/complete-opencv
|
f89f7edce9d8ad1adde882e12d2e4eea91ea9155
|
[
"MIT"
] | null | null | null |
05-image_denoising/03_hdr.py
|
myghact-1/complete-opencv
|
f89f7edce9d8ad1adde882e12d2e4eea91ea9155
|
[
"MIT"
] | null | null | null |
# https://en.wikipedia.org/wiki/High-dynamic-range_imaging
import cv2
import numpy as np
from glob import glob
#* 1. Loading exposure images into a list (sorted so their order matches exposure_times below)
images = sorted(glob("./images/hdr*"))
images = [cv2.imread(image) for image in images]
exposure_times = np.array([15.0, 2.5, 0.25, 0.0333], dtype=np.float32)
#* 2. Merge exposures into HDR image
merge_debevec = cv2.createMergeDebevec()
hdr_debevec = merge_debevec.process(images, times=exposure_times.copy())
merge_robertson = cv2.createMergeRobertson()
hdr_robertson = merge_robertson.process(images, times=exposure_times.copy())
#* 3. Tonemap HDR image
tonemap1 = cv2.createTonemap(gamma=1.5)
res_debevec = tonemap1.process(hdr_debevec.copy())
res_robertson = tonemap1.process(hdr_robertson.copy())
#* 4. Merge exposures using Mertens fusion
merge_mertens = cv2.createMergeMertens()
res_mertens = merge_mertens.process(images)
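# Note: the tonemapped and fused results are float32 images roughly in [0, 1]
# (values can spill slightly outside that range), which is why step 5 scales
# by 255 and clips before casting to 8-bit.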
#* 5. Convert to 8-bit and save
res_debevec_8bit = np.clip(res_debevec*255, 0, 255).astype('uint8')
res_robertson_8bit = np.clip(res_robertson*255, 0, 255).astype('uint8')
res_mertens_8bit = np.clip(res_mertens*255, 0, 255).astype('uint8')
cv2.imwrite("ldr_debevec.jpg", res_debevec_8bit)
cv2.imwrite("ldr_robertson.jpg", res_robertson_8bit)
cv2.imwrite("fusion_mertens.jpg", res_mertens_8bit)
| 29.204545
| 76
| 0.768872
|
4585a76773a7ebc8b4894f4108c724e2e84147db
| 4,990
|
py
|
Python
|
apps/accounts/forms.py
|
seanlefevre/openduty
|
34ab21117f114ccc808d8b0aa2cb801c819bdb86
|
[
"MIT"
] | 145
|
2016-04-11T06:53:13.000Z
|
2022-03-22T05:15:49.000Z
|
apps/accounts/forms.py
|
seanlefevre/openduty
|
34ab21117f114ccc808d8b0aa2cb801c819bdb86
|
[
"MIT"
] | 78
|
2017-09-24T10:59:49.000Z
|
2022-02-12T07:36:27.000Z
|
apps/accounts/forms.py
|
seanlefevre/openduty
|
34ab21117f114ccc808d8b0aa2cb801c819bdb86
|
[
"MIT"
] | 30
|
2016-04-11T06:53:16.000Z
|
2021-12-29T11:39:26.000Z
|
from django import forms
from django.contrib.auth.models import User
from django.conf import settings
from django.contrib.auth.forms import AuthenticationForm, PasswordResetForm, SetPasswordForm
from apps.accounts.models import Profile
from betterforms.multiform import MultiModelForm
from apps.notification.notifier.hipchat import HipchatNotifier
class CustomAuthenticationForm(AuthenticationForm):
username = forms.CharField(
required=True,
label='Username',
widget=forms.TextInput(
attrs={'type': 'text', 'class': "form-control", 'required': 'required', 'placeholder': 'Username'}
)
)
password = forms.CharField(
required=True,
label='Password',
widget=forms.TextInput(
attrs={'type': 'password', 'class': "form-control", 'required': 'required', 'placeholder': 'Password'}
)
)
class CustomPasswordResetForm(PasswordResetForm):
email = forms.EmailField(
required=True,
label='Email',
widget=forms.EmailInput(
attrs={'type': 'email', 'class': "form-control", 'required': 'required', 'placeholder': 'Email'}
)
)
class CustomSetPasswordForm(SetPasswordForm):
new_password1 = forms.CharField(
widget=forms.PasswordInput(
attrs={'type': 'password', 'class': "form-control", 'required': 'required', 'placeholder': 'New password'}
),
)
new_password2 = forms.CharField(
widget=forms.PasswordInput(
attrs={
'type': 'password', 'class': "form-control", 'required': 'required',
'placeholder': 'New password confirmation'
}
),
)
class UserForm(forms.ModelForm):
"""Django default User Form"""
username = forms.CharField(
required=True,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
email = forms.EmailField(
required=True,
widget=forms.EmailInput(attrs={'class': 'form-control'})
)
password = forms.CharField(
required=False,
widget=forms.PasswordInput(attrs={'class': 'form-control'})
)
class Meta:
model = User
fields = (
'username',
'email',
'password'
)
class ProfileForm(forms.ModelForm):
"""User Profile Form"""
def __init__(self, *args, **kwargs):
super(ProfileForm, self).__init__(*args, **kwargs)
hipchat_room_name = self.fields.get('hipchat_room_name')
hipchat_rooms = HipchatNotifier(settings.HIPCHAT_SETTINGS).get_all_rooms()
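        # get_all_rooms() is expected to return (value, label) choice pairs,
        # matching the shape of the fallback choices added in the else branch.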
if hipchat_rooms: # pragma: no cover
hipchat_room_name.choices = list(hipchat_room_name.choices) + hipchat_rooms
else:
hipchat_room_name.choices = list(hipchat_room_name.choices) + \
[("", ""), ("room", "Room")]
phone_number = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': "+44"})
)
pushover_user_key = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
pushover_app_key = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
slack_room_name = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
prowl_api_key = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
prowl_application = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
prowl_url = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
rocket_webhook_url = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
hipchat_room_name = forms.ChoiceField(
required=False,
widget=forms.Select(
attrs={'class': 'form-control'}
)
)
hipchat_room_url = forms.CharField(
required=False,
widget=forms.TextInput(attrs={'class': 'form-control'})
)
send_resolve_enabled = forms.CharField(
required=False,
widget=forms.CheckboxInput(attrs={'class': 'flat-red'})
)
class Meta:
model = Profile
fields = (
'phone_number',
'pushover_user_key',
'pushover_app_key',
'slack_room_name',
'prowl_api_key',
'prowl_application',
'prowl_url',
'rocket_webhook_url',
'hipchat_room_name',
'hipchat_room_url',
'send_resolve_enabled'
)
class UserProfileMultiForm(MultiModelForm):
"""MultiForm for Users and Profile"""
form_classes = {
'user': UserForm,
'profile': ProfileForm,
}
| 30.993789
| 118
| 0.604609
|
fe8f7fa1016111faf36ea389d999f9617c062acc
| 4,465
|
py
|
Python
|
packages/PIPS/pips/src/Passes/pyps/pipscc.py
|
DVSR1966/par4all
|
86b33ca9da736e832b568c5637a2381f360f1996
|
[
"MIT"
] | 51
|
2015-01-31T01:51:39.000Z
|
2022-02-18T02:01:50.000Z
|
packages/PIPS/pips/src/Passes/pyps/pipscc.py
|
DVSR1966/par4all
|
86b33ca9da736e832b568c5637a2381f360f1996
|
[
"MIT"
] | 7
|
2017-05-29T09:29:00.000Z
|
2019-03-11T16:01:39.000Z
|
packages/PIPS/pips/src/Passes/pyps/pipscc.py
|
DVSR1966/par4all
|
86b33ca9da736e832b568c5637a2381f360f1996
|
[
"MIT"
] | 12
|
2015-03-26T08:05:38.000Z
|
2022-02-18T02:01:51.000Z
|
#!/usr/bin/env python
from __future__ import with_statement # to cope with python2.5
import pyps
import sys
import os
import shutil
import string
import tempfile
import pickle
import subprocess
from pyps import module
class object_code:
"""preprocessed c source file descriptor"""
def __init__(self,sourcefile,cppflags,cflags):
self.cflags=cflags
CPP=os.getenv("CPP","cpp")
cmd=[CPP,"-U__GNUC__"]+cppflags+[sourcefile]
#print "# running",cmd
sp=subprocess.Popen(cmd,stdout=subprocess.PIPE)
sp.wait()
self.code=sp.stdout.read()
self.cname=sourcefile.replace(os.sep,"__")
def set_cname(self,cname):
self.cname=cname
for op in self.cflags:
if op == "-c":
i=self.cflags.index(op)
self.cflags[i+1]=self.cname
break
def dump_to_c(self,in_dir):
self.set_cname(in_dir+os.sep+self.cname)
cfile=file(self.cname,"w")
cfile.write(self.code)
cfile.close()
def ofile(argv):
for opt in argv[1:]:
if opt == '-o':
index=argv.index(opt)
return argv[index+1]
return ""
def cppflags(argv):
flags=[]
for opt in argv[1:]:
if opt[0:2] == "-D" or opt[0:2] == "-I" :
flags+=[opt]
argv.remove(opt)
return flags
class pipscc:
"""modular pips compiler front-end"""
def __init__(self,argv):
"""create a pips compiler instance from argv"""
self.argv=argv
self.is_ld=len(self.gather_c_files())==0
def run(self):
"""run the compilation"""
if not self.is_ld:self.pipscpp()
else:self.pipsld()
def pipscpp(self):
"""simulate the behavior of the c preprocessor"""
# parse command line
CPPFLAGS=cppflags(self.argv)
OUTFILE=ofile(self.argv)
#print "# CPPFLAGS: ", CPPFLAGS
cpp_and_linking= len([f for f in self.argv[1:] if f == "-c"]) == 0
# look for input file
for opt in self.argv[1:]:
if opt[0] != '-' and opt[-2:] == '.c' :
if not OUTFILE:
OUTFILE=os.path.basename(opt)[0:-1]+"o"
# generate internal representation of preprocessed code
args = self.argv[1:]
if cpp_and_linking : args.insert(0,"-c")
obj=object_code(opt,CPPFLAGS,args)
# serialize it
newobj=file(OUTFILE,"w")
pickle.dump(obj,newobj)
newobj.close()
#print "# OBJ written: ", OUTFILE
# check if we should link too
if cpp_and_linking:
for i in range(1,len(self.argv)):
if self.argv[i][-2:]=='.c':
self.argv[i]=self.argv[i][0:-2]+".o"
self.pipsld()
# that's all folks
def gather_object_files(self):
INPUT_FILES=[]
for opt in self.argv[1:]:
if opt[0] != '-' and opt[-2:]==".o":
INPUT_FILES+=[opt]
return INPUT_FILES
def gather_c_files(self):
INPUT_FILES=[]
for opt in self.argv[1:]:
if opt[0] != '-' and opt[-2:]==".c":
INPUT_FILES+=[opt]
return INPUT_FILES
def unpickle(self,WDIR,files):
"""generate a list of unpickled object files from files"""
O_FILES=[]
for ifile in files:
obj=pickle.load(file(ifile,"r"))
obj.dump_to_c(WDIR)
obj.oname=ifile
O_FILES+=[obj]
return O_FILES
def changes(self,ws):
"""apply any change to the workspace, should be overloaded by the user"""
for f in ws.fun: f.display()
for c in ws.cu: c.display()
def get_wd(self):
"""selects a working directory for pipscc"""
WDIR=tempfile.mkdtemp("pipscc")
#print "# intermediate files generated in", WDIR
return WDIR
def get_workspace(self,c_files):
return pyps.workspace(*c_files)
def compile(self,wdir,o_files):
CC=os.getenv("CC","gcc")
for obj in o_files:
cmd=[CC]+obj.cflags+["-o",obj.oname]
#print "# running", cmd
sp=subprocess.Popen(cmd)
sp.wait()
cmd=[CC]+self.argv[1:]
#print "# running", cmd
sp=subprocess.Popen(cmd)
exitcode=sp.wait()
if exitcode:
shutil.rmtree(wdir)
def pipsld(self):
"""simulate c linker, all computation is done at link time"""
WDIR=self.get_wd()
# gather pickled input files
INPUT_FILES=self.gather_object_files()
if len(INPUT_FILES) == 0:
print >> sys.stderr, "pipscc: no input files"
sys.exit(1)
else:
# load pickled input files
O_FILES=self.unpickle(WDIR,INPUT_FILES)
C_FILES=map(lambda o:o.cname,O_FILES)
#print "# input files: ", C_FILES
            # run pips with this information
#print "# running pips"
with self.get_workspace(C_FILES) as ws:
# add extra operations
self.changes(ws)
# commit changes
ws.save(rep=WDIR)
# now run the compiler
self.compile(WDIR,O_FILES)
shutil.rmtree(WDIR)
#
##
#
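# Hedged illustration: changes() above is documented as a hook that "should be
# overloaded by the user". A minimal sketch of such an overload; real
# project-specific PIPS transformations would replace the display() calls.
class verbose_pipscc(pipscc):
    """pipscc variant that only displays every function before compiling"""
    def changes(self,ws):
        for f in ws.fun:
            f.display()
# Typical driver invocations mirror a C compiler (file names are hypothetical):
#   pipscc.py -c foo.c -o foo.o      # "preprocess" and pickle an object stub
#   pipscc.py foo.o bar.o -o a.out   # unpickle, run PIPS, then the real compiler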
if __name__ == "__main__":
thecompiler=pipscc(sys.argv)
thecompiler.run()
| 24.668508
| 75
| 0.66383
|
ec7af4e37ba7cb45a6a8086a1af24275c15bee53
| 3,553
|
py
|
Python
|
venv/Lib/site-packages/mediapipe/calculators/core/sequence_shift_calculator_pb2.py
|
Farhan-Malik/advance-hand-gesture
|
0ebe21ddd7c8c2eb14746678be57b33d38c47205
|
[
"MIT"
] | 41
|
2021-06-19T13:57:18.000Z
|
2021-12-02T17:08:53.000Z
|
venv/Lib/site-packages/mediapipe/calculators/core/sequence_shift_calculator_pb2.py
|
HxnDev/Pose-Detection
|
2be27e88cf79a0fb643c5047158cba478c770be9
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/mediapipe/calculators/core/sequence_shift_calculator_pb2.py
|
HxnDev/Pose-Detection
|
2be27e88cf79a0fb643c5047158cba478c770be9
|
[
"MIT"
] | 4
|
2021-11-13T13:53:43.000Z
|
2022-03-13T05:57:13.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mediapipe/calculators/core/sequence_shift_calculator.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from mediapipe.framework import calculator_pb2 as mediapipe_dot_framework_dot_calculator__pb2
mediapipe_dot_framework_dot_calculator__options__pb2 = mediapipe_dot_framework_dot_calculator__pb2.mediapipe_dot_framework_dot_calculator__options__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='mediapipe/calculators/core/sequence_shift_calculator.proto',
package='mediapipe',
syntax='proto2',
serialized_pb=_b('\n:mediapipe/calculators/core/sequence_shift_calculator.proto\x12\tmediapipe\x1a$mediapipe/framework/calculator.proto\"\x94\x01\n\x1eSequenceShiftCalculatorOptions\x12\x19\n\rpacket_offset\x18\x01 \x01(\x05:\x02-12W\n\x03\x65xt\x12\x1c.mediapipe.CalculatorOptions\x18\x87\xba\xa9\x33 \x01(\x0b\x32).mediapipe.SequenceShiftCalculatorOptionsB\x0c\xa2\x02\tMediaPipe')
,
dependencies=[mediapipe_dot_framework_dot_calculator__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_SEQUENCESHIFTCALCULATOROPTIONS = _descriptor.Descriptor(
name='SequenceShiftCalculatorOptions',
full_name='mediapipe.SequenceShiftCalculatorOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='packet_offset', full_name='mediapipe.SequenceShiftCalculatorOptions.packet_offset', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=-1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
_descriptor.FieldDescriptor(
name='ext', full_name='mediapipe.SequenceShiftCalculatorOptions.ext', index=0,
number=107633927, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
options=None),
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=112,
serialized_end=260,
)
DESCRIPTOR.message_types_by_name['SequenceShiftCalculatorOptions'] = _SEQUENCESHIFTCALCULATOROPTIONS
SequenceShiftCalculatorOptions = _reflection.GeneratedProtocolMessageType('SequenceShiftCalculatorOptions', (_message.Message,), dict(
DESCRIPTOR = _SEQUENCESHIFTCALCULATOROPTIONS,
__module__ = 'mediapipe.calculators.core.sequence_shift_calculator_pb2'
# @@protoc_insertion_point(class_scope:mediapipe.SequenceShiftCalculatorOptions)
))
_sym_db.RegisterMessage(SequenceShiftCalculatorOptions)
_SEQUENCESHIFTCALCULATOROPTIONS.extensions_by_name['ext'].message_type = _SEQUENCESHIFTCALCULATOROPTIONS
mediapipe_dot_framework_dot_calculator__options__pb2.CalculatorOptions.RegisterExtension(_SEQUENCESHIFTCALCULATOROPTIONS.extensions_by_name['ext'])
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\242\002\tMediaPipe'))
# @@protoc_insertion_point(module_scope)
| 42.297619
| 385
| 0.814523
|
4afaf685584c911fe67893958ef36bf145fb5bc2
| 13,071
|
py
|
Python
|
sdk/python/pulumi_azure_native/storagesync/v20181001/outputs.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storagesync/v20181001/outputs.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storagesync/v20181001/outputs.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'ServerEndpointHealthResponse',
'SyncProgressStatusResponse',
'SyncSessionStatusResponse',
]
@pulumi.output_type
class ServerEndpointHealthResponse(dict):
"""
ServerEndpoint Health object.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "combinedHealth":
suggest = "combined_health"
elif key == "currentProgress":
suggest = "current_progress"
elif key == "downloadHealth":
suggest = "download_health"
elif key == "downloadStatus":
suggest = "download_status"
elif key == "lastUpdatedTimestamp":
suggest = "last_updated_timestamp"
elif key == "offlineDataTransferStatus":
suggest = "offline_data_transfer_status"
elif key == "uploadHealth":
suggest = "upload_health"
elif key == "uploadStatus":
suggest = "upload_status"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ServerEndpointHealthResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ServerEndpointHealthResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ServerEndpointHealthResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
combined_health: Optional[str] = None,
current_progress: Optional['outputs.SyncProgressStatusResponse'] = None,
download_health: Optional[str] = None,
download_status: Optional['outputs.SyncSessionStatusResponse'] = None,
last_updated_timestamp: Optional[str] = None,
offline_data_transfer_status: Optional[str] = None,
upload_health: Optional[str] = None,
upload_status: Optional['outputs.SyncSessionStatusResponse'] = None):
"""
ServerEndpoint Health object.
:param str combined_health: Combined Health Status.
:param 'SyncProgressStatusResponse' current_progress: Current progress
:param str download_health: Download Health Status.
:param 'SyncSessionStatusResponse' download_status: Download Status
:param str last_updated_timestamp: Last Updated Timestamp
:param str offline_data_transfer_status: Offline Data Transfer State
:param str upload_health: Upload Health Status.
:param 'SyncSessionStatusResponse' upload_status: Upload Status
"""
if combined_health is not None:
pulumi.set(__self__, "combined_health", combined_health)
if current_progress is not None:
pulumi.set(__self__, "current_progress", current_progress)
if download_health is not None:
pulumi.set(__self__, "download_health", download_health)
if download_status is not None:
pulumi.set(__self__, "download_status", download_status)
if last_updated_timestamp is not None:
pulumi.set(__self__, "last_updated_timestamp", last_updated_timestamp)
if offline_data_transfer_status is not None:
pulumi.set(__self__, "offline_data_transfer_status", offline_data_transfer_status)
if upload_health is not None:
pulumi.set(__self__, "upload_health", upload_health)
if upload_status is not None:
pulumi.set(__self__, "upload_status", upload_status)
@property
@pulumi.getter(name="combinedHealth")
def combined_health(self) -> Optional[str]:
"""
Combined Health Status.
"""
return pulumi.get(self, "combined_health")
@property
@pulumi.getter(name="currentProgress")
def current_progress(self) -> Optional['outputs.SyncProgressStatusResponse']:
"""
Current progress
"""
return pulumi.get(self, "current_progress")
@property
@pulumi.getter(name="downloadHealth")
def download_health(self) -> Optional[str]:
"""
Download Health Status.
"""
return pulumi.get(self, "download_health")
@property
@pulumi.getter(name="downloadStatus")
def download_status(self) -> Optional['outputs.SyncSessionStatusResponse']:
"""
Download Status
"""
return pulumi.get(self, "download_status")
@property
@pulumi.getter(name="lastUpdatedTimestamp")
def last_updated_timestamp(self) -> Optional[str]:
"""
Last Updated Timestamp
"""
return pulumi.get(self, "last_updated_timestamp")
@property
@pulumi.getter(name="offlineDataTransferStatus")
def offline_data_transfer_status(self) -> Optional[str]:
"""
Offline Data Transfer State
"""
return pulumi.get(self, "offline_data_transfer_status")
@property
@pulumi.getter(name="uploadHealth")
def upload_health(self) -> Optional[str]:
"""
Upload Health Status.
"""
return pulumi.get(self, "upload_health")
@property
@pulumi.getter(name="uploadStatus")
def upload_status(self) -> Optional['outputs.SyncSessionStatusResponse']:
"""
Upload Status
"""
return pulumi.get(self, "upload_status")
@pulumi.output_type
class SyncProgressStatusResponse(dict):
"""
Sync Session status object.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "appliedBytes":
suggest = "applied_bytes"
elif key == "appliedItemCount":
suggest = "applied_item_count"
elif key == "perItemErrorCount":
suggest = "per_item_error_count"
elif key == "progressTimestamp":
suggest = "progress_timestamp"
elif key == "syncDirection":
suggest = "sync_direction"
elif key == "totalBytes":
suggest = "total_bytes"
elif key == "totalItemCount":
suggest = "total_item_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SyncProgressStatusResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SyncProgressStatusResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SyncProgressStatusResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
applied_bytes: Optional[int] = None,
applied_item_count: Optional[int] = None,
per_item_error_count: Optional[int] = None,
progress_timestamp: Optional[str] = None,
sync_direction: Optional[str] = None,
total_bytes: Optional[int] = None,
total_item_count: Optional[int] = None):
"""
Sync Session status object.
:param int applied_bytes: Applied bytes
:param int applied_item_count: Applied item count.
:param int per_item_error_count: Per item error count
:param str progress_timestamp: Progress timestamp
:param str sync_direction: Sync direction.
:param int total_bytes: Total bytes
:param int total_item_count: Total item count
"""
if applied_bytes is not None:
pulumi.set(__self__, "applied_bytes", applied_bytes)
if applied_item_count is not None:
pulumi.set(__self__, "applied_item_count", applied_item_count)
if per_item_error_count is not None:
pulumi.set(__self__, "per_item_error_count", per_item_error_count)
if progress_timestamp is not None:
pulumi.set(__self__, "progress_timestamp", progress_timestamp)
if sync_direction is not None:
pulumi.set(__self__, "sync_direction", sync_direction)
if total_bytes is not None:
pulumi.set(__self__, "total_bytes", total_bytes)
if total_item_count is not None:
pulumi.set(__self__, "total_item_count", total_item_count)
@property
@pulumi.getter(name="appliedBytes")
def applied_bytes(self) -> Optional[int]:
"""
Applied bytes
"""
return pulumi.get(self, "applied_bytes")
@property
@pulumi.getter(name="appliedItemCount")
def applied_item_count(self) -> Optional[int]:
"""
Applied item count.
"""
return pulumi.get(self, "applied_item_count")
@property
@pulumi.getter(name="perItemErrorCount")
def per_item_error_count(self) -> Optional[int]:
"""
Per item error count
"""
return pulumi.get(self, "per_item_error_count")
@property
@pulumi.getter(name="progressTimestamp")
def progress_timestamp(self) -> Optional[str]:
"""
Progress timestamp
"""
return pulumi.get(self, "progress_timestamp")
@property
@pulumi.getter(name="syncDirection")
def sync_direction(self) -> Optional[str]:
"""
Sync direction.
"""
return pulumi.get(self, "sync_direction")
@property
@pulumi.getter(name="totalBytes")
def total_bytes(self) -> Optional[int]:
"""
Total bytes
"""
return pulumi.get(self, "total_bytes")
@property
@pulumi.getter(name="totalItemCount")
def total_item_count(self) -> Optional[int]:
"""
Total item count
"""
return pulumi.get(self, "total_item_count")
@pulumi.output_type
class SyncSessionStatusResponse(dict):
"""
Sync Session status object.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "lastSyncPerItemErrorCount":
suggest = "last_sync_per_item_error_count"
elif key == "lastSyncResult":
suggest = "last_sync_result"
elif key == "lastSyncSuccessTimestamp":
suggest = "last_sync_success_timestamp"
elif key == "lastSyncTimestamp":
suggest = "last_sync_timestamp"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SyncSessionStatusResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SyncSessionStatusResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SyncSessionStatusResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
last_sync_per_item_error_count: Optional[int] = None,
last_sync_result: Optional[int] = None,
last_sync_success_timestamp: Optional[str] = None,
last_sync_timestamp: Optional[str] = None):
"""
Sync Session status object.
:param int last_sync_per_item_error_count: Last sync per item error count.
:param int last_sync_result: Last sync status
:param str last_sync_success_timestamp: Last sync success timestamp
:param str last_sync_timestamp: Last sync timestamp
"""
if last_sync_per_item_error_count is not None:
pulumi.set(__self__, "last_sync_per_item_error_count", last_sync_per_item_error_count)
if last_sync_result is not None:
pulumi.set(__self__, "last_sync_result", last_sync_result)
if last_sync_success_timestamp is not None:
pulumi.set(__self__, "last_sync_success_timestamp", last_sync_success_timestamp)
if last_sync_timestamp is not None:
pulumi.set(__self__, "last_sync_timestamp", last_sync_timestamp)
@property
@pulumi.getter(name="lastSyncPerItemErrorCount")
def last_sync_per_item_error_count(self) -> Optional[int]:
"""
Last sync per item error count.
"""
return pulumi.get(self, "last_sync_per_item_error_count")
@property
@pulumi.getter(name="lastSyncResult")
def last_sync_result(self) -> Optional[int]:
"""
Last sync status
"""
return pulumi.get(self, "last_sync_result")
@property
@pulumi.getter(name="lastSyncSuccessTimestamp")
def last_sync_success_timestamp(self) -> Optional[str]:
"""
Last sync success timestamp
"""
return pulumi.get(self, "last_sync_success_timestamp")
@property
@pulumi.getter(name="lastSyncTimestamp")
def last_sync_timestamp(self) -> Optional[str]:
"""
Last sync timestamp
"""
return pulumi.get(self, "last_sync_timestamp")
| 36.107735
| 148
| 0.638054
|
a25d11821c38846ee768a2752b2b0b45ace29696
| 5,527
|
py
|
Python
|
src/pretix/presale/views/__init__.py
|
alainrk/pretix
|
867a8132aa1ed73dd9513efae5b3c46b5bbae140
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-08-31T13:16:55.000Z
|
2021-08-31T13:16:55.000Z
|
src/pretix/presale/views/__init__.py
|
alainrk/pretix
|
867a8132aa1ed73dd9513efae5b3c46b5bbae140
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/pretix/presale/views/__init__.py
|
alainrk/pretix
|
867a8132aa1ed73dd9513efae5b3c46b5bbae140
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from datetime import timedelta
from decimal import Decimal
from itertools import groupby
from django.db.models import Sum
from django.utils.functional import cached_property
from django.utils.timezone import now
from pretix.base.decimal import round_decimal
from pretix.base.models import CartPosition, OrderPosition
from pretix.base.signals import register_payment_providers
class CartMixin:
@cached_property
def positions(self):
"""
        A list of this user's cart positions
"""
return list(get_cart(self.request))
def get_cart(self, answers=False, queryset=None, payment_fee=None, payment_fee_tax_rate=None, downloads=False):
if queryset:
prefetch = []
if answers:
prefetch.append('item__questions')
prefetch.append('answers')
cartpos = queryset.order_by(
'item', 'variation'
).select_related(
'item', 'variation'
).prefetch_related(
*prefetch
)
else:
cartpos = self.positions
# Group items of the same variation
# We do this by list manipulations instead of a GROUP BY query, as
# Django is unable to join related models in a .values() query
def keyfunc(pos):
if isinstance(pos, OrderPosition):
i = pos.positionid
else:
i = pos.pk
if downloads:
return i, pos.pk, 0, 0, 0, 0,
if answers and ((pos.item.admission and self.request.event.settings.attendee_names_asked)
or pos.item.questions.all()):
return i, pos.pk, 0, 0, 0, 0,
return 0, 0, pos.item_id, pos.variation_id, pos.price, (pos.voucher_id or 0)
positions = []
for k, g in groupby(sorted(list(cartpos), key=keyfunc), key=keyfunc):
g = list(g)
group = g[0]
group.count = len(g)
group.total = group.count * group.price
group.net_total = group.count * group.net_price
group.has_questions = answers and k[0] != ""
if answers:
group.cache_answers()
positions.append(group)
total = sum(p.total for p in positions)
net_total = sum(p.net_total for p in positions)
tax_total = sum(p.total - p.net_total for p in positions)
payment_fee = payment_fee if payment_fee is not None else self.get_payment_fee(total)
payment_fee_tax_rate = round_decimal(payment_fee_tax_rate
if payment_fee_tax_rate is not None
else self.request.event.settings.tax_rate_default)
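        # The fee is treated as a gross amount, so the tax it contains is
        # fee * (1 - 100 / (100 + rate)); e.g. a fee of 1.00 at a 19% tax rate
        # contains roughly 1.00 * (1 - 100/119) ≈ 0.16 of tax.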
payment_fee_tax_value = round_decimal(payment_fee * (1 - 100 / (100 + payment_fee_tax_rate)))
payment_fee_net = payment_fee - payment_fee_tax_value
tax_total += payment_fee_tax_value
net_total += payment_fee_net
try:
first_expiry = min(p.expires for p in positions) if positions else now()
minutes_left = max(first_expiry - now(), timedelta()).seconds // 60
except AttributeError:
first_expiry = None
minutes_left = None
return {
'positions': positions,
'raw': cartpos,
'total': total + payment_fee,
'net_total': net_total,
'tax_total': tax_total,
'payment_fee': payment_fee,
'payment_fee_net': payment_fee_net,
'payment_fee_tax_rate': payment_fee_tax_rate,
'answers': answers,
'minutes_left': minutes_left,
'first_expiry': first_expiry
}
def get_payment_fee(self, total):
if total == 0:
return Decimal('0.00')
payment_fee = 0
if 'payment' in self.request.session:
responses = register_payment_providers.send(self.request.event)
for receiver, response in responses:
provider = response(self.request.event)
if provider.identifier == self.request.session['payment']:
payment_fee = provider.calculate_fee(total)
return payment_fee
def get_cart(request):
if not hasattr(request, '_cart_cache'):
request._cart_cache = CartPosition.objects.filter(
cart_id=request.session.session_key, event=request.event
).order_by(
'item', 'variation'
).select_related(
'item', 'variation'
).prefetch_related(
'item__questions', 'answers'
)
return request._cart_cache
def get_cart_total(request):
if not hasattr(request, '_cart_total_cache'):
if hasattr(request, '_cart_cache'):
request._cart_total_cache = sum(i.price for i in request._cart_cache)
else:
request._cart_total_cache = CartPosition.objects.filter(
cart_id=request.session.session_key, event=request.event
).aggregate(sum=Sum('price'))['sum']
return request._cart_total_cache
class EventViewMixin:
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['event'] = self.request.event
return context
class OrganizerViewMixin:
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['organizer'] = self.request.organizer
return context
| 36.846667
| 115
| 0.602135
|
235bed273204eb59147569f9fd38c176b07cf73b
| 419
|
py
|
Python
|
tests/checkpoint/test_torch_save.py
|
ECS-251-W2020/final-project-TorchFly
|
69f60b337c5dec0b1cd8315c194bc7891ba98d3a
|
[
"MIT"
] | null | null | null |
tests/checkpoint/test_torch_save.py
|
ECS-251-W2020/final-project-TorchFly
|
69f60b337c5dec0b1cd8315c194bc7891ba98d3a
|
[
"MIT"
] | 3
|
2021-06-08T21:07:12.000Z
|
2021-12-13T20:41:53.000Z
|
tests/checkpoint/test_torch_save.py
|
ECS-251-W2020/final-project-TorchFly
|
69f60b337c5dec0b1cd8315c194bc7891ba98d3a
|
[
"MIT"
] | 1
|
2020-02-19T00:53:21.000Z
|
2020-02-19T00:53:21.000Z
|
import os
import ray
import glob
import time
import torch
from transformers import AutoModel
import torchfly_dev
model = AutoModel.from_pretrained("roberta-large")
device = torch.device("cuda")
model = model.cuda()
for i in range(100):
start = time.time()
    torch.save(model.state_dict(), "tmp.pth")
    time.sleep(4)
    end = time.time()
    print(f"Time taken: {end - start - 4}s")  # subtract the artificial 4 s sleep
time.sleep(100)
| 15.518519
| 52
| 0.696897
|
731365cf9349c4d0b77363752e5f06d5744e1255
| 1,344
|
py
|
Python
|
k8s_utils.py
|
b01901143/secret-script
|
ec71f2b4fbd91ff8d8f9d55824e001549f2237f1
|
[
"Apache-2.0"
] | 15
|
2020-07-21T14:32:30.000Z
|
2022-03-07T01:26:00.000Z
|
k8s_utils.py
|
b01901143/secret-script
|
ec71f2b4fbd91ff8d8f9d55824e001549f2237f1
|
[
"Apache-2.0"
] | 22
|
2020-07-17T21:47:00.000Z
|
2022-02-20T21:00:59.000Z
|
k8s_utils.py
|
b01901143/secret-script
|
ec71f2b4fbd91ff8d8f9d55824e001549f2237f1
|
[
"Apache-2.0"
] | 8
|
2020-07-21T00:24:20.000Z
|
2021-10-14T23:50:26.000Z
|
# k8s secret manager utilities
import os
import yaml
import subprocess
import base64
def k8sCreateSecret(secret_id, file):
literal = ""
with open(file) as f:
data = yaml.load_all(f, Loader=yaml.FullLoader)
for d in data:
for k,v in d.items():
literal += " --from-literal=" + str(k) +"=" + str(v)
os.system("kubectl create secret generic " + secret_id + literal)
def k8sDeleteSecret(secret_id):
os.system("kubectl delete secret " + secret_id)
def k8sUpdateSecret(secret_id, file):
literal = ""
with open(file) as f:
data = yaml.load_all(f, Loader=yaml.FullLoader)
for d in data:
for k,v in d.items():
literal += " --from-literal=" + str(k) +"=" + str(v)
os.system("kubectl create secret generic " + secret_id + literal + " --dry-run=client -o yaml | kubectl apply -f -")
def k8sAccessSecret(secret_id):
secret = {}
os.system("kubectl get secret " + secret_id + " -o yaml > " + "k8s_"+secret_id+".yaml")
with open("k8s_"+secret_id+".yaml") as f:
data = yaml.load_all(f, Loader=yaml.FullLoader)
for d in data:
for k,v in d.items():
if k!='data': continue
for k_,s_ in v.items():
secret[k_] = base64.b64decode(s_).decode("utf-8")
out = 'k8s_res.yaml'
with open(out, 'w') as outfile:
yaml.dump(secret, outfile, default_flow_style=False)
print("K8s secret:")
print(secret)
return out
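# Hedged usage sketch: a minimal driver for the helpers above, assuming kubectl
# is configured against a reachable cluster and that "secrets.yaml" holds flat
# key/value pairs; the file name and secret id are hypothetical.
if __name__ == "__main__":
    k8sCreateSecret("demo-secret", "secrets.yaml")  # create a secret from YAML literals
    k8sAccessSecret("demo-secret")                  # decode its data into k8s_res.yaml
    k8sDeleteSecret("demo-secret")                  # clean up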
| 28.595745
| 117
| 0.666667
|
d51217b4408546e61a273ec7300dd2e6e9f7272a
| 1,206
|
py
|
Python
|
varats-core/varats/project/varats_project.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | null | null | null |
varats-core/varats/project/varats_project.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | null | null | null |
varats-core/varats/project/varats_project.py
|
ajrox090/VaRA-Tool-Suite
|
1550d36a4049e0615afb0bacfb96b1d506a29c98
|
[
"BSD-2-Clause"
] | null | null | null |
"""
VaRA-TS project abstraction.
This module defines the main project abstractions for VaRA-TS that extend the
benchbuild interface with tool suite specific functions.
"""
import typing as tp
from abc import abstractmethod
import benchbuild as bb
from varats.project.project_domain import ProjectDomains
from varats.project.project_util import ProjectBinaryWrapper
from varats.utils.git_util import ShortCommitHash
class VProject(bb.Project): # type: ignore
"""VaRA-TS project abstraction, extending the interface which is required
from benchbuild."""
DOMAIN: ProjectDomains
@property
def binaries(self) -> tp.List[ProjectBinaryWrapper]:
"""Return a list of binaries generated by the project."""
return self.binaries_for_revision(self.version_of_primary)
@staticmethod
@abstractmethod
def binaries_for_revision(
revision: ShortCommitHash # pylint: disable=W0613
) -> tp.List[ProjectBinaryWrapper]:
"""
Return a list of binaries generated by the project, for the given
revision.
Args:
revision: to determine the binaries for
Returns:
list of project binaries
"""
| 28.046512
| 77
| 0.716418
|
de4c80ee81f1f6d8d83f3f378ca27a53c8f092d8
| 317
|
py
|
Python
|
examples/compose_example.py
|
emthanh/svg_utils
|
1ebd8e5a8cae067ed3f1d40939997eeed0a2d4fb
|
[
"MIT"
] | 195
|
2015-01-08T16:57:14.000Z
|
2022-03-08T10:08:01.000Z
|
examples/compose_example.py
|
emthanh/svg_utils
|
1ebd8e5a8cae067ed3f1d40939997eeed0a2d4fb
|
[
"MIT"
] | 61
|
2015-12-16T17:22:11.000Z
|
2022-03-07T02:03:30.000Z
|
examples/compose_example.py
|
emthanh/svg_utils
|
1ebd8e5a8cae067ed3f1d40939997eeed0a2d4fb
|
[
"MIT"
] | 58
|
2015-04-08T17:00:51.000Z
|
2022-02-27T20:06:13.000Z
|
#!/usr/bin/env python3
# coding=utf-8
from svgutils.compose import *
CONFIG["svg.file_path"] = "files"
CONFIG["image.file_path"] = "files"
Figure(
"10cm",
"5cm",
SVG("svg_logo.svg").scale(0.2),
Image(
120,
120,
"lion.jpeg",
).move(120, 0),
).save("compose_example.svg")
| 16.684211
| 35
| 0.574132
|
7eae16a91d91872b120a411811918d86ac692fbc
| 963
|
py
|
Python
|
plugins/agent-installer/worker_installer/tests/mock-sudo-plugin/setup.py
|
konradxyz/dev_fileserver
|
2c57520c447fc4bfda78668df575431be5c39276
|
[
"Apache-2.0"
] | null | null | null |
plugins/agent-installer/worker_installer/tests/mock-sudo-plugin/setup.py
|
konradxyz/dev_fileserver
|
2c57520c447fc4bfda78668df575431be5c39276
|
[
"Apache-2.0"
] | null | null | null |
plugins/agent-installer/worker_installer/tests/mock-sudo-plugin/setup.py
|
konradxyz/dev_fileserver
|
2c57520c447fc4bfda78668df575431be5c39276
|
[
"Apache-2.0"
] | null | null | null |
#########
# Copyright (c) 2014 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='mock-sudo-plugin',
version='3.3a3',
author='idanmo',
author_email='idan@gigaspaces.com',
packages=['sudo_plugin'],
license='LICENSE',
description='Plugin for running simple bash scripts',
install_requires=[
"cloudify-plugins-common"
]
)
| 31.064516
| 77
| 0.712357
|
2b708f0758f5a6ff55a0a3d263c10c9eea4e7251
| 1,311
|
py
|
Python
|
authentication.py
|
alexanderluiscampino/ESTsoft-Data-Statistics-by-Period
|
74949ec3de214a04ffd97ed484bfe27addee1bff
|
[
"MIT"
] | null | null | null |
authentication.py
|
alexanderluiscampino/ESTsoft-Data-Statistics-by-Period
|
74949ec3de214a04ffd97ed484bfe27addee1bff
|
[
"MIT"
] | null | null | null |
authentication.py
|
alexanderluiscampino/ESTsoft-Data-Statistics-by-Period
|
74949ec3de214a04ffd97ed484bfe27addee1bff
|
[
"MIT"
] | null | null | null |
import os, sys
cwd = os.getcwd()
def readAuthenticationFile(page):
"""
# Reads Authentication data from file authenticate.txt
# params: @page - website to retrieve configs from
# returns: @payload to be used with HTTP requests library
"""
filename = "authenticate.txt"
    data = [] # temporary holder for the current line's key/value fields
payload = {} # Return payload with authentication fields
with open(os.path.join(cwd, filename)) as authFile:
for line in authFile:
if line.find('##') == -1: #if it is not a comment line
if line.find('*{}'.format(page)) > -1: # If header matches
read = True
while read: # read next lines after the header
data = authFile.readline() # Read next line
# Creates condition to stop when hits empty line or EOF
read = not(data == '' or data =='\n')
data = data.split('\n')[0].replace(' ','').split(':')
if not read: # Leaves file reading if condition is met
break
payload.update({data[0] : data[1]})
return payload
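def _demo_read_authentication():
    """
    Hedged self-check (field names and values are made up): the parser above
    expects '*<PageName>' section headers, 'key : value' lines, '##' comment
    lines, and a blank line (or EOF) to terminate a section.
    """
    sample = ("## credentials for the billing site\n"
              "*Billing\n"
              "username : alice\n"
              "password : s3cret\n"
              "\n")
    with open(os.path.join(cwd, "authenticate.txt"), "w") as demo_file:
        demo_file.write(sample)
    return readAuthenticationFile('Billing')  # -> {'username': 'alice', 'password': 's3cret'}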
if __name__ == '__main__':
print(readAuthenticationFile('Billing'))
| 42.290323
| 81
| 0.529367
|
131bf93fd118a4bbf7dfcd44fe3e621d6045409a
| 3,466
|
py
|
Python
|
tests/test_service.py
|
ArdanaCLM/ardana-service
|
680099101de36fce893ef6d138b75d778f92f182
|
[
"Apache-2.0"
] | 1
|
2017-07-20T01:04:21.000Z
|
2017-07-20T01:04:21.000Z
|
tests/test_service.py
|
GarySmith/ardana-service
|
680099101de36fce893ef6d138b75d778f92f182
|
[
"Apache-2.0"
] | null | null | null |
tests/test_service.py
|
GarySmith/ardana-service
|
680099101de36fce893ef6d138b75d778f92f182
|
[
"Apache-2.0"
] | 2
|
2017-08-02T17:18:47.000Z
|
2019-10-11T23:48:58.000Z
|
# (c) Copyright 2017-2018 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import os
from oslo_config import cfg
from oslo_config import fixture as oslo_fixture
from oslo_serialization import jsonutils
import testtools
from ardana_service.service import bp
from flask import Flask
app = Flask(__name__)
app.register_blueprint(bp)
class TestServiceFiles(testtools.TestCase):
def setUp(self):
super(TestServiceFiles, self).setUp()
self.TEST_DATA_DIR = os.path.join(os.path.dirname(__file__),
'test_data')
self.conf = self.useFixture(oslo_fixture.Config(cfg.CONF))
@mock.patch('os.walk')
def test_get_all_files(self, mock_os_walk):
self.conf.config(group='paths', config_dir='/root')
mock_os_walk.return_value = \
[('/root', ['dir1'], []), ('/root/dir1', [], ['test.j2'])]
myapp = app.test_client()
x = myapp.get('/api/v2/service/files')
y = jsonutils.loads(x.data)
self.assertEqual(len(y), 1)
self.assertEqual(y[0]['files'][0], 'test.j2')
@mock.patch('os.walk')
def test_get_all_files_subdir(self, mock_os_walk):
self.conf.config(group='paths', config_dir='/root')
mock_os_walk.return_value = \
[('/root', ['dir1'], []), ('/root/dir1', ['subdir'], []),
('/root/dir1/subdir', [], ['test.j2'])]
myapp = app.test_client()
x = myapp.get('/api/v2/service/files')
y = jsonutils.loads(x.data)
self.assertEqual(len(y), 1)
self.assertEqual(y[0]['files'][0], 'subdir/test.j2')
@mock.patch('os.walk')
def test_get_all_files_emptydir(self, mock_os_walk):
self.conf.config(group='paths', config_dir='/root')
mock_os_walk.return_value = \
[('/root', ['dir1'], []), ('/root/dir1', [], [])]
myapp = app.test_client()
x = myapp.get('/api/v2/service/files')
y = jsonutils.loads(x.data)
self.assertEqual(len(y), 0)
def test_get_a_file(self):
self.conf.config(group='paths',
config_dir=self.TEST_DATA_DIR + '/service_files/')
myapp = app.test_client()
x = myapp.get('/api/v2/service/files/testservice/test.j2')
y = jsonutils.loads(x.data)
content = 'log_config_append={{ cinder_api_conf_dir }}' + \
'/api-logging.conf'
self.assertTrue(y.find(content))
def test_post_a_file(self):
self.conf.config(group='paths',
config_dir=self.TEST_DATA_DIR + '/service_files/')
myapp = app.test_client()
x = myapp.get('/api/v2/service/files/testservice/test.j2')
y = jsonutils.loads(x.data)
result = myapp.post(
'/api/v2/service/files/testservice/test.j2',
data=jsonutils.dumps(y),
content_type='application/json')
self.assertTrue(str(result).find('200'))
| 36.104167
| 75
| 0.621754
|
752dec82678180b9710fc4c6e2809bb362af5883
| 1,718
|
py
|
Python
|
allauth/socialaccount/providers/__init__.py
|
christopherpoland/django-allauth
|
df78274669a87f5c1a9147843b353353230d1940
|
[
"MIT"
] | 2
|
2020-04-19T20:13:46.000Z
|
2022-03-11T09:48:53.000Z
|
allauth/socialaccount/providers/__init__.py
|
CZZLEGEND/django-allauth
|
ef0774318993c2f7757df6ca2b962bc0df1d5674
|
[
"MIT"
] | 11
|
2021-04-08T19:05:56.000Z
|
2022-03-12T00:10:25.000Z
|
allauth/socialaccount/providers/__init__.py
|
safwanrahman/django-allauth
|
dac31fdac2c13a97829e6879e09431c735abab4e
|
[
"MIT"
] | 17
|
2020-03-03T08:42:17.000Z
|
2020-10-03T16:08:49.000Z
|
import importlib
from collections import OrderedDict
from django.conf import settings
class ProviderRegistry(object):
def __init__(self):
self.provider_map = OrderedDict()
self.loaded = False
def get_list(self, request=None):
self.load()
return [
provider_cls(request)
for provider_cls in self.provider_map.values()]
def register(self, cls):
self.provider_map[cls.id] = cls
def by_id(self, id, request=None):
self.load()
return self.provider_map[id](request=request)
def as_choices(self):
self.load()
for provider_cls in self.provider_map.values():
yield (provider_cls.id, provider_cls.name)
def load(self):
# TODO: Providers register with the provider registry when
# loaded. Here, we build the URLs for all registered providers. So, we
# really need to be sure all providers did register, which is why we're
# forcefully importing the `provider` modules here. The overall
        # mechanism is way too magical and depends on the import order et al., so
# all of this really needs to be revisited.
if not self.loaded:
for app in settings.INSTALLED_APPS:
try:
provider_module = importlib.import_module(
app + '.provider'
)
except ImportError:
pass
else:
for cls in getattr(
provider_module, 'provider_classes', []
):
self.register(cls)
self.loaded = True
registry = ProviderRegistry()
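# Hedged usage sketch with a stub provider class; real providers live in each
# installed app's `provider` module and subclass allauth's Provider base class.
class _StubProvider:
    id = 'stub'
    name = 'Stub'
    def __init__(self, request=None):
        self.request = request
def _demo_registry():
    demo = ProviderRegistry()
    demo.register(_StubProvider)
    demo.loaded = True  # skip the INSTALLED_APPS scan performed by load()
    assert list(demo.as_choices()) == [('stub', 'Stub')]
    return demo.by_id('stub')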
| 31.814815
| 79
| 0.579162
|
e7b5a979efcd5b616bf343d5f67bf2f6b3fba3dc
| 1,855
|
py
|
Python
|
userbot/plugins/zip.py
|
gamerfuckerofficial/userbot
|
887332ab492a2deb6152257f0c169a895234eb7b
|
[
"MIT"
] | 3
|
2020-09-04T09:34:51.000Z
|
2020-09-04T09:39:26.000Z
|
userbot/plugins/zip.py
|
gamerfuckerofficial/userbot
|
887332ab492a2deb6152257f0c169a895234eb7b
|
[
"MIT"
] | null | null | null |
userbot/plugins/zip.py
|
gamerfuckerofficial/userbot
|
887332ab492a2deb6152257f0c169a895234eb7b
|
[
"MIT"
] | null | null | null |
""" command: .compress """
from telethon import events
import asyncio
import zipfile
from pySmartDL import SmartDL
from datetime import datetime
import os
import time  # needed for time.time() in the download progress callback
from uniborg.util import admin_cmd, humanbytes, progress, time_formatter
@borg.on(admin_cmd("compress"))
async def _(event):
if event.fwd_from:
return
if not event.is_reply:
await event.edit("Reply to a file to compress it.")
return
mone = await event.edit("Processing ...")
if not os.path.isdir(Config.TMP_DOWNLOAD_DIRECTORY):
os.makedirs(Config.TMP_DOWNLOAD_DIRECTORY)
if event.reply_to_msg_id:
reply_message = await event.get_reply_message()
try:
c_time = time.time()
downloaded_file_name = await borg.download_media(
reply_message,
Config.TMP_DOWNLOAD_DIRECTORY,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, mone, c_time, "trying to download")
)
)
directory_name = downloaded_file_name
await event.edit(downloaded_file_name)
except Exception as e: # pylint:disable=C0103,W0703
await mone.edit(str(e))
zipfile.ZipFile(directory_name + '.zip', 'w', zipfile.ZIP_DEFLATED).write(directory_name)
await borg.send_file(
event.chat_id,
directory_name + ".zip",
caption="Zipped By SnapDragon",
force_document=True,
allow_cache=False,
reply_to=event.message.id,
)
await event.edit("DONE!!!")
await asyncio.sleep(7)
await event.delete()
def zipdir(path, ziph):
# ziph is zipfile handle
for root, dirs, files in os.walk(path):
for file in files:
ziph.write(os.path.join(root, file))
os.remove(os.path.join(root, file))
| 32.54386
| 93
| 0.636119
|
b7e96d34789adeb1edca8e8cd0c68063111db49f
| 500
|
py
|
Python
|
coupon/migrations/0002_auto_20181110_0015.py
|
oereo/Hackton_8th_test
|
786bc804f07d9b3b3c1e7785e70b83c570f37450
|
[
"MIT"
] | 3
|
2020-09-22T00:24:17.000Z
|
2020-10-11T03:44:57.000Z
|
coupon/migrations/0002_auto_20181110_0015.py
|
oereo/Hackton_8th_test
|
786bc804f07d9b3b3c1e7785e70b83c570f37450
|
[
"MIT"
] | 11
|
2020-09-21T19:04:39.000Z
|
2020-10-19T18:08:14.000Z
|
coupon/migrations/0002_auto_20181110_0015.py
|
oereo/Hackton_8th_test
|
786bc804f07d9b3b3c1e7785e70b83c570f37450
|
[
"MIT"
] | 1
|
2020-09-22T00:24:23.000Z
|
2020-09-22T00:24:23.000Z
|
# Generated by Django 2.1.3 on 2018-11-10 00:15
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('coupon', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='coupon',
name='amount',
field=models.IntegerField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(100000)]),
),
]
| 25
| 146
| 0.654
|
07cb79e3bd27a674831ae87b7d11cb2bda8f426a
| 760
|
py
|
Python
|
src/schema/question/response_example/delete.py
|
week-with-me/fastapi-mongodb
|
c1b319a5f93a4a5c9c7800506fd7a1313de38ac1
|
[
"MIT"
] | 1
|
2021-12-21T15:01:28.000Z
|
2021-12-21T15:01:28.000Z
|
src/schema/question/response_example/delete.py
|
week-with-me/fastapi-mongodb
|
c1b319a5f93a4a5c9c7800506fd7a1313de38ac1
|
[
"MIT"
] | null | null | null |
src/schema/question/response_example/delete.py
|
week-with-me/fastapi-mongodb
|
c1b319a5f93a4a5c9c7800506fd7a1313de38ac1
|
[
"MIT"
] | null | null | null |
from src.schema.response import DeleteResponseModel, ErrorResponseModel
delete_response_example = {
"200": {
"model": DeleteResponseModel,
"description": "성공",
"content": {"application/json": {"example": {"detail": "Success"}}},
},
"400": {
"model": ErrorResponseModel,
"description": "유효하지 않은 형태의 ObjectId 요청",
"content": {
"application/json": {
"example": {"detail": "ObjectId 1234 is Invalid"}
}
},
},
"404": {
"model": ErrorResponseModel,
"description": "존재하지 않는 엔티티",
"content": {
"application/json": {
"example": {"detail": "ObjectId 1234 Not Found"}
}
},
},
}
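# Hedged usage sketch: attach the mapping above to a FastAPI route so the
# 200/400/404 shapes appear in the generated OpenAPI docs. The router, path
# and handler below are hypothetical and not taken from this project.
from fastapi import APIRouter
_demo_router = APIRouter()
@_demo_router.delete("/questions/{object_id}", responses=delete_response_example)
async def _demo_delete_question(object_id: str):
    return {"detail": "Success"}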
| 27.142857
| 76
| 0.502632
|
43a99b9358c8bd59db632bbdb8facdc86f1d5126
| 4,790
|
py
|
Python
|
tests/unit/form_tests/test_query.py
|
campagnola/pynwb
|
b3f1034909ac4462378e79d0e438dc5b803e5fbf
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
tests/unit/form_tests/test_query.py
|
campagnola/pynwb
|
b3f1034909ac4462378e79d0e438dc5b803e5fbf
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
tests/unit/form_tests/test_query.py
|
campagnola/pynwb
|
b3f1034909ac4462378e79d0e438dc5b803e5fbf
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
import unittest2 as unittest
import os
from h5py import File
import numpy as np
from pynwb.form.query import FORMDataset, Query
from pynwb.form.array import SortedArray, LinSpace
class AbstractQueryTest(unittest.TestCase):
def getDataset(self):
raise unittest.SkipTest('getDataset must be implemented')
def setUp(self):
self.dset = self.getDataset()
self.wrapper = FORMDataset(self.dset) # noqa: F405
def test_get_dataset(self):
array = self.wrapper.dataset
self.assertIsInstance(array, SortedArray) # noqa: F405
def test___gt__(self):
'''
Test wrapper greater than magic method
'''
q = self.wrapper > 5
self.assertIsInstance(q, Query) # noqa: F405
result = q.evaluate()
expected = [False, False, False, False, False,
False, True, True, True, True]
expected = slice(6, 10)
self.assertEqual(result, expected)
def test___ge__(self):
'''
Test wrapper greater than or equal magic method
'''
q = self.wrapper >= 5
self.assertIsInstance(q, Query) # noqa: F405
result = q.evaluate()
expected = [False, False, False, False, False,
True, True, True, True, True]
expected = slice(5, 10)
self.assertEqual(result, expected)
def test___lt__(self):
'''
Test wrapper less than magic method
'''
q = self.wrapper < 5
self.assertIsInstance(q, Query) # noqa: F405
result = q.evaluate()
expected = [True, True, True, True, True,
False, False, False, False, False]
expected = slice(0, 5)
self.assertEqual(result, expected)
def test___le__(self):
'''
Test wrapper less than or equal magic method
'''
q = self.wrapper <= 5
self.assertIsInstance(q, Query) # noqa: F405
result = q.evaluate()
expected = [True, True, True, True, True,
True, False, False, False, False]
expected = slice(0, 6)
self.assertEqual(result, expected)
def test___eq__(self):
'''
Test wrapper equals magic method
'''
q = self.wrapper == 5
self.assertIsInstance(q, Query) # noqa: F405
result = q.evaluate()
expected = [False, False, False, False, False,
True, False, False, False, False]
expected = 5
self.assertTrue(np.array_equal(result, expected))
def test___ne__(self):
'''
Test wrapper not equal magic method
'''
q = self.wrapper != 5
self.assertIsInstance(q, Query) # noqa: F405
result = q.evaluate()
expected = [True, True, True, True, True,
False, True, True, True, True]
expected = [slice(0, 5), slice(6, 10)]
self.assertTrue(np.array_equal(result, expected))
def test___getitem__(self):
'''
Test wrapper getitem using slice
'''
result = self.wrapper[0:5]
expected = [0, 1, 2, 3, 4]
self.assertTrue(np.array_equal(result, expected))
def test___getitem__query(self):
'''
Test wrapper getitem using query
'''
q = self.wrapper < 5
result = self.wrapper[q]
expected = [0, 1, 2, 3, 4]
self.assertTrue(np.array_equal(result, expected))
class SortedQueryTest(AbstractQueryTest):
path = 'SortedQueryTest.h5'
def getDataset(self):
self.f = File(self.path, 'w')
self.input = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
self.d = self.f.create_dataset('dset', data=self.input)
return SortedArray(self.d) # noqa: F405
def tearDown(self):
self.f.close()
if os.path.exists(self.path):
os.remove(self.path)
class LinspaceQueryTest(AbstractQueryTest):
path = 'LinspaceQueryTest.h5'
def getDataset(self):
return LinSpace(0, 10, 1) # noqa: F405
class CompoundQueryTest(unittest.TestCase):
def getM(self):
return SortedArray(np.arange(10, 20, 1))
def getN(self):
return SortedArray(np.arange(10.0, 20.0, 0.5))
def setUp(self):
self.m = FORMDataset(self.getM())
self.n = FORMDataset(self.getN())
@unittest.skip('not implemented')
def test_map(self):
q = self.m == (12, 16) # IN operation
q.evaluate() # [2,3,4,5]
q.evaluate(False) # RangeResult(2,6)
r = self.m[q] # noqa: F841
r = self.m[q.evaluate()] # noqa: F841
r = self.m[q.evaluate(False)] # noqa: F841
def tearDown(self):
pass
| 29.9375
| 65
| 0.561169
|
cdb89eff064f321e499370ce569ad221646eb697
| 526
|
py
|
Python
|
app/logic/benchmark/migrations/0004_auto_20151209_2112.py
|
imvu/bluesteel
|
ab52133249a693b3cd2d8593c5d47408a3b0fce6
|
[
"MIT"
] | 10
|
2017-01-13T06:28:04.000Z
|
2020-11-18T13:00:26.000Z
|
app/logic/benchmark/migrations/0004_auto_20151209_2112.py
|
imvu/bluesteel
|
ab52133249a693b3cd2d8593c5d47408a3b0fce6
|
[
"MIT"
] | null | null | null |
app/logic/benchmark/migrations/0004_auto_20151209_2112.py
|
imvu/bluesteel
|
ab52133249a693b3cd2d8593c5d47408a3b0fce6
|
[
"MIT"
] | 2
|
2018-03-29T14:10:53.000Z
|
2019-11-20T08:21:57.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('benchmark', '0003_benchmarkexecutionentry_worker'),
]
operations = [
migrations.AlterField(
model_name='benchmarkexecutionentry',
name='worker',
field=models.ForeignKey(related_name='benchmark_exec_worker', to='bluesteelworker.WorkerEntry'),
preserve_default=True,
),
]
| 25.047619
| 108
| 0.653992
|
39497b8f007f11d431b7b01bb4ecdeed424cb154
| 1,902
|
py
|
Python
|
examples/sparkles.py
|
krishols/designer
|
8f80b3309802d16d8577280274e0c9fa02db306b
|
[
"MIT"
] | 1
|
2022-01-05T19:51:13.000Z
|
2022-01-05T19:51:13.000Z
|
examples/sparkles.py
|
krishols/designer
|
8f80b3309802d16d8577280274e0c9fa02db306b
|
[
"MIT"
] | 24
|
2021-08-13T17:09:15.000Z
|
2022-01-05T16:12:09.000Z
|
examples/sparkles.py
|
krishols/designer
|
8f80b3309802d16d8577280274e0c9fa02db306b
|
[
"MIT"
] | null | null | null |
from designer import *
import math
import random
DECELERATION = .5
Spark = {'image': DesignerObject, 'velocity': float}
World = {'sparks': [Spark]}
def create_world() -> World:
return {
'sparks': []
}
def create_spark(x: int, y: int) -> Spark:
color = random.choice(['red', 'yellow', 'orange', 'blue', 'green', 'purple'])
spark = circle(color, 5)
# Partially transparent
spark['alpha'] = .5
# Rotate randomly 360 degrees
spark['angle'] = random.randint(0, 360)
# Move to the mouse location
spark['x'] = x
spark['y'] = y
# Random velocity between 7 and 10
velocity = random.randint(7, 10)
return {'image': spark, 'velocity': velocity}
def make_sparks(world: World):
# Get the current mouse x/y
x = get_mouse_x()
y = get_mouse_y()
# Create spark at that location
world['sparks'].append(create_spark(x, y))
def move_sparks(world: World):
# For each spark
for spark in world['sparks']:
# Calculate their angle in radians (0 to 2pi)
angle = math.radians(spark['image']['angle'])
# Get the velocity
velocity = spark['velocity']
# Increase X by cosine of the angle times velocity
spark['image']['x'] += math.cos(angle) * velocity
# Decrease Y by sine of the angle times velocity
spark['image']['y'] -= math.sin(angle) * velocity
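        # Screen y grows downward, so subtracting the sine term moves the spark up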
# Decrease velocity by the deceleration amount
spark['velocity'] -= DECELERATION
def delete_stopped_sparks(world: World):
kept = []
for spark in world['sparks']:
# Is the spark still moving?
if spark['velocity'] > 0:
# Keep this spark
kept.append(spark)
# Update sparks list
world['sparks'] = kept
when('starting', create_world)
when('updating', make_sparks)
when('updating', move_sparks)
when('updating', delete_stopped_sparks)
start()
| 26.416667
| 81
| 0.619874
|
e4c28559c9681fffadf76b260e0eeb6981958cb9
| 625
|
py
|
Python
|
hknweb/elections/views.py
|
anthonymaltsev/hknweb
|
7490fbb166208ba14463445ff0149292ca3c37ba
|
[
"MIT"
] | 20
|
2018-01-07T02:15:43.000Z
|
2021-09-15T04:25:50.000Z
|
hknweb/elections/views.py
|
anthonymaltsev/hknweb
|
7490fbb166208ba14463445ff0149292ca3c37ba
|
[
"MIT"
] | 292
|
2018-02-01T18:31:18.000Z
|
2022-03-30T22:15:08.000Z
|
hknweb/elections/views.py
|
anthonymaltsev/hknweb
|
7490fbb166208ba14463445ff0149292ca3c37ba
|
[
"MIT"
] | 85
|
2017-11-13T06:33:13.000Z
|
2022-03-30T20:32:55.000Z
|
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.template import loader
from django.contrib import messages

from .forms import OfficerForm


def index(request):
    form = OfficerForm(request.POST or None)
    if request.method == "POST":
        if form.is_valid():
            form.assignGroups()
            messages.info(request, "New Officers have been added")
            return redirect("/elections")
        else:
            return render(request, "elections/index.html", {"form": OfficerForm(None)})
    return render(request, "elections/index.html", {"form": form})
| 31.25
| 87
| 0.68
|
4619c98b023f8f23b710ae13f135d2ecde3432f8
| 19,056
|
py
|
Python
|
test/acceptance/features/steps/steps.py
|
DuncanDoyle/service-binding-operator
|
e7594230afd28fb178bdd31c6d7a2d055af003a3
|
[
"Apache-2.0"
] | null | null | null |
test/acceptance/features/steps/steps.py
|
DuncanDoyle/service-binding-operator
|
e7594230afd28fb178bdd31c6d7a2d055af003a3
|
[
"Apache-2.0"
] | null | null | null |
test/acceptance/features/steps/steps.py
|
DuncanDoyle/service-binding-operator
|
e7594230afd28fb178bdd31c6d7a2d055af003a3
|
[
"Apache-2.0"
] | null | null | null |
# @mark.steps
# ----------------------------------------------------------------------------
# STEPS:
# ----------------------------------------------------------------------------
import ipaddress
import json
import os
import re
import time
import polling2
import parse
import binascii
from behave import given, register_type, then, when, step
from dboperator import DbOperator
from etcdcluster import EtcdCluster
from etcdoperator import EtcdOperator
from knative_serving import KnativeServing
from namespace import Namespace
from nodejs_application import NodeJSApp
from openshift import Openshift
from postgres_db import PostgresDB
from quarkus_application import QuarkusApplication
from quarkus_s2i_builder_image import QuarkusS2IBuilderImage
from serverless_operator import ServerlessOperator
from service_binding import ServiceBinding
from servicebindingoperator import Servicebindingoperator
# STEP
@given(u'Namespace "{namespace_name}" is used')
def given_namespace_is_used(context, namespace_name):
namespace = Namespace(namespace_name)
if not namespace.is_present():
print("Namespace is not present, creating namespace: {}...".format(namespace_name))
assert namespace.create(), f"Unable to create namespace '{namespace_name}'"
print("Namespace {} is created!!!".format(namespace_name))
context.namespace = namespace
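# Usage sketch (hypothetical feature text, not taken from this repository): a scenario would
# trigger the step above with a Gherkin line such as:
#   Given Namespace "my-test-namespace" is used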
# STEP
@given(u'Namespace [{namespace_env}] is used')
def given_namespace_from_env_is_used(context, namespace_env):
env = os.getenv(namespace_env)
assert env is not None, f"{namespace_env} environment variable needs to be set"
print(f"{namespace_env} = {env}")
given_namespace_is_used(context, env)
# STEP
sbo_is_running_in_namespace_step = u'Service Binding Operator is running in "{operator_namespace}" namespace'
@given(sbo_is_running_in_namespace_step)
@when(sbo_is_running_in_namespace_step)
def sbo_is_running_in_namespace(context, operator_namespace):
"""
Checks if the SBO is up and running in the given namespace
"""
sb_operator = Servicebindingoperator(namespace=operator_namespace)
assert sb_operator.is_running(), "Service Binding Operator is not running"
print("Service binding operator is running!!!")
# STEP
sbo_is_running_in_namespace_from_env_step = u'Service Binding Operator is running in [{operator_namespace_env}] namespace'
@given(sbo_is_running_in_namespace_from_env_step)
@when(sbo_is_running_in_namespace_from_env_step)
def sbo_is_running_in_namespace_from_env(context, operator_namespace_env):
env = os.getenv(operator_namespace_env)
assert env is not None, f"{operator_namespace_env} environment variable needs to be set"
print(f"{operator_namespace_env} = {env}")
sbo_is_running_in_namespace(context, env)
# STEP
sbo_is_running_step = u'Service Binding Operator is running'
@given(sbo_is_running_step)
@when(sbo_is_running_step)
def sbo_is_running(context):
if "sbo_namespace" in context:
sbo_is_running_in_namespace(context, context.sbo_namespace)
else:
assert context.namespace is not None, "Namespace is not set in context"
sbo_is_running_in_namespace(context, context.namespace.name)
# STEP
@given(u'PostgreSQL DB operator is installed')
def given_db_operator_is_installed(context):
db_operator = DbOperator()
if not db_operator.is_running():
print("DB operator is not installed, installing...")
assert db_operator.install_catalog_source(), "Unable to install DB catalog source"
assert db_operator.install_operator_subscription(), "Unable to install DB operator subscription"
assert db_operator.is_running(wait=True), "Unable to launch DB operator"
print("PostgresSQL DB operator is running!!!")
# STEP
nodejs_app_imported_from_image_is_running_step = u'Nodejs application "{application_name}" imported from "{application_image}" image is running'
@given(nodejs_app_imported_from_image_is_running_step)
@when(nodejs_app_imported_from_image_is_running_step)
def nodejs_app_imported_from_image_is_running(context, application_name, application_image):
namespace = context.namespace
application = NodeJSApp(application_name, namespace.name, application_image)
if not application.is_running():
print("application is not running, trying to import it")
assert application.install(), f"Unable to install application '{application_name}' from image '{application_image}'"
assert application.is_running(wait=True), f"Unable to start application '{application_name}' from image '{application_image}'"
print("Nodejs application is running!!!")
context.application = application
context.application_type = "nodejs"
# STEP
app_endpoint_is_available_step = u'Application endpoint "{endpoint}" is available'
@given(app_endpoint_is_available_step)
@when(app_endpoint_is_available_step)
@then(app_endpoint_is_available_step)
def app_endpoint_is_available(context, endpoint):
application = context.application
assert application.get_response_from_api(endpoint=endpoint) is not None, f'Application endpoint "{endpoint}" is not available'
# STEP
default_nodejs_app_imported_from_image_is_running_step = u'Imported Nodejs application "{application_name}" is running'
@given(default_nodejs_app_imported_from_image_is_running_step)
@when(default_nodejs_app_imported_from_image_is_running_step)
def default_nodejs_app_imported_from_image_is_running(context, application_name):
nodejs_app_imported_from_image_is_running(context, application_name, application_image="quay.io/pmacik/nodejs-rest-http-crud")
app_endpoint_is_available(context, endpoint="/api/status/dbNameCM")
# STEP
imported_nodejs_app_is_not_running_step = u'Imported Nodejs application "{application_name}" is not running'
@given(imported_nodejs_app_is_not_running_step)
@when(imported_nodejs_app_is_not_running_step)
def imported_nodejs_app_is_not_running(context, application_name):
namespace = context.namespace
application = NodeJSApp(application_name, namespace.name)
assert application.is_running() is False, "Application is running already"
# STEP
db_instance_is_running_step = u'DB "{db_name}" is running'
@given(db_instance_is_running_step)
@when(db_instance_is_running_step)
def db_instance_is_running(context, db_name):
namespace = context.namespace
db = PostgresDB(db_name, namespace.name)
if not db.is_running():
assert db.create(), f"Unable to create DB '{db_name}'"
assert db.is_running(wait=True), f"Unable to launch DB '{db_name}'"
print(f"DB {db_name} is running!!!")
# STEP
sbr_is_applied_step = u'Service Binding is applied'
@given(sbr_is_applied_step)
@when(sbr_is_applied_step)
@then(sbr_is_applied_step)
def sbr_is_applied(context):
sbr_yaml = context.text
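# Extract metadata.name from the YAML by taking the first 'name: ...' occurrence and stripping the key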
metadata_name = re.sub(r'.*: ', '', re.search(r'name: .*', sbr_yaml).group(0))
context.sbr_name = metadata_name
sbr = ServiceBinding()
if "application" in context and "application_type" in context:
application = context.application
if context.application_type == "nodejs":
context.application_original_generation = application.get_observed_generation()
context.application_original_pod_name = application.get_running_pod_name()
elif context.application_type == "knative":
context.application_original_generation = context.application.get_generation()
else:
assert False, f"Invalid application type in context.application_type={context.application_type}, valid are 'nodejs', 'knative'"
assert sbr.create(sbr_yaml) is not None, "Service binding not created"
# STEP
@then(u'application should be re-deployed')
def then_application_redeployed(context):
application = context.application
if context.application_type == "nodejs":
application_pod_name = application.get_redeployed_pod_name(context.application_original_pod_name)
assert application_pod_name is not None, "There is no running application pod different from the original one before re-deployment."
elif context.application_type == "knative":
assert context.application_original_generation is not None, "application is never deployed"
application_rev_name = application.get_rev_name_redeployed_by_generation(context.application_original_generation)
assert application_rev_name is not None, "application is not redeployed"
else:
assert False, f"Invalid application type in context.application_type={context.application_type}, valid are 'nodejs', 'knative'"
# STEP
@then(u'application should be connected to the DB "{db_name}"')
def then_app_is_connected_to_db(context, db_name):
application = context.application
db_endpoint = "/api/status/dbNameCM"
app_db_name = application.get_response_from_api(wait=True, endpoint=db_endpoint)
assert app_db_name == db_name, f"Unexpected response from API ('{db_endpoint}'): '{app_db_name}'. Expected is '{db_name}'"
# STEP
@then(u'jsonpath "{json_path}" of Service Binding "{sbr_name}" should be changed to "{json_value_regex}"')
def then_sbo_jsonpath_is(context, json_path, sbr_name, json_value_regex):
openshift = Openshift()
assert openshift.search_resource_in_namespace(
"servicebindings", sbr_name, context.namespace.name) is not None, f"Service Binding '{sbr_name}' does not exist in namespace '{context.namespace.name}'"
result = openshift.get_resource_info_by_jsonpath("sbr", sbr_name, context.namespace.name, json_path, wait=True, timeout=600)
assert result is not None, f"Invalid result for SBO jsonpath: {result}."
assert re.fullmatch(json_value_regex, result) is not None, f"SBO jsonpath result \"{result}\" does not match \"{json_value_regex}\""
# STEP
@step(u'jq "{jq_expression}" of Service Binding "{sbr_name}" should be changed to "{json_value}"')
def sbo_jq_is(context, jq_expression, sbr_name, json_value):
openshift = Openshift()
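# Poll every 5s (up to 800s) until the jq expression evaluated against the ServiceBinding equals
# the expected JSON value; JSON decode errors are ignored and the poll retried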
polling2.poll(lambda: json.loads(
openshift.get_resource_info_by_jq("servicebinding", sbr_name, context.namespace.name, jq_expression,
wait=False)) == json_value, step=5, timeout=800,
ignore_exceptions=(json.JSONDecodeError,))
@given(u'Openshift Serverless Operator is running')
def given_serverless_operator_is_running(context):
"""
Checks if the serverless operator is up and running
"""
serverless_operator = ServerlessOperator()
if not serverless_operator.is_running():
print("Serverless operator is not installed, installing...")
assert serverless_operator.install_operator_subscription() is True, "serverless operator subscription is not installed"
assert serverless_operator.is_running(wait=True) is True, "serverless operator not installed"
context.serverless_operator = serverless_operator
@given(u'Quarkus s2i builder image is present')
def given_quarkus_builder_image_is_present(context):
"""
Checks if quarkus s2i builder image is present
"""
builder_image = QuarkusS2IBuilderImage()
if not builder_image.is_present():
print("Builder image is not present, importing and patching...")
assert builder_image.import_and_patch() is True, "Quarkus image import from image registry and patch failed"
assert builder_image.is_present() is True, "Quarkus image is not present"
@given(u'Knative serving is running')
def given_knative_serving_is_running(context):
"""
creates a KnativeServing object to install Knative Serving using the OpenShift Serverless Operator.
"""
knative_namespace = Namespace("knative-serving")
assert knative_namespace.create() is True, "Knative serving namespace not created"
assert Namespace(context.namespace.name).switch_to() is True, "Unable to switch to the context namespace"
knative_serving = KnativeServing(namespace=knative_namespace.name)
if not knative_serving.is_present():
print("knative serving is not present, create knative serving")
assert knative_serving.create() is True, "Knative serving is not created"
assert knative_serving.is_present() is True, "Knative serving is not present"
# STEP
quarkus_app_is_imported_step = u'Quarkus application "{application_name}" is imported as Knative service'
@given(quarkus_app_is_imported_step)
@when(quarkus_app_is_imported_step)
def quarkus_app_is_imported_as_knative_service(context, application_name):
namespace = context.namespace
application = QuarkusApplication(application_name, namespace.name)
if not application.is_imported():
print("application is not imported, trying to import it")
assert application.install() is True, "Quarkus application is not installed"
assert application.is_imported(wait=True) is True, "Quarkus application is not imported"
context.application = application
context.application_type = "knative"
# STEP
@then(u'"{app_name}" deployment must contain SBR name "{sbr_name}"')
def then_envFrom_contains(context, app_name, sbr_name):
time.sleep(60)
openshift = Openshift()
result = openshift.get_deployment_envFrom_info(app_name, context.namespace.name)
# Expected result from 'oc' (openshift client) v4.5
expected_result_oc_45 = f'secretRef:map[name:{sbr_name}]'
# Expected result from 'oc' (openshift client) v4.6+
expected_result_oc_46 = f'{{"secretRef":{{"name":"{sbr_name}"}}}}'
assert re.search(re.escape(expected_result_oc_45), result) is not None or re.search(re.escape(expected_result_oc_46), result) is not None, \
f'\n{app_name} deployment should contain secretRef: {sbr_name} \nActual secretRef: {result}'
# STEP
@then(u'deployment must contain intermediate secret "{intermediate_secret_name}"')
def envFrom_contains_intermediate_secret_name(context, intermediate_secret_name):
assert context.application.get_deployment_with_intermediate_secret(
intermediate_secret_name) is not None, f"There is no deployment with intermediate secret {intermediate_secret_name}"
# STEP
@given(u'OLM Operator "{backend_service}" is running')
def operator_manifest_installed(context, backend_service):
openshift = Openshift()
_ = openshift.oc_apply_yaml_file(os.path.join(os.getcwd(), "test/acceptance/resources/", backend_service + ".operator.manifest.yaml"))
@parse.with_pattern(r'.*')
def parse_nullable_string(text):
return text
register_type(NullableString=parse_nullable_string)
# STEP
@step(u'Secret "{secret_name}" contains "{secret_key}" key with value "{secret_value:NullableString}"')
def check_secret_key_value(context, secret_name, secret_key, secret_value):
openshift = Openshift()
json_path = f'{{.data.{secret_key}}}'
polling2.poll(lambda: openshift.get_resource_info_by_jsonpath("secrets", secret_name, context.namespace.name,
json_path) == secret_value,
step=5, timeout=120, ignore_exceptions=(binascii.Error,))
# STEP
@then(u'Secret "{secret_name}" contains "{secret_key}" key with dynamic IP addess as the value')
def check_secret_key_with_ip_value(context, secret_name, secret_key):
openshift = Openshift()
json_path = f'{{.data.{secret_key}}}'
polling2.poll(lambda: ipaddress.ip_address(
openshift.get_resource_info_by_jsonpath("secrets", secret_name, context.namespace.name, json_path)),
step=5, timeout=120, ignore_exceptions=(ValueError,))
# STEP
@given(u'Backend service CSV is installed')
@given(u'The Custom Resource Definition is present')
@given(u'The Custom Resource is present')
@when(u'The Custom Resource is present')
@given(u'The ConfigMap is present')
@given(u'The Secret is present')
def apply_yaml(context):
openshift = Openshift()
yaml = context.text
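# Pull metadata.name out of the YAML with the same first-'name:'-line regex trick used for Service Bindings above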
metadata_name = re.sub(r'.*: ', '', re.search(r'name: .*', yaml).group(0))
output = openshift.oc_apply(yaml)
result = re.search(rf'.*{metadata_name}.*(created|unchanged|configured)', output)
assert result is not None, f"Unable to apply YAML for CR '{metadata_name}': {output}"
@then(u'Secret "{secret_ref}" has been injected in to CR "{cr_name}" of kind "{crd_name}" at path "{json_path}"')
def verify_injected_secretRef(context, secret_ref, cr_name, crd_name, json_path):
openshift = Openshift()
polling2.poll(lambda: openshift.get_resource_info_by_jsonpath(crd_name, cr_name, context.namespace.name, json_path) == secret_ref,
step=5, timeout=400)
@given(u'Etcd operator running')
def etcd_operator_is_running(context):
"""
Checks if the etcd operator is up and running
"""
etcd_operator = EtcdOperator()
if not etcd_operator.is_running():
print("Etcd operator is not installed, installing...")
assert etcd_operator.install_operator_subscription() is True, "etcd operator subscription is not installed"
assert etcd_operator.is_running(wait=True) is True, "etcd operator not installed"
context.etcd_operator = etcd_operator
@given(u'Etcd cluster "{etcd_name}" is running')
def etc_cluster_is_running(context, etcd_name):
"""
Checks if the etcd cluster is created
"""
namespace = context.namespace
etcd_cluster = EtcdCluster(etcd_name, namespace.name)
if not etcd_cluster.is_present():
print("etcd cluster not present, creating etcd cluster")
assert etcd_cluster.create() is True, "etcd cluster is not created"
assert etcd_cluster.is_present() is True, "etcd cluster is not present"
@when(u'Invalid Service Binding is applied')
def invalid_sbr_is_applied(context):
sbr = ServiceBinding()
# Get resource version of sbr if sbr is available
if "sbr_name" in context:
json_path = "{.metadata.resourceVersion}"
rv = sbr.get_servicebinding_info_by_jsonpath(context.sbr_name, context.namespace.name, json_path)
context.resource_version = rv
context.expected_error = sbr.attempt_to_create(context.text)
@then(u'Error message "{err_msg}" is thrown')
def validate_error(context, err_msg):
search = re.search(rf'.*{err_msg}.*', context.expected_error)
assert search is not None, f"Actual error: '{context.expected_error}', Expected error: '{err_msg}'"
@then(u'Service Binding "{sb_name}" is not persistent in the cluster')
def validate_absent_sb(context, sb_name):
openshift = Openshift()
output = openshift.search_resource_in_namespace("servicebindings", sb_name, context.namespace.name)
assert output is None, f"Service Binding {sb_name} is present in namespace '{context.namespace.name}'"
@then(u'Service Binding "{sb_name}" is not updated')
def validate_persistent_sb(context, sb_name):
openshift = Openshift()
json_path = "{.metadata.resourceVersion}"
output = openshift.get_resource_info_by_jsonpath("servicebindings", sb_name, context.namespace.name, json_path)
if output == context.resource_version:
assert True
else:
assert False, "Service Binding got updated"
| 43.407745
| 160
| 0.750892
|
4d03ca7b7cce8da2eb3ab978c3e6c5324f2d28e8
| 2,626
|
py
|
Python
|
path-finding/yolov5-rpi/yolov5_tflite_image_inference.py
|
sa-y-an/open-source-autonomous-vehicle-controller
|
0cc415fb141d1b66ac45a7bf6b50add6814728fb
|
[
"MIT"
] | 3
|
2021-06-15T05:10:00.000Z
|
2021-09-05T18:07:01.000Z
|
path-finding/yolov5-rpi/yolov5_tflite_image_inference.py
|
sa-y-an/open-source-autonomous-vehicle-controller
|
0cc415fb141d1b66ac45a7bf6b50add6814728fb
|
[
"MIT"
] | 1
|
2021-06-07T21:05:14.000Z
|
2021-06-07T21:05:14.000Z
|
path-finding/yolov5-rpi/yolov5_tflite_image_inference.py
|
sa-y-an/open-source-autonomous-vehicle-controller
|
0cc415fb141d1b66ac45a7bf6b50add6814728fb
|
[
"MIT"
] | 9
|
2021-06-10T08:42:53.000Z
|
2022-03-28T05:46:16.000Z
|
from yolov5_tflite_inference import yolov5_tflite
import argparse
import cv2
import time
from PIL import Image, ImageOps
import numpy as np
from utils import letterbox_image, scale_coords
def detect_image(weights,image_url,img_size,conf_thres,iou_thres):
    start_time = time.time()
    #image = cv2.imread(image_url)
    image = Image.open(image_url)
    original_size = image.size[:2]
    size = (img_size,img_size)
    image_resized = letterbox_image(image,size)
    img = np.asarray(image)
    #image = ImageOps.fit(image, size, Image.ANTIALIAS)
    image_array = np.asarray(image_resized)
    normalized_image_array = image_array.astype(np.float32) / 255.0
    yolov5_tflite_obj = yolov5_tflite(weights,img_size,conf_thres,iou_thres)
    result_boxes, result_scores, result_class_names = yolov5_tflite_obj.detect(normalized_image_array)
    if len(result_boxes) > 0:
        result_boxes = scale_coords(size,np.array(result_boxes),(original_size[1],original_size[0]))
        font = cv2.FONT_HERSHEY_SIMPLEX
        # org
        org = (20, 40)
        # fontScale
        fontScale = 0.5
        # Green color in BGR
        color = (0, 255, 0)
        # Line thickness of 1 px
        thickness = 1
        for i,r in enumerate(result_boxes):
            org = (int(r[0]),int(r[1]))
            cv2.rectangle(img, (int(r[0]),int(r[1])), (int(r[2]),int(r[3])), (255,0,0), 1)
            cv2.putText(img, str(int(100*result_scores[i])) + '% ' + str(result_class_names[i]), org, font,
                        fontScale, color, thickness, cv2.LINE_AA)
        save_result_filepath = image_url.split('/')[-1].split('.')[0] + 'yolov5_output.jpg'
        cv2.imwrite(save_result_filepath,img[:,:,::-1])
    end_time = time.time()
    print('FPS:',1/(end_time-start_time))
    print('Total Time Taken:',end_time-start_time)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('-w','--weights', type=str, default='yolov5s-fp16.tflite', help='model.tflite path(s)')
    parser.add_argument('-i','--img_path', type=str, required=True, help='image path')
    parser.add_argument('--img_size', type=int, default=416, help='image size')
    parser.add_argument('--conf_thres', type=float, default=0.25, help='object confidence threshold')
    parser.add_argument('--iou_thres', type=float, default=0.45, help='IOU threshold for NMS')
    opt = parser.parse_args()
    print(opt)
    detect_image(opt.weights,opt.img_path,opt.img_size,opt.conf_thres,opt.iou_thres)
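# Example invocation (a sketch; the weights file and image path are illustrative assumptions):
#   python yolov5_tflite_image_inference.py -w yolov5s-fp16.tflite -i sample.jpg --img_size 416 --conf_thres 0.25 --iou_thres 0.45
# or, calling the function directly from Python:
#   detect_image('yolov5s-fp16.tflite', 'sample.jpg', 416, 0.25, 0.45)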
| 30.894118
| 111
| 0.642803
|
fa9a5030c071981317dd5365e6e67a4b53750cee
| 9,752
|
py
|
Python
|
docs/notebooks/expected_improvement.pct.py
|
uri-granta/trieste
|
94678eb6c68e26e0db60707de7f43855cf1e826e
|
[
"Apache-2.0"
] | 119
|
2020-10-06T16:27:05.000Z
|
2022-03-28T00:27:18.000Z
|
docs/notebooks/expected_improvement.pct.py
|
uri-granta/trieste
|
94678eb6c68e26e0db60707de7f43855cf1e826e
|
[
"Apache-2.0"
] | 275
|
2020-10-07T22:32:53.000Z
|
2022-03-31T15:57:44.000Z
|
docs/notebooks/expected_improvement.pct.py
|
uri-granta/trieste
|
94678eb6c68e26e0db60707de7f43855cf1e826e
|
[
"Apache-2.0"
] | 30
|
2020-10-08T23:00:01.000Z
|
2022-02-25T17:04:22.000Z
|
# %% [markdown]
# # Noise-free optimization with Expected Improvement
# %%
import numpy as np
import tensorflow as tf
np.random.seed(1793)
tf.random.set_seed(1793)
# %% [markdown]
# ## Describe the problem
# In this example, we look to find the minimum value of the two-dimensional Branin function over the hypercube $[0, 1]^2$. We can represent the search space using a `Box`, and plot contours of the Branin over this space.
#
#
# %%
from trieste.objectives import scaled_branin, SCALED_BRANIN_MINIMUM
from trieste.objectives.utils import mk_observer
from util.plotting_plotly import plot_function_plotly
from trieste.space import Box
search_space = Box([0, 0], [1, 1])
fig = plot_function_plotly(scaled_branin, search_space.lower, search_space.upper, grid_density=20)
fig.update_layout(height=400, width=400)
fig.show()
# %% [markdown]
# ## Sample the observer over the search space
#
# Sometimes we don't have direct access to the objective function. We only have an observer that indirectly observes it. In _Trieste_, an observer can output a number of datasets. In our case, we only have one dataset, the objective. We can convert a function with `branin`'s signature to a single-output observer using `mk_observer`.
#
# The optimization procedure will benefit from having some starting data from the objective function to base its search on. We sample a five point space-filling design from the search space and evaluate it with the observer. For continuous search spaces, Trieste supports random, Sobol and Halton initial designs.
# %%
import trieste
observer = trieste.objectives.utils.mk_observer(scaled_branin)
num_initial_points = 5
initial_query_points = search_space.sample_sobol(num_initial_points)
initial_data = observer(initial_query_points)
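# A minimal sketch of the alternative initial designs mentioned above; `sample` (uniform random)
# and `sample_halton` are assumed to take the same point count as `sample_sobol`:
# random_query_points = search_space.sample(num_initial_points)
# halton_query_points = search_space.sample_halton(num_initial_points)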
# %% [markdown]
# ## Model the objective function
#
# The Bayesian optimization procedure estimates the next best points to query by using a probabilistic model of the objective. We'll use Gaussian Process (GP) regression for this, as provided by GPflow. The model will need to be trained on each step as more points are evaluated, so we'll package it with GPflow's Scipy optimizer.
#
# We put priors on the parameters of our GP model's kernel in order to stabilize model fitting. We found the priors below to be highly effective for objective functions defined over the unit hypercube and with an output standardized to have zero mean and unit variance. For objective functions with different scaling, other priors will likely be more appropriate. Our fitted model uses the maximum a posteriori estimate of these kernel parameters, as found by optimizing the kernel parameters starting from the best of `num_kernel_samples` random samples from the kernel parameter priors.
#
# If we do not specify kernel priors, then Trieste returns the maximum likelihood estimate of the kernel parameters.
# %%
import gpflow
import tensorflow_probability as tfp
from trieste.models.gpflow import GPflowModelConfig
def build_model(data):
    variance = tf.math.reduce_variance(data.observations)
    kernel = gpflow.kernels.Matern52(variance=variance, lengthscales=[0.2, 0.2])
    prior_scale = tf.cast(1.0, dtype=tf.float64)
    kernel.variance.prior = tfp.distributions.LogNormal(tf.cast(-2.0, dtype=tf.float64), prior_scale)
    kernel.lengthscales.prior = tfp.distributions.LogNormal(tf.math.log(kernel.lengthscales), prior_scale)
    gpr = gpflow.models.GPR(data.astuple(), kernel, noise_variance=1e-5)
    gpflow.set_trainable(gpr.likelihood, False)

    return GPflowModelConfig(**{
        "model": gpr,
        "model_args": {
            "num_kernel_samples": 100,
        },
        "optimizer": gpflow.optimizers.Scipy(),
        "optimizer_args": {
            "minimize_args": {"options": dict(maxiter=100)},
        },
    })
model = build_model(initial_data)
# %% [markdown]
# ## Run the optimization loop
#
# We can now run the Bayesian optimization loop by defining a `BayesianOptimizer` and calling its `optimize` method.
#
# The optimizer uses an acquisition rule to choose where in the search space to try on each optimization step. We'll use the default acquisition rule, which is Efficient Global Optimization with Expected Improvement.
#
# We'll run the optimizer for fifteen steps.
#
# The optimization loop catches errors so as not to lose progress, which means the optimization loop might not complete and the data from the last step may not exist. Here we'll handle this crudely by asking for the data regardless, using `.try_get_final_datasets()`, which will re-raise the error if one did occur. For a review of how to handle errors systematically, there is a [dedicated tutorial](recovering_from_errors.ipynb).
# %%
bo = trieste.bayesian_optimizer.BayesianOptimizer(observer, search_space)
result = bo.optimize(15, initial_data, model)
dataset = result.try_get_final_dataset()
# %% [markdown]
# ## Explore the results
#
# We can now get the best point found by the optimizer. Note this isn't necessarily the point that was last evaluated.
# %%
query_points = dataset.query_points.numpy()
observations = dataset.observations.numpy()
arg_min_idx = tf.squeeze(tf.argmin(observations, axis=0))
print(f"query point: {query_points[arg_min_idx, :]}")
print(f"observation: {observations[arg_min_idx, :]}")
# %% [markdown]
# We can visualise how the optimizer performed by plotting all the acquired observations, along with the true function values and optima, either in a two-dimensional contour plot ...
# %%
from util.plotting import plot_bo_points, plot_function_2d
_, ax = plot_function_2d(
scaled_branin, search_space.lower, search_space.upper, grid_density=30, contour=True
)
plot_bo_points(query_points, ax[0, 0], num_initial_points, arg_min_idx)
ax[0, 0].set_xlabel(r'$x_1$')
ax[0, 0].set_ylabel(r'$x_2$')
# %% [markdown]
# ... or as a three-dimensional plot
# %%
from util.plotting_plotly import add_bo_points_plotly
fig = plot_function_plotly(scaled_branin, search_space.lower, search_space.upper, grid_density=20)
fig.update_layout(height=500, width=500)
fig = add_bo_points_plotly(
x=query_points[:, 0],
y=query_points[:, 1],
z=observations[:, 0],
num_init=num_initial_points,
idx_best=arg_min_idx,
fig=fig,
)
fig.show()
# %% [markdown]
# We can also visualise how each successive point compares to the current best.
#
# We produce two plots. The left hand plot shows the observations (crosses and dots), the current best (orange line), and the start of the optimization loop (blue line). The right hand plot is the same as the previous two-dimensional contour plot, but without the resulting observations. The best point is shown in each (purple dot).
# %%
import matplotlib.pyplot as plt
from util.plotting import plot_regret
suboptimality = observations - SCALED_BRANIN_MINIMUM.numpy()
_, ax = plt.subplots(1, 2)
plot_regret(suboptimality, ax[0], num_init=num_initial_points, idx_best=arg_min_idx)
plot_bo_points(query_points, ax[1], num_init=num_initial_points, idx_best=arg_min_idx)
ax[0].set_yscale("log")
ax[0].set_ylabel("Regret")
ax[0].set_ylim(0.001, 100)
ax[0].set_xlabel("# evaluations")
# %% [markdown]
# We can visualise the model over the objective function by plotting the mean and 95% confidence intervals of its predictive distribution. Like with the data before, we can get the model with `.try_get_final_model()`.
# %%
from util.plotting_plotly import plot_gp_plotly
fig = plot_gp_plotly(
result.try_get_final_model().model, # type: ignore
search_space.lower,
search_space.upper,
grid_density=30,
)
fig = add_bo_points_plotly(
x=query_points[:, 0],
y=query_points[:, 1],
z=observations[:, 0],
num_init=num_initial_points,
idx_best=arg_min_idx,
fig=fig,
figrow=1,
figcol=1,
)
fig.show()
# %% [markdown]
# We can also inspect the model hyperparameters, and use the history to see how the length scales evolved over iterations. Note the history is saved at the *start* of each step, and as such never includes the final result, so we'll add that ourselves.
# %%
gpflow.utilities.print_summary(
result.try_get_final_model().model # type: ignore
)
variance_list = [
step.model.model.kernel.variance.numpy() # type: ignore
for step in result.history + [result.final_result.unwrap()]
]
ls_list = [
step.model.model.kernel.lengthscales.numpy() # type: ignore
for step in result.history + [result.final_result.unwrap()]
]
variance = np.array(variance_list)
ls = np.array(ls_list)
fig, ax = plt.subplots(1, 2)
ax[0].plot(variance, label="Kernel variance")
ax[0].legend(loc="upper left")
ax[0].set_xlabel("# Evaluations")
ax[0].set_xlabel("Parameter Value")
ax[1].plot(ls[:, 0], label="Kernel lengthscale 1")
ax[1].plot(ls[:, 1], label="Kernel lengthscale 2")
ax[1].legend(loc="upper left")
ax[1].set_xlabel("# Evaluations")
fig.tight_layout()
# %% [markdown]
# ## Run the optimizer for more steps
#
# If we need more iterations for better convergence, we can run the optimizer again using the data produced from the last run, as well as the model. We'll visualise the final data.
# %%
result = bo.optimize(5, result.try_get_final_dataset(), result.try_get_final_model())
dataset = result.try_get_final_dataset()
arg_min_idx = tf.squeeze(tf.argmin(dataset.observations, axis=0))
_, ax = plot_function_2d(
scaled_branin, search_space.lower, search_space.upper, grid_density=40, contour=True
)
plot_bo_points(
dataset.query_points.numpy(),
ax=ax[0, 0],
num_init=len(dataset.query_points),
idx_best=arg_min_idx,
)
ax[0, 0].set_xlabel(r'$x_1$')
ax[0, 0].set_ylabel(r'$x_2$')
# %% [markdown]
# ## LICENSE
#
# [Apache License 2.0](https://github.com/secondmind-labs/trieste/blob/develop/LICENSE)
| 38.85259
| 589
| 0.752153
|
d0f268d29a470527534beb3d22e49e7263f94e40
| 868
|
py
|
Python
|
reptile/pyn_reptile_douban_movie.py
|
LuckyYcj/Python
|
86eb55c868c31e589e5c8a3d71a69df446f71742
|
[
"CNRI-Python"
] | null | null | null |
reptile/pyn_reptile_douban_movie.py
|
LuckyYcj/Python
|
86eb55c868c31e589e5c8a3d71a69df446f71742
|
[
"CNRI-Python"
] | null | null | null |
reptile/pyn_reptile_douban_movie.py
|
LuckyYcj/Python
|
86eb55c868c31e589e5c8a3d71a69df446f71742
|
[
"CNRI-Python"
] | null | null | null |
#!/usr/bin/env python
# coding=gbk
# -*- coding : utf-8 -*-
import requests
from bs4 import BeautifulSoup
# pyquery
ua_headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.70 Safari/537.36"}
url = "https://movie.douban.com/top250"
response = requests.get(url, headers=ua_headers)
html = response.text
# print(html)
soup = BeautifulSoup(html, "html.parser") # lxml
# soup.find(condition)
# soup.find_all(condition)
# soup.select(css_selector)
movie_list = soup.find('ol', class_='grid_view')
movies = movie_list.find_all('li')
# print(movie_list)
# print(movies)
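# Rough CSS-selector form of the find/find_all calls above (a sketch; note that
# soup.select('ol.grid_view li span.title') would also return the secondary title spans,
# whereas find('span', class_='title') keeps only the first title per movie):
# titles = [tag.get_text() for tag in soup.select('ol.grid_view li span.title')]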
names = []
for movie in movies:
    movie_name = movie.find('span', class_='title').get_text()
    names.append(movie_name)

print(names)

with open('source/movies.txt', 'w') as f:
    for name in names:
        f.write(name+'\n')
| 26.30303
| 145
| 0.697005
|
40487a8d3ee3d812988d3a6e99a890931bd463d1
| 227
|
py
|
Python
|
meiduo_mall/meiduo_mall/apps/contents/views.py
|
15779500251/meiduo_project
|
f9beba0b71e6deb4d38c233e4f4bc957d89f1381
|
[
"MIT"
] | null | null | null |
meiduo_mall/meiduo_mall/apps/contents/views.py
|
15779500251/meiduo_project
|
f9beba0b71e6deb4d38c233e4f4bc957d89f1381
|
[
"MIT"
] | null | null | null |
meiduo_mall/meiduo_mall/apps/contents/views.py
|
15779500251/meiduo_project
|
f9beba0b71e6deb4d38c233e4f4bc957d89f1381
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
from django.views import View


class IndexView(View):
    """Homepage advertisements"""

    def get(self, request):
        """Serve the advertisement page"""
        return render(request, 'index.html')
| 17.461538
| 44
| 0.651982
|
1f9aa2e80718129fe5345a29d96abb94da056aa4
| 167
|
py
|
Python
|
kpm_api/__init__.py
|
kevihiiin/kpm-api
|
65d14fa380b45dc9237a7f4460c8c3db9ba2be93
|
[
"MIT"
] | null | null | null |
kpm_api/__init__.py
|
kevihiiin/kpm-api
|
65d14fa380b45dc9237a7f4460c8c3db9ba2be93
|
[
"MIT"
] | null | null | null |
kpm_api/__init__.py
|
kevihiiin/kpm-api
|
65d14fa380b45dc9237a7f4460c8c3db9ba2be93
|
[
"MIT"
] | null | null | null |
"""Top-level package for KeyPathwayMiner Python API Library."""
__author__ = """Kevin Yuan"""
__email__ = 'kevihiiin@users.noreply.github.com '
__version__ = '0.1.0'
| 27.833333
| 63
| 0.724551
|
3df9b5800ce77bf520ba117514ca7a11d498842d
| 49,971
|
py
|
Python
|
test/python/transpiler/test_pass_scheduler.py
|
yeralin/qiskit-terra
|
251930a7b5d83af121ea0f3aafb33a54a1860e14
|
[
"Apache-2.0"
] | 1
|
2019-06-04T12:23:36.000Z
|
2019-06-04T12:23:36.000Z
|
test/python/transpiler/test_pass_scheduler.py
|
yeralin/qiskit-terra
|
251930a7b5d83af121ea0f3aafb33a54a1860e14
|
[
"Apache-2.0"
] | 35
|
2019-03-07T02:09:22.000Z
|
2022-03-22T19:55:15.000Z
|
test/python/transpiler/test_pass_scheduler.py
|
yeralin/qiskit-terra
|
251930a7b5d83af121ea0f3aafb33a54a1860e14
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=invalid-name
"""Transpiler testing"""
import io
from logging import StreamHandler, getLogger
import unittest.mock
import sys
from qiskit import QuantumRegister, QuantumCircuit
from qiskit.transpiler import PassManager, TranspilerError
from qiskit.transpiler.runningpassmanager import DoWhileController, ConditionalController, \
FlowController
from qiskit.test import QiskitTestCase
from ._dummy_passes import (PassA_TP_NR_NP, PassB_TP_RA_PA, PassC_TP_RA_PA,
PassD_TP_NR_NP, PassE_AP_NR_NP, PassF_reduce_dag_property,
PassI_Bad_AP, PassJ_Bad_NoReturn,
PassK_check_fixed_point_property, PassM_AP_NR_NP)
class SchedulerTestCase(QiskitTestCase):
"""Asserts for the scheduler."""
def assertScheduler(self, circuit, passmanager, expected):
"""
Run `transpile(circuit, passmanager)` and check
if the passes run as expected.
Args:
circuit (QuantumCircuit): Circuit to transform via transpilation.
passmanager (PassManager): pass manager instance for the transpilation process
expected (list): List of things the passes are logging
"""
logger = 'LocalLogger'
with self.assertLogs(logger, level='INFO') as cm:
out = passmanager.run(circuit)
self.assertIsInstance(out, QuantumCircuit)
self.assertEqual([record.message for record in cm.records], expected)
def assertSchedulerRaises(self, circuit, passmanager, expected, exception_type):
"""
Run `transpile(circuit, passmanager)` and check
if the passes run as expected until exception_type is raised.
Args:
circuit (QuantumCircuit): Circuit to transform via transpilation
passmanager (PassManager): pass manager instance for the transpilation process
expected (list): List of things the passes are logging
exception_type (Exception): Exception that is expected to be raised.
"""
logger = 'LocalLogger'
with self.assertLogs(logger, level='INFO') as cm:
self.assertRaises(exception_type, passmanager.run, circuit)
self.assertEqual([record.message for record in cm.records], expected)
class TestPassManagerInit(SchedulerTestCase):
""" The pass manager sets things at init time."""
def test_passes(self):
""" A single chain of passes, with Requests and Preserves, at __init__ time"""
circuit = QuantumCircuit(QuantumRegister(1))
passmanager = PassManager(passes=[
PassC_TP_RA_PA(), # Request: PassA / Preserves: PassA
PassB_TP_RA_PA(), # Request: PassA / Preserves: PassA
PassD_TP_NR_NP(argument1=[1, 2]), # Requires: {}/ Preserves: {}
PassB_TP_RA_PA()])
self.assertScheduler(circuit, passmanager, ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassD_TP_NR_NP',
'argument [1, 2]',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA'])
class TestUseCases(SchedulerTestCase):
"""Combine passes in different ways and checks that passes are run
in the right order."""
def setUp(self):
self.circuit = QuantumCircuit(QuantumRegister(1))
self.passmanager = PassManager()
def test_chain(self):
"""A single chain of passes, with Requires and Preserves."""
self.passmanager.append(PassC_TP_RA_PA()) # Requires: PassA / Preserves: PassA
self.passmanager.append(PassB_TP_RA_PA()) # Requires: PassA / Preserves: PassA
self.passmanager.append(PassD_TP_NR_NP(argument1=[1, 2])) # Requires: {}/ Preserves: {}
self.passmanager.append(PassB_TP_RA_PA())
self.assertScheduler(self.circuit, self.passmanager,
['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassD_TP_NR_NP',
'argument [1, 2]',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA'])
def test_conditional_passes_true(self):
"""A pass set with a conditional parameter. The callable is True."""
self.passmanager.append(PassE_AP_NR_NP(True))
self.passmanager.append(PassA_TP_NR_NP(),
condition=lambda property_set: property_set['property'])
self.assertScheduler(self.circuit, self.passmanager,
['run analysis pass PassE_AP_NR_NP',
'set property as True',
'run transformation pass PassA_TP_NR_NP'])
def test_conditional_passes_false(self):
"""A pass set with a conditional parameter. The callable is False."""
self.passmanager.append(PassE_AP_NR_NP(False))
self.passmanager.append(PassA_TP_NR_NP(),
condition=lambda property_set: property_set['property'])
self.assertScheduler(self.circuit, self.passmanager,
['run analysis pass PassE_AP_NR_NP',
'set property as False'])
def test_conditional_and_loop(self):
"""Run a conditional first, then a loop."""
self.passmanager.append(PassE_AP_NR_NP(True))
self.passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'],
condition=lambda property_set: property_set['property'])
self.assertScheduler(self.circuit, self.passmanager,
['run analysis pass PassE_AP_NR_NP',
'set property as True',
'run analysis pass PassG_calculates_dag_property',
'set property as 8 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run analysis pass PassG_calculates_dag_property',
'set property as 6 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5',
'run analysis pass PassG_calculates_dag_property',
'set property as 5 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 4',
'run analysis pass PassG_calculates_dag_property',
'set property as 4 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 3',
'run analysis pass PassG_calculates_dag_property',
'set property as 3 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2'])
def test_loop_and_conditional(self):
"""Run a loop first, then a conditional."""
FlowController.remove_flow_controller('condition')
FlowController.add_flow_controller('condition', ConditionalController)
self.passmanager.append(PassK_check_fixed_point_property())
self.passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'],
condition=lambda property_set: not property_set['property_fixed_point'])
self.assertScheduler(self.circuit, self.passmanager,
['run analysis pass PassG_calculates_dag_property',
'set property as 8 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run analysis pass PassG_calculates_dag_property',
'set property as 6 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5',
'run analysis pass PassG_calculates_dag_property',
'set property as 5 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 4',
'run analysis pass PassG_calculates_dag_property',
'set property as 4 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 3',
'run analysis pass PassG_calculates_dag_property',
'set property as 3 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2'])
def test_do_not_repeat_based_on_preservation(self):
"""When a pass is still a valid pass (because the following passes
preserved it), it should not run again."""
self.passmanager.append([PassB_TP_RA_PA(),
PassA_TP_NR_NP(),
PassB_TP_RA_PA()])
self.assertScheduler(self.circuit, self.passmanager,
['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA'])
def test_do_not_repeat_based_on_idempotence(self):
"""Repetition can be optimized to a single execution when
the pass is idempotent."""
self.passmanager.append(PassA_TP_NR_NP())
self.passmanager.append([PassA_TP_NR_NP(), PassA_TP_NR_NP()])
self.passmanager.append(PassA_TP_NR_NP())
self.assertScheduler(self.circuit, self.passmanager,
['run transformation pass PassA_TP_NR_NP'])
def test_non_idempotent_pass(self):
"""Two or more runs of a non-idempotent pass cannot be optimized."""
self.passmanager.append(PassF_reduce_dag_property())
self.passmanager.append([PassF_reduce_dag_property(),
PassF_reduce_dag_property()])
self.passmanager.append(PassF_reduce_dag_property())
self.assertScheduler(self.circuit, self.passmanager,
['run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5',
'run transformation pass PassF_reduce_dag_property',
'dag property = 4',
'run transformation pass PassF_reduce_dag_property',
'dag property = 3'])
def test_fenced_dag(self):
"""Analysis passes are not allowed to modified the DAG."""
qr = QuantumRegister(2)
circ = QuantumCircuit(qr)
circ.cx(qr[0], qr[1])
circ.cx(qr[0], qr[1])
circ.cx(qr[1], qr[0])
circ.cx(qr[1], qr[0])
self.passmanager.append(PassI_Bad_AP())
self.assertSchedulerRaises(circ, self.passmanager,
['run analysis pass PassI_Bad_AP',
'cx_runs: {(4, 5, 6, 7)}'],
TranspilerError)
def test_analysis_pass_is_idempotent(self):
"""Analysis passes are idempotent."""
passmanager = PassManager()
passmanager.append(PassE_AP_NR_NP(argument1=1))
passmanager.append(PassE_AP_NR_NP(argument1=1))
self.assertScheduler(self.circuit, passmanager,
['run analysis pass PassE_AP_NR_NP',
'set property as 1'])
def test_ap_before_and_after_a_tp(self):
"""A default transformation does not preserves anything
and analysis passes need to be re-run"""
passmanager = PassManager()
passmanager.append(PassE_AP_NR_NP(argument1=1))
passmanager.append(PassA_TP_NR_NP())
passmanager.append(PassE_AP_NR_NP(argument1=1))
self.assertScheduler(self.circuit, passmanager,
['run analysis pass PassE_AP_NR_NP',
'set property as 1',
'run transformation pass PassA_TP_NR_NP',
'run analysis pass PassE_AP_NR_NP',
'set property as 1'])
def test_pass_no_return(self):
"""Transformation passes that don't return a DAG raise error."""
self.passmanager.append(PassJ_Bad_NoReturn())
self.assertSchedulerRaises(self.circuit, self.passmanager,
['run transformation pass PassJ_Bad_NoReturn'],
TranspilerError)
def test_fixed_point_pass(self):
"""A pass set with a do_while parameter that checks for a fixed point."""
self.passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'])
self.assertScheduler(self.circuit, self.passmanager,
['run analysis pass PassG_calculates_dag_property',
'set property as 8 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run analysis pass PassG_calculates_dag_property',
'set property as 6 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5',
'run analysis pass PassG_calculates_dag_property',
'set property as 5 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 4',
'run analysis pass PassG_calculates_dag_property',
'set property as 4 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 3',
'run analysis pass PassG_calculates_dag_property',
'set property as 3 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2'])
def test_fixed_point_pass_max_iteration(self):
"""A pass set with a do_while parameter that checks that
the max_iteration is raised."""
self.passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'],
max_iteration=2)
self.assertSchedulerRaises(self.circuit, self.passmanager,
['run analysis pass PassG_calculates_dag_property',
'set property as 8 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run analysis pass PassG_calculates_dag_property',
'set property as 6 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5'], TranspilerError)
def test_fresh_initial_state(self):
"""New construction gives fresh instance."""
self.passmanager.append(PassM_AP_NR_NP(argument1=1))
self.passmanager.append(PassA_TP_NR_NP())
self.passmanager.append(PassM_AP_NR_NP(argument1=1))
self.assertScheduler(self.circuit, self.passmanager,
['run analysis pass PassM_AP_NR_NP',
'self.argument1 = 2',
'run transformation pass PassA_TP_NR_NP',
'run analysis pass PassM_AP_NR_NP',
'self.argument1 = 2'])
class DoXTimesController(FlowController):
"""A control-flow plugin for running a set of passes an X amount of times."""
def __init__(self, passes, options, do_x_times=0, **_):
self.do_x_times = do_x_times()
super().__init__(passes, options)
def __iter__(self):
for _ in range(self.do_x_times):
yield from self.passes
class TestControlFlowPlugin(SchedulerTestCase):
"""Testing the control flow plugin system."""
def setUp(self):
self.passmanager = PassManager()
self.circuit = QuantumCircuit(QuantumRegister(1))
def test_control_flow_plugin(self):
"""Adds a control flow plugin with a single parameter and runs it."""
FlowController.add_flow_controller('do_x_times', DoXTimesController)
self.passmanager.append([PassB_TP_RA_PA(), PassC_TP_RA_PA()],
do_x_times=lambda x: 3)
self.assertScheduler(self.circuit, self.passmanager,
['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA'])
def test_callable_control_flow_plugin(self):
"""Removes do_while, then adds it back. Checks max_iteration still working."""
controllers_length = len(FlowController.registered_controllers)
FlowController.remove_flow_controller('do_while')
self.assertEqual(controllers_length - 1,
len(FlowController.registered_controllers))
FlowController.add_flow_controller('do_while', DoWhileController)
self.assertEqual(controllers_length, len(FlowController.registered_controllers))
self.passmanager.append([PassB_TP_RA_PA(), PassC_TP_RA_PA()],
do_while=lambda property_set: True, max_iteration=2)
self.assertSchedulerRaises(self.circuit, self.passmanager,
['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA'],
TranspilerError)
def test_remove_nonexistent_plugin(self):
"""Tries to remove a plugin that does not exist."""
self.assertRaises(KeyError, FlowController.remove_flow_controller, "foo")
def test_bad_conditional(self):
"""Flow controller are not allowed to modify the property set."""
def bad_condition(property_set):
property_set['property'] = 'forbidden write'
self.passmanager.append(PassA_TP_NR_NP(), condition=bad_condition)
self.assertRaises(TranspilerError, self.passmanager.run, self.circuit)
class TestDumpPasses(SchedulerTestCase):
"""Testing the passes method."""
def test_passes(self):
"""Dump passes in different FlowControllerLinear"""
passmanager = PassManager()
passmanager.append(PassC_TP_RA_PA())
passmanager.append(PassB_TP_RA_PA())
expected = [{'flow_controllers': {}, 'passes': [PassC_TP_RA_PA()]},
{'flow_controllers': {}, 'passes': [PassB_TP_RA_PA()]}]
self.assertEqual(expected, passmanager.passes())
def test_passes_in_linear(self):
"""Dump passes in the same FlowControllerLinear"""
passmanager = PassManager(passes=[
PassC_TP_RA_PA(),
PassB_TP_RA_PA(),
PassD_TP_NR_NP(argument1=[1, 2]),
PassB_TP_RA_PA()])
expected = [{'flow_controllers': {}, 'passes': [PassC_TP_RA_PA(),
PassB_TP_RA_PA(),
PassD_TP_NR_NP(argument1=[1, 2]),
PassB_TP_RA_PA()]}]
self.assertEqual(expected, passmanager.passes())
def test_control_flow_plugin(self):
"""Dump passes in a custom flow controller."""
passmanager = PassManager()
FlowController.add_flow_controller('do_x_times', DoXTimesController)
passmanager.append([PassB_TP_RA_PA(), PassC_TP_RA_PA()],
do_x_times=lambda x: 3)
expected = [{'passes': [PassB_TP_RA_PA(), PassC_TP_RA_PA()],
'flow_controllers': {'do_x_times'}}]
self.assertEqual(expected, passmanager.passes())
def test_conditional_and_loop(self):
"""Dump passes with a conditional and a loop."""
passmanager = PassManager()
passmanager.append(PassE_AP_NR_NP(True))
passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'],
condition=lambda property_set: property_set['property_fixed_point'])
expected = [{'passes': [PassE_AP_NR_NP(True)], 'flow_controllers': {}},
{'passes': [PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()], 'flow_controllers': {'condition',
'do_while'}}]
self.assertEqual(expected, passmanager.passes())
class StreamHandlerRaiseException(StreamHandler):
"""Handler class that will raise an exception on formatting errors."""
def handleError(self, record):
raise sys.exc_info()
class TestLogPasses(QiskitTestCase):
"""Testing the log_passes option."""
def setUp(self):
logger = getLogger()
logger.setLevel('DEBUG')
self.output = io.StringIO()
logger.addHandler(StreamHandlerRaiseException(self.output))
self.circuit = QuantumCircuit(QuantumRegister(1))
def assertPassLog(self, passmanager, list_of_passes):
""" Runs the passmanager and checks that the elements in
passmanager.property_set['pass_log'] match list_of_passes (the names)."""
passmanager.run(self.circuit)
self.output.seek(0)
# Filter unrelated log lines
output_lines = self.output.readlines()
pass_log_lines = [x for x in output_lines if x.startswith('Pass:')]
for index, pass_name in enumerate(list_of_passes):
self.assertTrue(pass_log_lines[index].startswith(
'Pass: %s -' % pass_name))
def test_passes(self):
"""Dump passes in different FlowControllerLinear"""
passmanager = PassManager()
passmanager.append(PassC_TP_RA_PA())
passmanager.append(PassB_TP_RA_PA())
self.assertPassLog(passmanager, ['PassA_TP_NR_NP',
'PassC_TP_RA_PA',
'PassB_TP_RA_PA'])
def test_passes_in_linear(self):
"""Dump passes in the same FlowControllerLinear"""
passmanager = PassManager(passes=[
PassC_TP_RA_PA(),
PassB_TP_RA_PA(),
PassD_TP_NR_NP(argument1=[1, 2]),
PassB_TP_RA_PA()])
self.assertPassLog(passmanager, ['PassA_TP_NR_NP',
'PassC_TP_RA_PA',
'PassB_TP_RA_PA',
'PassD_TP_NR_NP',
'PassA_TP_NR_NP',
'PassB_TP_RA_PA'])
def test_control_flow_plugin(self):
""" Dump passes in a custom flow controller. """
passmanager = PassManager()
FlowController.add_flow_controller('do_x_times', DoXTimesController)
passmanager.append([PassB_TP_RA_PA(), PassC_TP_RA_PA()], do_x_times=lambda x: 3)
self.assertPassLog(passmanager, ['PassA_TP_NR_NP',
'PassB_TP_RA_PA',
'PassC_TP_RA_PA',
'PassB_TP_RA_PA',
'PassC_TP_RA_PA',
'PassB_TP_RA_PA',
'PassC_TP_RA_PA'])
def test_conditional_and_loop(self):
""" Dump passes with a conditional and a loop"""
passmanager = PassManager()
passmanager.append(PassE_AP_NR_NP(True))
passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'],
condition=lambda property_set: property_set['property_fixed_point'])
self.assertPassLog(passmanager, ['PassE_AP_NR_NP'])
class TestPassManagerReuse(SchedulerTestCase):
"""The PassManager instance should be reusable."""
def setUp(self):
self.passmanager = PassManager()
self.circuit = QuantumCircuit(QuantumRegister(1))
def test_chain_twice(self):
""" Run a chain twice."""
self.passmanager.append(PassC_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager.append(PassB_TP_RA_PA()) # Request: PassA / Preserves: PassA
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager, expected)
self.assertScheduler(self.circuit, self.passmanager, expected)
def test_conditional_twice(self):
""" Run a conditional twice. """
self.passmanager.append(PassE_AP_NR_NP(True))
self.passmanager.append(PassA_TP_NR_NP(),
condition=lambda property_set: property_set['property'])
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as True',
'run transformation pass PassA_TP_NR_NP']
self.assertScheduler(self.circuit, self.passmanager, expected)
self.assertScheduler(self.circuit, self.passmanager, expected)
def test_fixed_point_twice(self):
"""A fixed point scheduler, twice."""
self.passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
do_while=lambda property_set: not property_set['property_fixed_point'])
expected = ['run analysis pass PassG_calculates_dag_property',
'set property as 8 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run analysis pass PassG_calculates_dag_property',
'set property as 6 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5',
'run analysis pass PassG_calculates_dag_property',
'set property as 5 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 4',
'run analysis pass PassG_calculates_dag_property',
'set property as 4 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 3',
'run analysis pass PassG_calculates_dag_property',
'set property as 3 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2']
self.assertScheduler(self.circuit, self.passmanager, expected)
self.assertScheduler(self.circuit, self.passmanager, expected)
class TestPassManagerReplace(SchedulerTestCase):
"""Test PassManager.replace"""
def setUp(self):
self.passmanager = PassManager()
self.circuit = QuantumCircuit(QuantumRegister(1))
def test_replace0(self):
""" Test passmanager.replace(0, ...)."""
self.passmanager.append(PassC_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager.append(PassB_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager.replace(0, PassB_TP_RA_PA())
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager, expected)
def test_replace1(self):
""" Test passmanager.replace(1, ...)."""
self.passmanager.append(PassC_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager.append(PassB_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager.replace(1, PassC_TP_RA_PA())
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager, expected)
def test_setitem(self):
""" Test passmanager[1] = ..."""
self.passmanager.append(PassC_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager.append(PassB_TP_RA_PA()) # Request: PassA / Preserves: PassA
self.passmanager[1] = PassC_TP_RA_PA()
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager, expected)
def test_replace_with_conditional(self):
""" Replace a pass with a conditional pass. """
self.passmanager.append(PassE_AP_NR_NP(False))
self.passmanager.append(PassB_TP_RA_PA())
self.passmanager.replace(1, PassA_TP_NR_NP(),
condition=lambda property_set: property_set['property'])
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as False']
self.assertScheduler(self.circuit, self.passmanager, expected)
def test_replace_error(self):
""" Replace a non-existing index. """
self.passmanager.append(PassB_TP_RA_PA())
with self.assertRaises(TranspilerError):
self.passmanager.replace(99, PassA_TP_NR_NP())
class TestPassManagerSlicing(SchedulerTestCase):
"""test PassManager slicing."""
def setUp(self):
self.passmanager = PassManager()
self.circuit = QuantumCircuit(QuantumRegister(1))
def test_empty_passmanager_length(self):
""" test len(PassManager) when PassManager is empty """
length = len(self.passmanager)
expected_length = 0
self.assertEqual(length, expected_length)
def test_passmanager_length(self):
""" test len(PassManager) when PassManager is not empty """
self.passmanager.append(PassA_TP_NR_NP())
self.passmanager.append(PassA_TP_NR_NP())
length = len(self.passmanager)
expected_length = 2
self.assertEqual(length, expected_length)
def test_accessing_passmanager_by_index(self):
""" test accessing PassManager's passes by index """
self.passmanager.append(PassB_TP_RA_PA())
self.passmanager.append(PassC_TP_RA_PA())
new_passmanager = self.passmanager[1]
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, new_passmanager, expected)
def test_accessing_passmanager_by_index_with_condition(self):
""" test accessing PassManager's conditioned passes by index """
self.passmanager.append(PassF_reduce_dag_property())
self.passmanager.append(
[PassK_check_fixed_point_property(),
PassA_TP_NR_NP(),
PassF_reduce_dag_property()],
condition=lambda property_set: True,
do_while=lambda property_set: not property_set['property_fixed_point'])
new_passmanager = self.passmanager[1]
expected = ['run analysis pass PassG_calculates_dag_property',
'set property as 8 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 6',
'run analysis pass PassG_calculates_dag_property',
'set property as 6 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 5',
'run analysis pass PassG_calculates_dag_property',
'set property as 5 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 4',
'run analysis pass PassG_calculates_dag_property',
'set property as 4 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 3',
'run analysis pass PassG_calculates_dag_property',
'set property as 3 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2',
'run analysis pass PassG_calculates_dag_property',
'set property as 2 (from dag.property)',
'run analysis pass PassK_check_fixed_point_property',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassF_reduce_dag_property',
'dag property = 2']
self.assertScheduler(self.circuit, new_passmanager, expected)
def test_accessing_passmanager_by_range(self):
""" test accessing PassManager's passes by range """
self.passmanager.append(PassC_TP_RA_PA())
self.passmanager.append(PassB_TP_RA_PA())
self.passmanager.append(PassC_TP_RA_PA())
self.passmanager.append(PassD_TP_NR_NP())
new_passmanager = self.passmanager[1:3]
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, new_passmanager, expected)
def test_accessing_passmanager_by_range_with_condition(self):
""" test accessing PassManager's passes by range with condition """
self.passmanager.append(PassB_TP_RA_PA())
self.passmanager.append(PassE_AP_NR_NP(True))
self.passmanager.append(PassA_TP_NR_NP(),
condition=lambda property_set: property_set['property'])
self.passmanager.append(PassB_TP_RA_PA())
new_passmanager = self.passmanager[1:3]
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as True',
'run transformation pass PassA_TP_NR_NP']
self.assertScheduler(self.circuit, new_passmanager, expected)
def test_accessing_passmanager_error(self):
""" testing accessing a pass item not in list """
self.passmanager.append(PassB_TP_RA_PA())
with self.assertRaises(IndexError):
self.passmanager = self.passmanager[99]
class TestPassManagerConcatenation(SchedulerTestCase):
"""test PassManager concatenation by + operator."""
def setUp(self):
self.passmanager1 = PassManager()
self.passmanager2 = PassManager()
self.circuit = QuantumCircuit(QuantumRegister(1))
def test_concatenating_passmanagers(self):
""" test adding two PassManagers together """
self.passmanager1.append(PassB_TP_RA_PA())
self.passmanager2.append(PassC_TP_RA_PA())
new_passmanager = self.passmanager1 + self.passmanager2
expected = ['run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, new_passmanager, expected)
def test_concatenating_passmanagers_with_condition(self):
""" test adding two pass managers with condition """
self.passmanager1.append(PassE_AP_NR_NP(True))
self.passmanager1.append(PassB_TP_RA_PA())
self.passmanager2.append(PassC_TP_RA_PA(),
condition=lambda property_set: property_set['property'])
self.passmanager2.append(PassB_TP_RA_PA())
new_passmanager = self.passmanager1 + self.passmanager2
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as True',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA']
self.assertScheduler(self.circuit, new_passmanager, expected)
def test_adding_pass_to_passmanager(self):
""" test adding a pass to PassManager """
self.passmanager1.append(PassE_AP_NR_NP(argument1=1))
self.passmanager1.append(PassB_TP_RA_PA())
self.passmanager1 += PassC_TP_RA_PA()
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as 1',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager1, expected)
def test_adding_list_of_passes_to_passmanager(self):
""" test adding a list of passes to PassManager """
self.passmanager1.append(PassE_AP_NR_NP(argument1=1))
self.passmanager1.append(PassB_TP_RA_PA())
self.passmanager1 += [PassC_TP_RA_PA(), PassB_TP_RA_PA()]
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as 1',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassB_TP_RA_PA',
'run transformation pass PassC_TP_RA_PA',
'run transformation pass PassB_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager1, expected)
def test_adding_list_of_passes_to_passmanager_with_condition(self):
""" test adding a list of passes to a PassManager that have conditions"""
self.passmanager1.append(PassE_AP_NR_NP(False))
self.passmanager1.append(PassB_TP_RA_PA(),
condition=lambda property_set: property_set['property'])
self.passmanager1 += PassC_TP_RA_PA()
expected = ['run analysis pass PassE_AP_NR_NP',
'set property as False',
'run transformation pass PassA_TP_NR_NP',
'run transformation pass PassC_TP_RA_PA']
self.assertScheduler(self.circuit, self.passmanager1, expected)
def test_adding_pass_to_passmanager_error(self):
""" testing adding a non-pass item to PassManager """
with self.assertRaises(TypeError):
self.passmanager1 += "not a pass"
def test_adding_list_to_passmanager_error(self):
""" testing adding a list having a non-pass item to PassManager """
with self.assertRaises(TypeError):
self.passmanager1 += [PassB_TP_RA_PA(), "not a pass"]
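# A minimal composition sketch (illustrative only, not part of the test suite
# and never called) showing the PassManager patterns exercised above: append,
# '+' concatenation, slicing, and run. It reuses the dummy passes defined
# earlier in this module.
def _passmanager_composition_sketch():
    """Illustrative only; call manually if desired."""
    pm = PassManager()
    pm.append(PassB_TP_RA_PA())                       # append a single pass
    pm += PassC_TP_RA_PA()                            # '+=' accepts a pass as well
    other = PassManager(passes=[PassD_TP_NR_NP(argument1=[1, 2])])
    combined = pm + other                             # '+' returns a new PassManager
    first_two = combined[0:2]                         # slicing returns a new PassManager
    return first_two.run(QuantumCircuit(QuantumRegister(1)))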
if __name__ == '__main__':
unittest.main()
| 50.990816 | 96 | 0.58638 |
f37322d6395ba0c122644340de695d2e87f3ce1b | 835 | py | Python | Alot_Todo/urls.py | meetdaxini/Alot_Todo | bf42a05f1b55273eb9211e95802c4837c605692f | ["MIT"] | 1 | 2020-11-16T09:20:25.000Z | 2020-11-16T09:20:25.000Z | Alot_Todo/urls.py | meetdaxini/Alot_Todo | bf42a05f1b55273eb9211e95802c4837c605692f | ["MIT"] | null | null | null | Alot_Todo/urls.py | meetdaxini/Alot_Todo | bf42a05f1b55273eb9211e95802c4837c605692f | ["MIT"] | null | null | null |
"""Alot_Todo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
import todo.urls
urlpatterns = [
path('admin/', admin.site.urls),
path('', include(todo.urls)),
]
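# A hedged sketch of the extension patterns described in the docstring above;
# 'pages', 'views.home', and 'blog.urls' are hypothetical names, not part of
# this project:
#
#     from pages import views
#     urlpatterns += [
#         path('home/', views.home, name='home'),   # function view
#         path('blog/', include('blog.urls')),      # nested URLconf
#     ]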
| 34.791667 | 78 | 0.686228 |
babbf69a909a3f250b424fac8efd0516cb1550d7 | 2,244 | py | Python | setup.py | renovate-bot/python-video-transcoder-1 | e9c1c229fe88d200d0f60314814078e79e3f1524 | ["Apache-2.0"] | null | null | null | setup.py | renovate-bot/python-video-transcoder-1 | e9c1c229fe88d200d0f60314814078e79e3f1524 | ["Apache-2.0"] | null | null | null | setup.py | renovate-bot/python-video-transcoder-1 | e9c1c229fe88d200d0f60314814078e79e3f1524 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os
import setuptools # type: ignore
version = "1.1.0"
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
readme = readme_file.read()
setuptools.setup(
name="google-cloud-video-transcoder",
version=version,
long_description=readme,
author="Google LLC",
author_email="googleapis-packages@google.com",
license="Apache 2.0",
url="https://github.com/googleapis/python-video-transcoder",
packages=setuptools.PEP420PackageFinder.find(),
namespace_packages=("google", "google.cloud"),
platforms="Posix; MacOS X; Windows",
include_package_data=True,
install_requires=(
# NOTE: Maintainers, please do not require google-api-core>=2.x.x
# Until this issue is closed
# https://github.com/googleapis/google-cloud-python/issues/10566
"google-api-core[grpc] >= 1.26.0, <3.0.0dev",
"proto-plus >= 1.4.0",
"packaging >= 14.3",
),
python_requires=">=3.6",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
],
zip_safe=False,
)
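# Usage sketch (an assumption, not part of this file): after installing the
# package, e.g. `pip install google-cloud-video-transcoder` or `pip install -e .`
# from this directory, the client is typically imported as:
#
#     from google.cloud.video import transcoder_v1
#     client = transcoder_v1.TranscoderServiceClient()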
| 34 | 74 | 0.669786 |
a71ab363780cbbfb96d55b9259457c34eb856771 | 136 | py | Python | tools/dev_tools/test/pygments/lexers/compiled.py | salilab/rmf | 4895bff9d22381882ac38180bdd025e22bdc7c00 | ["Apache-2.0"] | 2 | 2017-12-22T18:09:47.000Z | 2019-12-18T05:00:50.000Z | tools/dev_tools/test/pygments/lexers/compiled.py | salilab/rmf | 4895bff9d22381882ac38180bdd025e22bdc7c00 | ["Apache-2.0"] | 5 | 2015-03-07T19:32:39.000Z | 2021-04-22T20:00:10.000Z | tools/dev_tools/test/pygments/lexers/compiled.py | salilab/rmf | 4895bff9d22381882ac38180bdd025e22bdc7c00 | ["Apache-2.0"] | 2 | 2015-03-12T18:34:23.000Z | 2015-06-19T20:15:14.000Z |
class CppLexer(object):
def get_tokens_unprocessed(self, code):
return (('ind1', 'tok1', 'val1'), ('ind2', 'tok2', 'val2'))
| 34 | 67 | 0.602941 |
e99a9ac2ec99393ee24d39765b806ba98e989f98 | 1,158 | py | Python | lib/surface/bq/__init__.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | ["Apache-2.0"] | 2 | 2019-11-10T09:17:07.000Z | 2019-12-18T13:44:08.000Z | lib/surface/bq/__init__.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | ["Apache-2.0"] | null | null | null | lib/surface/bq/__init__.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | ["Apache-2.0"] | 1 | 2020-07-25T01:40:19.000Z | 2020-07-25T01:40:19.000Z |
# -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The command group for Google BigQuery."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Bq(base.Group):
"""Interact with and manage resources in Google BigQuery."""
category = base.BIG_DATA_CATEGORY
def Filter(self, context, args):
# TODO(b/190526493): Determine if command group works with project number
base.RequireProjectID(args)
del context, args
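# Usage sketch (an assumption based on the ALPHA release track above): the
# command group is reached as `gcloud alpha bq <command>`.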
| 34.058824 | 78 | 0.761658 |
32130f0d213cb5534976acd4b900dbbe64fcbd33 | 25,954 | py | Python | BrianAmedee/repo/7SAofRevolution.py | javovelez/manim | 0a2b4786779448a4e35081909cf58f8d9ffa7e39 | ["MIT"] | null | null | null | BrianAmedee/repo/7SAofRevolution.py | javovelez/manim | 0a2b4786779448a4e35081909cf58f8d9ffa7e39 | ["MIT"] | null | null | null | BrianAmedee/repo/7SAofRevolution.py | javovelez/manim | 0a2b4786779448a4e35081909cf58f8d9ffa7e39 | ["MIT"] | null | null | null |
from re import L
from manim import *
# HELPERS
def get_arc_lines(
graph, plane, dx=1, x_min=None, x_max=None, line_color=RED, line_width=3
):
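"""Approximate `graph` with straight chords: for each step of width dx in
[x_min, x_max] add a chord between consecutive graph points (styled with
line_color/line_width) plus endpoint dots colour-graded from BLUE_B to
GREEN_B. Returns VGroup(dots, lines); used below to visualise the arc-length
element dL."""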
dots = VGroup()
lines = VGroup()
result = VGroup(dots, lines)
x_range = np.arange(x_min, x_max, dx)
colors = color_gradient([BLUE_B, GREEN_B], len(x_range))
for x, color in zip(x_range, colors):
p1 = Dot().move_to(plane.input_to_graph_point(x, graph))
p2 = Dot().move_to(plane.input_to_graph_point(x + dx, graph))
dots.add(p1, p2)
dots.set_fill(colors, opacity=0.8)
line = Line(
p1.get_center(),
p2.get_center(),
stroke_color=line_color,
stroke_width=line_width,
)
lines.add(line)
return result
def get_conic_approximations(
axes, graph, x_min=0, x_max=1, dx=0.5, color_A=RED, color_B=GREEN, opacity=1
):
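"""Approximate the surface of revolution of `graph` about the x-axis with
conical frustum patches: for each slice [x - dx, x] the line y = k*v with
k = graph(x)/x is revolved through 2*pi, so every frustum meets the graph at
the right edge of its slice. Returns a VGroup of ParametricSurface objects."""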
result = VGroup()
for x in np.arange(x_min + dx, x_max + dx, dx):
if graph.underlying_function(x) == 0:
k = 0
conic_surface = VectorizedPoint()
else:
k = graph.underlying_function(x) / x
conic_surface = ParametricSurface(
lambda u, v: axes.c2p(v, k * v * np.cos(u), k * v * np.sin(u)),
u_min=0,
u_max=2 * PI,
v_min=x - dx,
v_max=x,
checkerboard_colors=[color_A, color_B],
fill_opacity=opacity,
)
result.add(conic_surface)
return result
def get_riemann_truncated_cones(
axes,
graph,
x_min=0,
x_max=1,
dx=0.5,
color_A=RED,
color_B=GREEN,
stroke_color=WHITE,
stroke_width=1,
opacity=1,
theta=45,
):
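"""Build flat ArcPolygon slices under `graph` between x_min and x_max, one per
dx, whose left and right edges are bowed by `theta` degrees to suggest
unrolled truncated cones; used to visualise the surface-area Riemann sum.
Returns a VGroup of ArcPolygons."""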
result = VGroup()
for x in np.arange(x_min, x_max, dx):
points = VGroup()
p1 = axes.c2p(x + dx, 0)
p2 = axes.c2p(x + dx, graph.underlying_function(x + dx))
p3 = axes.c2p(x, graph.underlying_function(x))
p4 = axes.c2p(x, 0)
truncated_conic = ArcPolygon(
p1,
p2,
p3,
p4,
stroke_color=stroke_color,
stroke_width=stroke_width,
fill_color=[color_A, color_B],
fill_opacity=opacity,
arc_config=[
{"angle": theta * DEGREES, "color": stroke_color},
{"angle": 0, "color": stroke_color},
{"angle": -theta * DEGREES, "color": stroke_color},
{"angle": 0, "color": stroke_color},
],
)
result.add(truncated_conic)
return result
class One(ThreeDScene):
def construct(self):
l = 2
w = 4
h = 1
rect_prism = Prism(dimensions=[l, w, h]).to_edge(LEFT, buff=1)
kwargs = {"stroke_color": BLUE_D, "fill_color": BLUE_B, "fill_opacity": 0.8}
bottom = Rectangle(width=w, height=l, **kwargs)
s1 = Rectangle(height=h, width=w, **kwargs).next_to(bottom, UP, buff=0)
s2 = Rectangle(height=h, width=w, **kwargs).next_to(bottom, DOWN, buff=0)
l1 = Rectangle(height=l, width=h, **kwargs).next_to(bottom, LEFT, buff=0)
l2 = Rectangle(height=l, width=h, **kwargs).next_to(bottom, RIGHT, buff=0)
top = Rectangle(width=w, height=l, **kwargs).next_to(s1, UP, buff=0)
net = VGroup(top, bottom, s1, s2, l1, l2).rotate(-PI / 2).to_edge(RIGHT, buff=1)
arrow = Line(
start=rect_prism.get_right(), end=net.get_left(), buff=0.2
).add_tip()
self.begin_ambient_camera_rotation()
self.set_camera_orientation(phi=45 * DEGREES, theta=-45 * DEGREES)
self.play(Create(rect_prism))
self.play(
LaggedStart(Create(arrow), Transform(rect_prism.copy(), net)),
run_time=2,
lag_ratio=0.5,
)
self.wait()
self.play(FadeOut(Group(*self.mobjects)))
self.stop_ambient_camera_rotation()
self.set_camera_orientation(phi=0, theta=-90 * DEGREES)
text = Tex("Surface Area of a Solid of Revolution?")
self.play(Write(text))
self.wait()
self.play(FadeOut(text))
self.begin_ambient_camera_rotation()
self.set_camera_orientation(phi=45 * DEGREES, theta=-45 * DEGREES)
axes = ThreeDAxes(
x_range=[0, 4.1, 1],
x_length=5,
y_range=[-4, 4.1, 1],
y_length=5,
z_range=[-4, 4, 1],
z_length=5,
).add_coordinates()
function = axes.get_graph(lambda x: 0.25 * x ** 2, x_range=[0, 4], color=YELLOW)
area = axes.get_area(graph=function, x_range=[0, 4], color=[BLUE_B, BLUE_D])
e = ValueTracker(2 * PI)
surface = always_redraw(
lambda: ParametricSurface(
lambda u, v: axes.c2p(
v, 0.25 * v ** 2 * np.cos(u), 0.25 * v ** 2 * np.sin(u)
),
u_min=0,
u_max=e.get_value(),
v_min=0,
v_max=4,
checkerboard_colors=[BLUE_B, BLUE_D],
)
)
self.play(
LaggedStart(Create(axes), Create(function), Create(area), Create(surface)),
run_time=4,
lag_ratio=0.5,
)
self.play(
Rotating(
VGroup(function, area),
axis=RIGHT,
radians=2 * PI,
about_point=axes.c2p(0, 0, 0),
),
e.animate.set_value(2 * PI),
run_time=5,
rate_func=linear,
)
self.wait(3)
class Two(ThreeDScene):
def construct(self):
axes = (
ThreeDAxes(
x_range=[0, 4.1, 1],
x_length=5,
y_range=[-4, 4.1, 1],
y_length=5,
z_range=[-4, 4, 1],
z_length=5,
)
.to_edge(LEFT)
.add_coordinates()
)
graph = axes.get_graph(lambda x: 0.25 * x ** 2, x_range=[0, 4], color=YELLOW)
surface = always_redraw(
lambda: ParametricSurface(
lambda u, v: axes.c2p(
v, 0.25 * v ** 2 * np.cos(u), 0.25 * v ** 2 * np.sin(u)
),
u_min=0,
u_max=2 * PI,
v_min=0,
v_max=4,
checkerboard_colors=[BLUE_B, BLUE_D],
)
)
self.set_camera_orientation(phi=30 * DEGREES, theta=-90 * DEGREES)
self.begin_ambient_camera_rotation(rate=0.1)
self.play(
LaggedStart(Create(axes), Create(graph), Create(surface)),
run_time=2,
lag_ratio=0.4,
)
cone = get_conic_approximations(
axes=axes, graph=graph, x_min=0, x_max=4, dx=4, opacity=0.4
)
dist = ValueTracker(2)
truncated_cone = always_redraw(
lambda: ParametricSurface(
lambda u, v: axes.c2p(v, v * np.cos(u), v * np.sin(u)),
u_min=0,
u_max=2 * PI,
v_min=2,
v_max=2 + dist.get_value(),
checkerboard_colors=[RED, GREEN],
opacity=0.6,
)
)
self.play(Create(cone))
self.play(Create(truncated_cone))
self.wait()
a = [0, 0, 0]
b = [4, 0, 0]
c = [3, 2, 0]
d = [1, 2, 0]
# Now working on the 2D screen
truncated_net = always_redraw(
lambda: ArcPolygon(
[0, 0, 0],
[4, 0, 0],
[3, dist.get_value(), 0],
[1, dist.get_value(), 0],
stroke_color=RED_B,
fill_color=[GREEN_B, GREEN_D],
fill_opacity=0.8,
arc_config=[
{"angle": 45 * DEGREES, "color": RED},
{"angle": 0, "color": RED},
{"angle": -45 * DEGREES, "color": RED},
{"angle": 0, "color": RED},
],
)
.rotate(PI / 2)
.next_to(axes, RIGHT, buff=1.5)
)
self.play(
ReplacementTransform(truncated_cone.copy(), truncated_net), run_time=2
)
self.wait(2)
self.stop_ambient_camera_rotation()
self.move_camera(phi=0, theta=-90 * DEGREES)
anot1 = always_redraw(
lambda: MathTex("2 \\pi {r}_{0}")
.set(width=0.8)
.next_to(truncated_net, LEFT, buff=0.2)
)
anot2 = always_redraw(
lambda: MathTex("2 \\pi {r}_{1}")
.set(width=0.8)
.next_to(truncated_net, RIGHT, buff=0.2)
)
anot3 = always_redraw(lambda: MathTex("L").next_to(truncated_net, UP, buff=0.1))
annotations = VGroup(anot1, anot2, anot3)
area = MathTex("Area = 2 \\pi r L").next_to(truncated_net, DOWN, buff=0.5)
anot = MathTex("where \\ r=average \\ radius").next_to(
area, DOWN, aligned_edge=LEFT
)
formula = VGroup(area, anot)
self.play(FadeOut(VGroup(surface, cone)))
self.play(Write(annotations))
self.play(Write(formula))
bound_a = MathTex("a").next_to(axes.c2p(0, 0, 0), DOWN, buff=0.1)
bound_b = MathTex("b").next_to(axes.c2p(4, 0, 0), DOWN, buff=0.1)
dx = always_redraw(
lambda: DashedLine(
start=axes.c2p(2, graph.underlying_function(2)),
end=axes.c2p(2 + dist.get_value(), graph.underlying_function(2)),
stroke_color=GREEN,
)
)
dx_brace = always_redraw(lambda: Brace(dx).next_to(dx, DOWN, buff=0.1))
dx_text = always_redraw(
lambda: MathTex("dx").set(width=0.3).next_to(dx_brace, DOWN, buff=0)
)
dy = always_redraw(
lambda: DashedLine(
start=axes.c2p(
2 + dist.get_value(),
graph.underlying_function(2 + dist.get_value()),
),
end=axes.c2p(2 + dist.get_value(), graph.underlying_function(2)),
stroke_color=GREEN,
)
)
dy_brace = always_redraw(
lambda: Brace(dy, direction=RIGHT).next_to(dy, RIGHT, buff=0.1)
)
dy_text = always_redraw(
lambda: MathTex("dy").set(width=0.3).next_to(dy_brace, RIGHT, buff=0)
)
dl = always_redraw(
lambda: Line(
start=axes.c2p(2, graph.underlying_function(2)),
end=axes.c2p(
2 + dist.get_value(),
graph.underlying_function(2 + dist.get_value()),
),
stroke_color=YELLOW,
)
)
dl_brace = always_redraw(
lambda: BraceBetweenPoints(point_1=dl.get_end(), point_2=dl.get_start())
)
dl_text = always_redraw(
lambda: MathTex("dL")
.set(width=0.3)
.next_to(dl_brace, UP, buff=0)
.set_color(YELLOW)
)
radius_line = Line(
start=axes.c2p(2.25, 0),
end=axes.c2p(2.25, graph.underlying_function(2.25)),
stroke_color=BLUE,
stroke_width=10,
)
radius_text = (
MathTex("r")
.set_color(BLUE)
.set(width=0.3)
.next_to(radius_line, RIGHT, buff=0.1)
)
demo_mobjects = VGroup(
bound_a,
bound_b,
dx,
dx_brace,
dx_text,
dy,
dy_brace,
dy_text,
dl,
dl_brace,
dl_text,
)
self.play(Create(demo_mobjects))
self.play(dist.animate.set_value(0.5), run_time=5)
intuition_text = MathTex(
"As \\ dx \\rightarrow 0, \\ dL \\ is \\ Arc \\ Length"
).to_edge(UR, buff=0.2)
sa_formula = MathTex("SA = \\int_{a}^{b}2\\pi", "x", "\\ dL").next_to(
intuition_text, DOWN, buff=0.2, aligned_edge=LEFT
)
self.play(Write(intuition_text))
self.play(Write(sa_formula))
self.wait()
self.play(Create(radius_line), Write(radius_text))
self.wait()
self.play(Transform(radius_text.copy(), sa_formula[1]))
self.wait()
class ThreePers1(ThreeDScene):
def construct(self):
axes = ThreeDAxes(
x_range=[0, 4, 1],
x_length=5,
y_range=[-4, 4, 1],
y_length=5,
z_range=[-4, 4, 1],
z_length=5,
axis_config={"decimal_number_config": {"num_decimal_places": 0}},
).to_edge(LEFT)
graph = axes.get_graph(lambda x: 0.25 * x ** 2, x_range=[0, 4], color=YELLOW)
surface = always_redraw(
lambda: ParametricSurface(
lambda u, v: axes.c2p(
v, 0.25 * v ** 2 * np.cos(u), 0.25 * v ** 2 * np.sin(u)
),
u_min=0,
u_max=2 * PI,
v_min=0,
v_max=4,
checkerboard_colors=[BLUE_B, BLUE_D],
)
)
dx = ValueTracker(1)
conic_approx = always_redraw(
lambda: get_conic_approximations(
axes=axes, graph=graph, x_min=0, x_max=4, dx=dx.get_value()
)
)
num_text = MathTex("dx=").next_to(axes, UP, buff=0.5)
num = always_redraw(
lambda: DecimalNumber()
.set_value(dx.get_value())
.next_to(num_text, RIGHT, buff=0.1)
)
plane = NumberPlane(
x_range=[0, 4, 1],
x_length=5,
y_range=[0, 61, 20],
y_length=6,
axis_config={"decimal_number_config": {"num_decimal_places": 0}},
).to_edge(DR)
plane.add_coordinates()
def sa_func(x):
return 6.2832 * x * (1 + (x ** 2 / 4)) ** 0.5
graph2 = plane.get_graph(sa_func, x_range=[0, 4], color=BLUE)
graph2_lab = Tex("SA Function").next_to(plane, UP, buff=0.2)
t = ValueTracker(
45
) # Tracking the bowness in the sa of conics, mathematically incorrect but whatever
truncated_area = always_redraw(
lambda: get_riemann_truncated_cones(
axes=plane,
graph=graph2,
x_min=0,
x_max=4,
dx=dx.get_value(),
theta=t.get_value(),
)
)
self.set_camera_orientation(phi=0 * DEGREES, theta=-90 * DEGREES)
self.add(axes, graph, surface, conic_approx, num_text, num)
self.play(
LaggedStart(
Create(conic_approx),
Write(VGroup(num_text, num)),
DrawBorderThenFill(plane),
run_time=2,
lag_ratio=0.25,
)
)
self.wait()
self.play(ReplacementTransform(conic_approx.copy(), truncated_area), run_time=2)
self.play(
dx.animate.set_value(0.1), t.animate.set_value(5), run_time=6
) # set dx = 0.1, and t = 5
self.play(
LaggedStart(Create(graph2), Write(graph2_lab), run_time=1, lag_ratio=0.3)
)
self.wait()
class ThreePers2(ThreeDScene):
def construct(self):
axes = ThreeDAxes(
x_range=[0, 4.1, 1],
x_length=5,
y_range=[-4, 4.1, 1],
y_length=5,
z_range=[-4, 4, 1],
z_length=5,
axis_config={"decimal_number_config": {"num_decimal_places": 0}},
).to_edge(LEFT)
axes.add_coordinates()
graph = axes.get_graph(lambda x: 0.25 * x ** 2, x_range=[0, 4], color=YELLOW)
surface = always_redraw(
lambda: ParametricSurface(
lambda u, v: axes.c2p(
v, 0.25 * v ** 2 * np.cos(u), 0.25 * v ** 2 * np.sin(u)
),
u_min=0,
u_max=2 * PI,
v_min=0,
v_max=4,
checkerboard_colors=[BLUE_B, BLUE_D],
)
)
dx = ValueTracker(1)
conic_approx = always_redraw(
lambda: get_conic_approximations(
axes=axes, graph=graph, x_min=0, x_max=4, dx=dx.get_value()
)
)
num_text = MathTex("dx=").next_to(axes, UP, buff=0.5)
num = always_redraw(
lambda: DecimalNumber()
.set_value(dx.get_value())
.next_to(num_text, RIGHT, buff=0.1)
)
axes2 = Axes(
x_range=[0, 4, 1], x_length=5, y_range=[0, 60, 10], y_length=6
).to_edge(DR)
def sa_func(x):
return 6.2832 * x * (1 + (x ** 2 / 4)) ** 0.5
graph2 = axes2.get_graph(sa_func, x_range=[0, 4], color=BLUE)
graph2_lab = Tex("SA Function").next_to(axes2, UP, buff=0.2)
t = ValueTracker(
45
) # Tracking the bowness in the sa of conics, mathematically incorrect but whatever
truncated_area = always_redraw(
lambda: get_riemann_truncated_cones(
axes=axes2,
graph=graph2,
x_min=0,
x_max=4,
dx=dx.get_value(),
theta=t.get_value(),
)
)
self.set_camera_orientation(phi=0 * DEGREES, theta=-90 * DEGREES)
self.add(axes, graph, surface, conic_approx, num_text, num)
self.move_camera(phi=30 * DEGREES, theta=-100 * DEGREES)
self.begin_ambient_camera_rotation(rate=0.01)
self.play(
LaggedStart(
Create(conic_approx),
Write(VGroup(num_text, num)),
DrawBorderThenFill(axes2),
run_time=1,
lag_ratio=0.25,
)
)
self.play(ReplacementTransform(conic_approx.copy(), truncated_area), run_time=1)
self.play(
dx.animate.set_value(0.1), t.animate.set_value(5), run_time=3
) # set dx = 0.1, and t = 5
self.add(graph2, graph2_lab)
self.wait()
class FourArcL(Scene):
def construct(self):
axes = Axes(
x_range=[0, 4.1, 1],
x_length=5,
y_range=[-4, 4.1, 1],
y_length=5,
axis_config={"decimal_number_config": {"num_decimal_places": 0}},
).to_edge(LEFT)
axes.add_coordinates()
graph = axes.get_graph(lambda x: 0.25 * x ** 2, x_range=[0, 4], color=YELLOW)
# Mobjects for explaining construction of Line Integral
dist = ValueTracker(1)
dx = always_redraw(
lambda: DashedLine(
start=axes.c2p(2, graph.underlying_function(2)),
end=axes.c2p(2 + dist.get_value(), graph.underlying_function(2)),
stroke_color=GREEN,
)
)
dx_brace = always_redraw(lambda: Brace(dx).next_to(dx, DOWN, buff=0.1))
dx_text = always_redraw(
lambda: MathTex("dx").set(width=0.3).next_to(dx_brace, DOWN, buff=0)
)
dy = always_redraw(
lambda: DashedLine(
start=axes.c2p(
2 + dist.get_value(),
graph.underlying_function(2 + dist.get_value()),
),
end=axes.c2p(2 + dist.get_value(), graph.underlying_function(2)),
stroke_color=GREEN,
)
)
dy_brace = always_redraw(
lambda: Brace(dy, direction=RIGHT).next_to(dy, RIGHT, buff=0.1)
)
dy_text = always_redraw(
lambda: MathTex("dy").set(width=0.3).next_to(dy_brace, RIGHT, buff=0)
)
dl = always_redraw(
lambda: Line(
start=axes.c2p(2, graph.underlying_function(2)),
end=axes.c2p(
2 + dist.get_value(),
graph.underlying_function(2 + dist.get_value()),
),
stroke_color=YELLOW,
)
)
dl_brace = always_redraw(
lambda: BraceBetweenPoints(point_1=dl.get_end(), point_2=dl.get_start())
)
dl_text = always_redraw(
lambda: MathTex("dL")
.set(width=0.3)
.next_to(dl_brace, UP, buff=0)
.set_color(YELLOW)
)
demo_mobjects = VGroup(
dx, dx_brace, dx_text, dy, dy_brace, dy_text, dl, dl_brace, dl_text
)
# Adding the Latex Mobjects for Mini-Proof
helper_text = (
MathTex("dL \\ approximates \\ curve \\ as \\ dx\\ approaches \\ 0")
.set(width=6)
.to_edge(UR, buff=0.5)
)
line1 = MathTex("{dL}^{2}=", "{dx}^{2}", "+{dy}^{2}")
line2 = MathTex("{dL}^{2}=", "{dx}^{2}", "(1+(\\frac{dy}{dx})^{2})")
line3 = MathTex(
"dL = \\sqrt{", "{dx}^{2}", "(1+(\\frac{dy}{dx})^{2}) }"
) # Then using surds
line4 = MathTex("dL = \\sqrt{", "1", " + (\\frac{dy}{dx})^{2} } dx")
proof = (
VGroup(line1, line2, line3, line4)
.scale(0.8)
.arrange(DOWN, aligned_edge=LEFT)
.next_to(helper_text, DOWN, buff=0.25)
)
box = SurroundingRectangle(helper_text)
# The actual line integral
dx_tracker = ValueTracker(1.5) # Tracking the dx distance of line integral
line_integral = always_redraw(
lambda: get_arc_lines(
graph=graph,
plane=axes,
dx=dx_tracker.get_value(),
x_min=0,
x_max=4,
line_color=RED,
line_width=7,
)
)
self.add(axes, graph)
self.play(Write(helper_text))
self.wait()
self.play(Write(line1))
self.wait()
self.play(Write(line2[0]))
self.play(ReplacementTransform(line1[1].copy(), line2[1]))
self.play(Write(line2[2]))
self.wait()
self.play(Write(line3), run_time=2)
self.wait()
self.play(Write(line4))
self.wait()
self.add(line_integral)
self.play(dx_tracker.animate.set_value(0.2), Create(box), run_time=8)
self.wait()
class FiveSolving(ThreeDScene): # Need to render
def construct(self):
axes = ThreeDAxes(
x_range=[0, 4, 1],
x_length=5,
y_range=[-4, 4, 1],
y_length=5,
z_range=[-4, 4, 1],
z_length=5,
).to_edge(LEFT)
axes.add_coordinates()
graph = axes.get_graph(lambda x: 0.25 * x ** 2, x_range=[0, 4], color=YELLOW)
graph_lab = (
MathTex("y=\\frac{x^2}{4}")
.scale(0.8)
.next_to(graph, UP, buff=0.2)
.set_color(YELLOW)
)
surface = always_redraw(
lambda: ParametricSurface(
lambda u, v: axes.c2p(
v, 0.25 * v ** 2 * np.cos(u), 0.25 * v ** 2 * np.sin(u)
),
u_min=0,
u_max=2 * PI,
v_min=0,
v_max=4,
checkerboard_colors=[BLUE_B, BLUE_D],
)
)
dx = ValueTracker(0.5)
truncated_conics = always_redraw(
lambda: get_conic_approximations(
axes=axes, graph=graph, x_min=0, x_max=4, dx=dx.get_value()
)
)
self.add(axes, graph, surface, graph_lab)
solve0 = MathTex(
"SA = \\int_{a}^{b} 2 \\pi x dL, \\ dL = \\sqrt{ 1+(\\frac{dy}{dx})^{2} } dx"
)
solve1 = MathTex(
"SA = \\int_{a}^{b} 2 \\pi x", "\\sqrt{ 1+(\\frac{dy}{dx})^{2} }"
)
solve2 = MathTex("y=\\frac{x^2}{4} , \\", "\\frac{dy}{dx} = \\frac{x}{2}")
solve3 = MathTex("(\\frac{dy}{dx})^2 = ", "\\frac{x^2}{4}")
solve4 = MathTex(
"SA = \\int_{0}^{4} 2 \\pi x \\sqrt{ 1 + ", "\\frac{x^2}{4}", " }"
)
solve5 = MathTex("SA = 85.29 \\ units^2").next_to(axes, DOWN, buff=0.3)
solved = (
VGroup(solve0, solve1, solve2, solve3, solve4)
.scale(0.75)
.arrange(DOWN, buff=0.2, aligned_edge=LEFT)
.to_edge(UR, buff=0.2)
)
self.play(Write(solve0), run_time=0.5)
self.wait()
self.play(Write(solve1), run_time=0.5)
self.play(ReplacementTransform(graph_lab.copy(), solve2[0]), run_time=0.5)
self.wait()
self.play(Write(solve2[1]), run_time=0.5)
self.wait()
self.play(Write(solve3[0]), run_time=0.5)
self.play(ReplacementTransform(solve2[1].copy(), solve3[1]), run_time=0.5)
self.wait()
self.play(Write(solve4), run_time=0.5)
self.wait()
self.move_camera(phi=30 * DEGREES, theta=-90 * DEGREES)
self.play(FadeIn(truncated_conics), run_time=0.5)
self.play(dx.animate.set_value(0.1), Write(solve5), run_time=2)
self.wait()
| 32.770202 | 93 | 0.482623 |
c660fb221f03743b28d7f5c0560dc9cf2a08e941 | 73 | py | Python | test/fixtures/python/head_without_http.py | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | ["MIT"] | 536 | 2021-10-06T17:21:25.000Z | 2022-03-31T13:05:48.000Z | test/fixtures/python/head_without_http.py | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | ["MIT"] | 74 | 2021-10-08T13:57:14.000Z | 2022-03-31T06:55:39.000Z | test/fixtures/python/head_without_http.py | csperando/curlconverter | 733f110e5621375701f4424299ccd72e669876f6 | ["MIT"] | 104 | 2021-10-06T19:36:15.000Z | 2022-03-31T07:34:04.000Z |
import requests
response = requests.head('http://localhost:28139/page')
| 18.25 | 55 | 0.767123 |
e89719f68062fae6c4c066fb548ad9991cc03098 | 4,886 | py | Python | code/markdown2/tools/tables-align-columns.py | Endres/Letterpress | 12a44d6a9c7eceb1fd2e8001c84c2b921ffe1922 | ["BSD-3-Clause"] | 291 | 2015-01-17T06:08:47.000Z | 2022-01-10T23:24:16.000Z | code/markdown2/tools/tables-align-columns.py | ahmedfadhil/Letterpress | 0913bc21c09b4982071159dd52502d597bf2a0c9 | ["BSD-3-Clause"] | 3 | 2015-01-20T14:57:46.000Z | 2017-07-15T14:27:26.000Z | code/markdown2/tools/tables-align-columns.py | ahmedfadhil/Letterpress | 0913bc21c09b4982071159dd52502d597bf2a0c9 | ["BSD-3-Clause"] | 39 | 2015-01-10T11:15:34.000Z | 2021-05-16T09:11:32.000Z |
#!/usr/bin/env python
"""
Convert [tables](https://github.com/trentm/python-markdown2/wiki/tables)
in a given Markdown document such that columns are aligned.
Limitations:
- Can't handle tables where cells have a pipe.
"""
from __future__ import print_function
__version__ = "1.0.0"
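# Example invocation (a sketch; `doc.md` is a hypothetical input file):
#
#     python tables-align-columns.py doc.md > doc-aligned.md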
import codecs
import os
from pprint import pprint, pformat
import re
import sys
from collections import defaultdict
p = print
def e(*args, **kwargs):
kwargs['file'] = sys.stderr
p(*args, **kwargs)
#---- internal support stuff
def tables_align_columns(path):
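"""Read the Markdown file at `path` and return its text with every pipe table
rewritten so that columns share a common width, preserving the :---, ---: and
:---: alignment markers of the underline row."""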
def _table_sub(match):
head, underline, body = match.groups()
data_rows = [
[cell.strip() for cell in head.strip().strip('|').split('|')],
]
for line in body.strip('\n').split('\n'):
data_rows.append([cell.strip() for cell in line.strip().strip('|').split('|')])
width_from_col_idx = defaultdict(int)
for data_row in data_rows:
for col_idx, cell in enumerate(data_row):
width_from_col_idx[col_idx] = max(
2, width_from_col_idx[col_idx], len(cell))
# Determine aligns for columns.
ucells = [cell.strip() for cell in underline.strip('| \t\n').split('|')]
align_from_col_idx = {}
for col_idx, cell in enumerate(ucells):
if cell[0] == ':' and cell[-1] == ':':
align_from_col_idx[col_idx] = 'center'
elif cell[0] == ':':
align_from_col_idx[col_idx] = 'left'
elif cell[-1] == ':':
align_from_col_idx[col_idx] = 'right'
else:
align_from_col_idx[col_idx] = None
table = []
for data_row in data_rows:
row = []
#e('align_from_col_idx:', align_from_col_idx)
#e('data_row:', data_row)
for col_idx, cell in enumerate(data_row):
width = width_from_col_idx[col_idx]
try:
align = align_from_col_idx[col_idx]
except KeyError:
# Limitation: We hit a table row where a cell has a
# literal `|` in it. We can't currently handle that, so
# let's just skip this table.
e('tables-align-columns: warning: skipping a table '
'with literal `|`: %r' % match.group(0))
return match.group(0)
if align == 'center':
space = width - len(cell)
left = space // 2  # integer division: ' ' * left requires an int on Python 3
right = space - left
row.append(' '*left + cell + ' '*right)
elif align == 'right':
row.append('%%%ds' % width % cell)
else:
row.append('%%-%ds' % width % cell)
table.append(row)
underline = []
for col_idx, cell in enumerate(data_rows[0]):
width = width_from_col_idx[col_idx]
align = align_from_col_idx[col_idx]
if align == 'center':
underline.append(':' + u'-'*(width-2) + ':')
elif align == 'right':
underline.append(u'-'*(width-1) + ':')
elif align == 'left':
underline.append(':' + u'-'*(width-1))
else:
underline.append(u'-'*width)
table[1:1] = [underline]
#e(pformat(table, width=200))
table_str = u'\n'.join(('| ' + u' | '.join(r) + ' |') for r in table)
return table_str + '\n'
text = codecs.open(path, 'rb', 'utf8').read()
less_than_tab = 3
table_re = re.compile(r'''
(?:(?<=\n\n)|\A\n?) # leading blank line
^[ ]{0,%d} # allowed whitespace
(.*[|].*) \n # $1: header row (at least one pipe)
^[ ]{0,%d} # allowed whitespace
( # $2: underline row
# underline row with leading bar
(?: \|\ *:?-+:?\ * )+ \|? \n
|
# or, underline row without leading bar
(?: \ *:?-+:?\ *\| )+ (?: \ *:?-+:?\ * )? \n
)
( # $3: data rows
(?:
^[ ]{0,%d}(?!\ ) # ensure line begins with 0 to less_than_tab spaces
.*\|.* \n
)+
)
''' % (less_than_tab, less_than_tab, less_than_tab), re.M | re.X)
return table_re.sub(_table_sub, text)
#---- mainline
def main(argv):
for path in argv[1:]:
text = tables_align_columns(path)
sys.stdout.write(text.encode(
sys.stdout.encoding or "utf-8", 'xmlcharrefreplace'))
if __name__ == "__main__":
sys.exit( main(sys.argv) )
| 33.696552 | 96 | 0.480147 |
33db7234cefb0e335c2281c3c5584eff83cfd86f | 666 | py | Python | src/scripts.py | ivezic/MultifitTests | 9d22d9f5c40d21ec693c0a7d54e2c6f6dda3f36e | ["MIT"] | null | null | null | src/scripts.py | ivezic/MultifitTests | 9d22d9f5c40d21ec693c0a7d54e2c6f6dda3f36e | ["MIT"] | null | null | null | src/scripts.py | ivezic/MultifitTests | 9d22d9f5c40d21ec693c0a7d54e2c6f6dda3f36e | ["MIT"] | null | null | null |
from astrometry import *
from makeImage import stamp
from showImage import *
def demo(showStamp=0):
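"""Build an Npix1D x Npix1D stamp with a constant background, add a centred
Gaussian source and noise, optionally show a four-panel comparison plot, and
return the stamp object."""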
# make image
Npix1D = 23
Bkgd = 1000
s = stamp(Npix1D, Bkgd)
# add a Gaussian source
muX = 0.0
muY = 0.0
alpha = 2.0
Amplitude = 10000.0
s.addGaussianSource(muX, muY, alpha, Amplitude)
# and add noise
sigmaNoise = 100.0
addsourcenoise = 1
s.addNoise(sigmaNoise, addsourcenoise)
# and show it
if (showStamp):
diffimage = s.image - s.sourceImage
FourPanelStampPlot(s.oneDpixels, s.imageNoNoise, s.sourceImage, s.image, diffimage)
return s
# make a stamp
stamp1 = demo(1)
| 19.028571 | 91 | 0.63964 |
e0d66257ef570c26c577346ef0b1d1cc8627d11b | 9,414 | py | Python | tests/shared/core/training_data/story_writer/test_yaml_story_writer.py | hercules261188/rasa | 6b0631f1fddf95a044c728e885e2940dd2f8f83e | ["Apache-2.0"] | 1 | 2021-11-12T08:01:05.000Z | 2021-11-12T08:01:05.000Z | tests/shared/core/training_data/story_writer/test_yaml_story_writer.py | hercules261188/rasa | 6b0631f1fddf95a044c728e885e2940dd2f8f83e | ["Apache-2.0"] | 64 | 2021-09-24T06:44:41.000Z | 2022-03-14T12:12:28.000Z | tests/shared/core/training_data/story_writer/test_yaml_story_writer.py | hercules261188/rasa | 6b0631f1fddf95a044c728e885e2940dd2f8f83e | ["Apache-2.0"] | null | null | null |
from pathlib import Path
import textwrap
from typing import Text
from collections import OrderedDict
import pytest
from rasa.shared.core.constants import (
ACTION_SESSION_START_NAME,
ACTION_LISTEN_NAME,
ACTION_UNLIKELY_INTENT_NAME,
)
from rasa.shared.core.domain import Domain
from rasa.shared.core.events import (
ActionExecuted,
UserUttered,
DefinePrevUserUtteredFeaturization,
)
from rasa.shared.core.trackers import DialogueStateTracker
from rasa.shared.core.training_data.story_reader.yaml_story_reader import (
YAMLStoryReader,
)
from rasa.shared.core.training_data.story_writer.yaml_story_writer import (
YAMLStoryWriter,
)
from rasa.shared.core.training_data.structures import STORY_START
@pytest.mark.parametrize(
"input_yaml_file",
[
"data/test_yaml_stories/stories.yml",
"data/test_yaml_stories/stories_defaultdomain.yml",
],
)
async def test_simple_story(tmpdir: Path, domain: Domain, input_yaml_file: Text):
original_yaml_reader = YAMLStoryReader(domain, None)
original_yaml_story_steps = original_yaml_reader.read_from_file(input_yaml_file)
target_story_filename = tmpdir / "test.yml"
writer = YAMLStoryWriter()
writer.dump(target_story_filename, original_yaml_story_steps)
processed_yaml_reader = YAMLStoryReader(domain, None)
processed_yaml_story_steps = processed_yaml_reader.read_from_file(
target_story_filename
)
assert len(processed_yaml_story_steps) == len(original_yaml_story_steps)
for processed_step, original_step in zip(
processed_yaml_story_steps, original_yaml_story_steps
):
assert len(processed_step.events) == len(original_step.events)
async def test_story_start_checkpoint_is_skipped(domain: Domain):
input_yaml_file = "data/test_yaml_stories/stories.yml"
original_yaml_reader = YAMLStoryReader(domain, None)
original_yaml_story_steps = original_yaml_reader.read_from_file(input_yaml_file)
yaml_text = YAMLStoryWriter().dumps(original_yaml_story_steps)
assert STORY_START not in yaml_text
async def test_forms_are_converted(domain: Domain):
original_yaml_reader = YAMLStoryReader(domain, None)
original_yaml_story_steps = original_yaml_reader.read_from_file(
"data/test_yaml_stories/stories_form.yml"
)
assert YAMLStoryWriter.stories_contain_loops(original_yaml_story_steps)
writer = YAMLStoryWriter()
with pytest.warns(None) as record:
writer.dumps(original_yaml_story_steps)
assert len(record) == 0
def test_yaml_writer_dumps_user_messages():
events = [UserUttered("Hello", {"name": "greet"}), ActionExecuted("utter_greet")]
tracker = DialogueStateTracker.from_events("default", events)
dump = YAMLStoryWriter().dumps(tracker.as_story().story_steps, is_test_story=True)
assert (
dump.strip()
== textwrap.dedent(
"""
version: "2.0"
stories:
- story: default
steps:
- intent: greet
user: |-
Hello
- action: utter_greet
"""
).strip()
)
def test_yaml_writer_doesnt_dump_action_unlikely_intent():
events = [
UserUttered("Hello", {"name": "greet"}),
ActionExecuted("utter_hello"),
ActionExecuted(ACTION_UNLIKELY_INTENT_NAME, metadata={"key1": "value1"}),
ActionExecuted("utter_bye"),
]
tracker = DialogueStateTracker.from_events("default", events)
dump = YAMLStoryWriter().dumps(tracker.as_story().story_steps, is_test_story=True)
assert (
dump.strip()
== textwrap.dedent(
"""
version: "2.0"
stories:
- story: default
steps:
- intent: greet
user: |-
Hello
- action: utter_hello
- action: utter_bye
"""
).strip()
)
def test_yaml_writer_avoids_dumping_not_existing_user_messages():
events = [UserUttered("greet", {"name": "greet"}), ActionExecuted("utter_greet")]
tracker = DialogueStateTracker.from_events("default", events)
dump = YAMLStoryWriter().dumps(tracker.as_story().story_steps)
assert (
dump.strip()
== textwrap.dedent(
"""
version: "2.0"
stories:
- story: default
steps:
- intent: greet
- action: utter_greet
"""
).strip()
)
@pytest.mark.parametrize(
"input_yaml_file", ["data/test_yaml_stories/rules_with_stories_sorted.yaml"]
)
def test_yaml_writer_dumps_rules(input_yaml_file: Text, tmpdir: Path, domain: Domain):
original_yaml_reader = YAMLStoryReader(domain, None)
original_yaml_story_steps = original_yaml_reader.read_from_file(input_yaml_file)
dump = YAMLStoryWriter().dumps(original_yaml_story_steps)
# remove the version string
dump = "\n".join(dump.split("\n")[1:])
with open(input_yaml_file) as original_file:
assert dump == original_file.read()
async def test_action_start_action_listen_are_not_dumped():
events = [
ActionExecuted(ACTION_SESSION_START_NAME),
UserUttered("Hello", {"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
]
tracker = DialogueStateTracker.from_events("default", events)
dump = YAMLStoryWriter().dumps(tracker.as_story().story_steps)
assert ACTION_SESSION_START_NAME not in dump
assert ACTION_LISTEN_NAME not in dump
def test_yaml_writer_stories_to_yaml(domain: Domain):
reader = YAMLStoryReader(domain, None)
writer = YAMLStoryWriter()
steps = reader.read_from_file(
"data/test_yaml_stories/simple_story_with_only_end.yml"
)
result = writer.stories_to_yaml(steps)
assert isinstance(result, OrderedDict)
assert "stories" in result
assert len(result["stories"]) == 1
def test_yaml_writer_stories_to_yaml_with_null_entities(domain: Domain):
writer = YAMLStoryWriter()
stories = textwrap.dedent(
"""
version: "2.0"
stories:
- story: happy path
steps:
- intent: test_intent
entities:
- test_entity: null
- test_entity2: false
"""
)
stories_yaml = YAMLStoryReader().read_from_string(stories)
result = writer.stories_to_yaml(stories_yaml)
assert isinstance(result, OrderedDict)
assert "stories" in result
assert len(result["stories"]) == 1
entities = result["stories"][0]["steps"][0]["entities"]
assert entities[0] == "test_entity"
assert entities[1] == OrderedDict({"test_entity2": False})
def test_writing_end_to_end_stories(domain: Domain):
story_name = "test_writing_end_to_end_stories"
events = [
# Training story with intent and action labels
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(intent={"name": "greet"}),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
# Prediction story with intent and action labels
ActionExecuted(ACTION_LISTEN_NAME),
UserUttered(text="Hi", intent={"name": "greet"}),
DefinePrevUserUtteredFeaturization(use_text_for_featurization=False),
ActionExecuted("utter_greet"),
ActionExecuted(ACTION_LISTEN_NAME),
# End-To-End Training Story
UserUttered(text="Hi"),
ActionExecuted(action_text="Hi, I'm a bot."),
ActionExecuted(ACTION_LISTEN_NAME),
# End-To-End Prediction Story
UserUttered("Hi", intent={"name": "greet"}),
DefinePrevUserUtteredFeaturization(use_text_for_featurization=True),
ActionExecuted(action_text="Hi, I'm a bot."),
ActionExecuted(ACTION_LISTEN_NAME),
]
tracker = DialogueStateTracker.from_events(story_name, events)
dump = YAMLStoryWriter().dumps(tracker.as_story().story_steps)
assert (
dump.strip()
== textwrap.dedent(
f"""
version: "2.0"
stories:
- story: {story_name}
steps:
- intent: greet
- action: utter_greet
- intent: greet
- action: utter_greet
- user: |-
Hi
- bot: Hi, I'm a bot.
- user: |-
Hi
- bot: Hi, I'm a bot.
"""
).strip()
)
def test_reading_and_writing_end_to_end_stories_in_test_mode(domain: Domain):
story_name = "test_writing_end_to_end_stories_in_test_mode"
conversation_tests = f"""
stories:
- story: {story_name}
steps:
- intent: greet
user: Hi
- action: utter_greet
- intent: greet
user: |
[Hi](test)
- action: utter_greet
- user: Hi
- bot: Hi, I'm a bot.
- user: |
[Hi](test)
- bot: Hi, I'm a bot.
"""
end_to_end_tests = YAMLStoryReader().read_from_string(conversation_tests)
dump = YAMLStoryWriter().dumps(end_to_end_tests, is_test_story=True)
assert (
dump.strip()
== textwrap.dedent(
f"""
version: "2.0"
stories:
- story: {story_name}
steps:
- intent: greet
user: |-
Hi
- action: utter_greet
- intent: greet
user: |-
[Hi](test)
- action: utter_greet
- user: |-
Hi
- bot: Hi, I'm a bot.
- user: |-
[Hi](test)
- bot: Hi, I'm a bot.
"""
).strip()
)
| 29.236025 | 86 | 0.65955 |
8c1c228d2c6eadae95da83ad7d27e785e3dfc00b | 40,667 | py | Python | k8s/images/codalab/apps/web/migrations/0001_initial.py | abdulari/codalab-competitions | fdfbb77ac62d56c6b4b9439935037f97ffcd1423 | ["Apache-2.0"] | 2 | 2021-12-20T11:39:03.000Z | 2021-12-20T11:39:06.000Z | k8s/images/codalab/apps/web/migrations/0001_initial.py | abdulari/codalab-competitions | fdfbb77ac62d56c6b4b9439935037f97ffcd1423 | ["Apache-2.0"] | 2 | 2022-01-27T22:35:11.000Z | 2022-02-09T21:48:23.000Z | k8s/images/codalab/apps/web/migrations/0001_initial.py | abdulari/codalab-competitions | fdfbb77ac62d56c6b4b9439935037f97ffcd1423 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-07-16 18:50
from __future__ import unicode_literals
import apps.web.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
import mptt.fields
import s3direct.fields
import storages.backends.s3boto3
class Migration(migrations.Migration):
initial = True
dependencies = [
('teams', '0001_initial'),
('queues', '0001_initial'),
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Competition',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('chahub_timestamp', models.DateTimeField(blank=True, null=True)),
('chahub_data_hash', models.TextField(blank=True, null=True)),
('chahub_needs_retry', models.BooleanField(default=False)),
('title', models.CharField(max_length=100)),
('description', models.TextField(blank=True, null=True)),
('url_redirect', models.URLField(blank=True, help_text='(NOTE: You should not have Registration Required above checked if using URL redirection, because upon redirect participants will not be approved and unable to participate.)', null=True, verbose_name='URL Redirect')),
('image', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-public'), upload_to=apps.web.models._uuidify('logos'), verbose_name='Logo')),
('image_url_base', models.CharField(max_length=255)),
('has_registration', models.BooleanField(default=False, verbose_name='Registration Required')),
('start_date', models.DateTimeField(blank=True, null=True, verbose_name='Start Date (UTC)')),
('end_date', models.DateTimeField(blank=True, null=True, verbose_name='End Date (UTC)')),
('last_modified', models.DateTimeField(auto_now_add=True)),
('published', models.BooleanField(default=False, verbose_name='Publicly Available')),
('last_phase_migration', models.PositiveIntegerField(default=1)),
('is_migrating', models.BooleanField(default=False)),
('force_submission_to_leaderboard', models.BooleanField(default=False)),
('disallow_leaderboard_modifying', models.BooleanField(default=False)),
('secret_key', django_extensions.db.fields.UUIDField(blank=True, editable=False)),
('enable_medical_image_viewer', models.BooleanField(default=False)),
('enable_detailed_results', models.BooleanField(default=False)),
('original_yaml_file', models.TextField(blank=True, default='', null=True)),
('show_datasets_from_yaml', models.BooleanField(default=True)),
('reward', models.PositiveIntegerField(blank=True, null=True)),
('is_migrating_delayed', models.BooleanField(default=False)),
('allow_teams', models.BooleanField(default=False)),
('enable_per_submission_metadata', models.BooleanField(default=False, help_text='(Team name, Method name, Method description, etc.)')),
('allow_public_submissions', models.BooleanField(default=False, verbose_name='Allow sharing of public submissions')),
('enable_forum', models.BooleanField(default=False)),
('anonymous_leaderboard', models.BooleanField(default=False)),
('enable_teams', models.BooleanField(default=False, verbose_name='Enable Competition level teams')),
('require_team_approval', models.BooleanField(default=True, verbose_name='Organizers need to approve the new teams')),
('hide_top_three', models.BooleanField(default=False, verbose_name='Hide Top Three Leaderboard')),
('hide_chart', models.BooleanField(default=False, verbose_name='Hide Chart')),
('allow_organizer_teams', models.BooleanField(default=False, verbose_name='Allow Organizer Teams')),
('competition_docker_image', models.CharField(blank=True, default='', max_length=128)),
('deleted', models.BooleanField(default=False)),
('admins', models.ManyToManyField(blank=True, null=True, related_name='competition_admins', to=settings.AUTH_USER_MODEL)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='competitioninfo_creator', to=settings.AUTH_USER_MODEL)),
('modified_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='competitioninfo_modified_by', to=settings.AUTH_USER_MODEL)),
('queue', models.ForeignKey(blank=True, help_text="(don't change this unless you have a reason to, default/empty is fine)", null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='competitions', to='queues.Queue')),
('teams', models.ManyToManyField(blank=True, null=True, related_name='competition_teams', to='teams.Team')),
],
options={
'ordering': ['end_date'],
'permissions': (('is_owner', 'Owner'), ('can_edit', 'Edit')),
},
),
migrations.CreateModel(
name='CompetitionDefBundle',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('config_bundle', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('competition-bundles'))),
('s3_config_bundle', s3direct.fields.S3DirectField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='owner', to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='CompetitionDump',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.DateTimeField(auto_now_add=True)),
('status', models.CharField(default='Starting', max_length=64)),
('data_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('competition_dump'), verbose_name='Data file')),
('competition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dumps', to='web.Competition')),
],
),
migrations.CreateModel(
name='CompetitionParticipant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('reason', models.CharField(blank=True, max_length=100, null=True)),
('deleted', models.BooleanField(default=False)),
('competition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='participants', to='web.Competition')),
],
),
migrations.CreateModel(
name='CompetitionPhase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.CharField(blank=True, max_length=1000, null=True)),
('phasenumber', models.PositiveIntegerField(verbose_name='Number')),
('label', models.CharField(blank=True, max_length=50, verbose_name='Name')),
('start_date', models.DateTimeField(verbose_name='Start Date (UTC)')),
('max_submissions', models.PositiveIntegerField(default=100, verbose_name='Maximum Submissions (per User)')),
('max_submissions_per_day', models.PositiveIntegerField(default=999, verbose_name='Max Submissions (per User) per day')),
('is_scoring_only', models.BooleanField(default=True, verbose_name='Results Scoring Only')),
('scoring_program', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('phase_scoring_program_file'), verbose_name='Scoring Program')),
('reference_data', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('phase_reference_data_file'), verbose_name='Reference Data')),
('input_data', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('phase_input_data_file'), verbose_name='Input Data')),
('leaderboard_management_mode', models.CharField(default='default', max_length=50, verbose_name='Leaderboard Mode')),
('force_best_submission_to_leaderboard', models.BooleanField(default=False, verbose_name='If submission beats old score, put submission on leaderboard')),
('auto_migration', models.BooleanField(default=False)),
('is_migrated', models.BooleanField(default=False)),
('execution_time_limit', models.PositiveIntegerField(default=300, verbose_name='Execution time limit (in seconds)')),
('color', models.CharField(blank=True, choices=[('white', 'White'), ('orange', 'Orange'), ('yellow', 'Yellow'), ('green', 'Green'), ('blue', 'Blue'), ('purple', 'Purple')], max_length=24, null=True)),
('phase_never_ends', models.BooleanField(default=False)),
('scoring_program_docker_image', models.CharField(blank=True, default='', max_length=128)),
('default_docker_image', models.CharField(blank=True, default='', max_length=128)),
('disable_custom_docker_image', models.BooleanField(default=True)),
('starting_kit', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('starting_kit'), verbose_name='Starting Kit')),
('public_data', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('public_data'), verbose_name='Public Data')),
('ingestion_program', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('ingestion_program'))),
('ingestion_program_docker_image', models.CharField(blank=True, default='', max_length=128)),
('competition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='phases', to='web.Competition')),
],
options={
'ordering': ['phasenumber'],
},
),
migrations.CreateModel(
name='CompetitionSubmission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('chahub_timestamp', models.DateTimeField(blank=True, null=True)),
('chahub_data_hash', models.TextField(blank=True, null=True)),
('chahub_needs_retry', models.BooleanField(default=False)),
('secret', models.CharField(blank=True, default='', max_length=128)),
('docker_image', models.CharField(blank=True, default='', max_length=128)),
('file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_file_name'))),
('s3_file', s3direct.fields.S3DirectField(blank=True, null=True)),
('file_url_base', models.CharField(blank=True, max_length=2000)),
('readable_filename', models.TextField(blank=True, null=True)),
('description', models.CharField(blank=True, max_length=256)),
('inputfile', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_inputfile'))),
('runfile', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_runfile'))),
('submitted_at', models.DateTimeField(auto_now_add=True)),
('started_at', models.DateTimeField(blank=True, null=True)),
('completed_at', models.DateTimeField(blank=True, null=True)),
('execution_key', models.TextField(blank=True, default='')),
('status_details', models.CharField(blank=True, max_length=100, null=True)),
('submission_number', models.PositiveIntegerField(default=0)),
('output_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_output'))),
('private_output_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_private_output'))),
('stdout_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_stdout'))),
('stderr_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_stderr'))),
('history_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_history'))),
('scores_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_scores'))),
('coopetition_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_coopetition'))),
('detailed_results_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_detailed_results'))),
('prediction_runfile', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_prediction_runfile'))),
('prediction_output_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('submission_prediction_output'))),
('exception_details', models.TextField(blank=True, null=True)),
('prediction_stdout_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('predict_submission_stdout'))),
('prediction_stderr_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('predict_submission_stderr'))),
('ingestion_program_stdout_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('predict_submission_stdout'))),
('ingestion_program_stderr_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('predict_submission_stderr'))),
('method_name', models.CharField(blank=True, max_length=20, null=True)),
('method_description', models.TextField(blank=True, null=True)),
('project_url', models.URLField(blank=True, null=True)),
('publication_url', models.URLField(blank=True, null=True)),
('bibtex', models.TextField(blank=True, null=True)),
('organization_or_affiliation', models.CharField(blank=True, max_length=255, null=True)),
('team_name', models.CharField(blank=True, max_length=64, null=True)),
('is_public', models.BooleanField(default=False)),
('when_made_public', models.DateTimeField(blank=True, null=True)),
('when_unmade_public', models.DateTimeField(blank=True, null=True)),
('download_count', models.IntegerField(default=0)),
('like_count', models.IntegerField(default=0)),
('dislike_count', models.IntegerField(default=0)),
('is_migrated', models.BooleanField(default=False)),
('queue_name', models.TextField(blank=True, null=True)),
('participant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='submissions', to='web.CompetitionParticipant')),
('phase', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='submissions', to='web.CompetitionPhase')),
],
),
migrations.CreateModel(
name='CompetitionSubmissionMetadata',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_predict', models.BooleanField(default=False)),
('is_scoring', models.BooleanField(default=False)),
('hostname', models.CharField(blank=True, max_length=255, null=True)),
('processes_running_in_temp_dir', models.TextField(blank=True, null=True)),
('ingestion_program_duration', models.TextField(blank=True, null=True)),
('beginning_virtual_memory_usage', models.TextField(blank=True, null=True)),
('beginning_swap_memory_usage', models.TextField(blank=True, null=True)),
('beginning_cpu_usage', models.TextField(blank=True, null=True)),
('end_virtual_memory_usage', models.TextField(blank=True, null=True)),
('end_swap_memory_usage', models.TextField(blank=True, null=True)),
('end_cpu_usage', models.TextField(blank=True, null=True)),
('submission', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='metadatas', to='web.CompetitionSubmission')),
],
),
migrations.CreateModel(
name='CompetitionSubmissionStatus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('codename', models.SlugField(max_length=20, unique=True)),
],
),
migrations.CreateModel(
name='ContentCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('codename', models.SlugField(max_length=100, unique=True)),
('is_menu', models.BooleanField(default=True)),
('content_limit', models.PositiveIntegerField(default=1)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='web.ContentCategory')),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='ContentVisibility',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('codename', models.SlugField(max_length=20, unique=True)),
('classname', models.CharField(blank=True, max_length=30, null=True)),
],
),
migrations.CreateModel(
name='Dataset',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('description', models.TextField()),
('number', models.PositiveIntegerField(default=1)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='datasets', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['number'],
},
),
migrations.CreateModel(
name='DefaultContentItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('label', models.CharField(max_length=100)),
('codename', models.SlugField(max_length=100, unique=True)),
('rank', models.IntegerField(default=0)),
('required', models.BooleanField(default=False)),
('category', mptt.fields.TreeForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ContentCategory')),
('initial_visibility', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ContentVisibility')),
],
),
migrations.CreateModel(
name='ExternalFile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('source_url', models.URLField()),
('source_address_info', models.CharField(blank=True, max_length=200)),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ExternalFileSource',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('codename', models.SlugField(unique=True)),
('service_url', models.URLField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='ExternalFileType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
('codename', models.SlugField(max_length=20, unique=True)),
],
),
migrations.CreateModel(
name='OrganizerDataSet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('full_name', models.TextField(default='')),
('type', models.CharField(choices=[('Reference Data', 'Reference Data'), ('Scoring Program', 'Scoring Program'), ('Input Data', 'Input Data'), ('Ingestion Program', 'Ingestion Program'), ('Starting Kit', 'Starting Kit'), ('Public Data', 'Public Data'), ('None', 'None')], default='None', max_length=64)),
('description', models.TextField(blank=True, null=True)),
('data_file', models.FileField(blank=True, null=True, storage=storages.backends.s3boto3.S3Boto3Storage(bucket='tthomas-codalab-private'), upload_to=apps.web.models._uuidify('dataset_data_file'), verbose_name='Data file')),
('key', django_extensions.db.fields.UUIDField(blank=True, editable=False)),
('sub_data_files', models.ManyToManyField(blank=True, null=True, to='web.OrganizerDataSet', verbose_name='Bundle of data files')),
('uploaded_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('codename', models.SlugField(max_length=100)),
('title', models.CharField(blank=True, max_length=100, null=True)),
('label', models.CharField(max_length=100, verbose_name='Title')),
('rank', models.IntegerField(default=0, verbose_name='Order')),
('visibility', models.BooleanField(default=True, verbose_name='Visible')),
('markup', models.TextField(blank=True)),
('html', models.TextField(blank=True, verbose_name='Content')),
('category', mptt.fields.TreeForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ContentCategory')),
('competition', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='pages', to='web.Competition')),
],
options={
'ordering': ['category', 'rank'],
},
),
migrations.CreateModel(
name='PageContainer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=200)),
('object_id', models.PositiveIntegerField(db_index=True)),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='ParticipantStatus',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
('codename', models.CharField(max_length=30, unique=True)),
('description', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='PhaseLeaderBoard',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('phase', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='board', to='web.CompetitionPhase')),
],
),
migrations.CreateModel(
name='PhaseLeaderBoardEntry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('board', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entries', to='web.PhaseLeaderBoard')),
('result', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='leaderboard_entry_result', to='web.CompetitionSubmission')),
],
),
migrations.CreateModel(
name='SubmissionComputedScore',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('operation', models.CharField(choices=[('Max', 'Max'), ('Avg', 'Average')], max_length=10)),
],
),
migrations.CreateModel(
name='SubmissionComputedScoreField',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('computed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fields', to='web.SubmissionComputedScore')),
],
),
migrations.CreateModel(
name='SubmissionResultGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(max_length=50)),
('label', models.CharField(max_length=50)),
('ordering', models.PositiveIntegerField(default=1)),
('competition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Competition')),
],
options={
'ordering': ['ordering'],
},
),
migrations.CreateModel(
name='SubmissionResultGroupPhase',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.SubmissionResultGroup')),
('phase', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.CompetitionPhase')),
],
),
migrations.CreateModel(
name='SubmissionScore',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('value', models.DecimalField(decimal_places=10, max_digits=20)),
('result', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scores', to='web.CompetitionSubmission')),
],
),
migrations.CreateModel(
name='SubmissionScoreDef',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(db_index=True, max_length=50)),
('label', models.CharField(max_length=50)),
('sorting', models.SlugField(choices=[('asc', 'Ascending'), ('desc', 'Descending')], default='asc', max_length=20)),
('numeric_format', models.CharField(blank=True, max_length=20, null=True)),
('show_rank', models.BooleanField(default=False)),
('selection_default', models.IntegerField(default=0)),
('computed', models.BooleanField(default=False)),
('ordering', models.PositiveIntegerField(default=1)),
('competition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Competition')),
],
),
migrations.CreateModel(
name='SubmissionScoreDefGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.SubmissionResultGroup')),
('scoredef', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.SubmissionScoreDef')),
],
),
migrations.CreateModel(
name='SubmissionScoreSet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(max_length=50)),
('label', models.CharField(max_length=50)),
('ordering', models.PositiveIntegerField(default=1)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('competition', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.Competition')),
('parent', mptt.fields.TreeForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='children', to='web.SubmissionScoreSet')),
('scoredef', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='web.SubmissionScoreDef')),
],
),
migrations.AddField(
model_name='submissionscoredef',
name='groups',
field=models.ManyToManyField(through='web.SubmissionScoreDefGroup', to='web.SubmissionResultGroup'),
),
migrations.AddField(
model_name='submissionscore',
name='scoredef',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.SubmissionScoreDef'),
),
migrations.AddField(
model_name='submissionresultgroup',
name='phases',
field=models.ManyToManyField(through='web.SubmissionResultGroupPhase', to='web.CompetitionPhase'),
),
migrations.AddField(
model_name='submissioncomputedscorefield',
name='scoredef',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.SubmissionScoreDef'),
),
migrations.AddField(
model_name='submissioncomputedscore',
name='scoredef',
field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='computed_score', to='web.SubmissionScoreDef'),
),
migrations.AddField(
model_name='page',
name='container',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pages', to='web.PageContainer', verbose_name='Page Container'),
),
migrations.AddField(
model_name='page',
name='defaults',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='web.DefaultContentItem'),
),
migrations.AddField(
model_name='externalfile',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ExternalFileType'),
),
migrations.AddField(
model_name='dataset',
name='datafile',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ExternalFile'),
),
migrations.AddField(
model_name='contentcategory',
name='visibility',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ContentVisibility'),
),
migrations.AddField(
model_name='competitionsubmission',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.CompetitionSubmissionStatus'),
),
migrations.AddField(
model_name='competitionsubmission',
name='team',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='team', to='teams.Team'),
),
migrations.AddField(
model_name='competitionphase',
name='datasets',
field=models.ManyToManyField(blank=True, related_name='phase', to='web.Dataset'),
),
migrations.AddField(
model_name='competitionphase',
name='ingestion_program_organizer_dataset',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='ingestion_program_organizer_dataset', to='web.OrganizerDataSet'),
),
migrations.AddField(
model_name='competitionphase',
name='input_data_organizer_dataset',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='input_data_organizer_dataset', to='web.OrganizerDataSet', verbose_name='Input Data'),
),
migrations.AddField(
model_name='competitionphase',
name='public_data_organizer_dataset',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_data_organizer_dataset', to='web.OrganizerDataSet', verbose_name='Public Data'),
),
migrations.AddField(
model_name='competitionphase',
name='reference_data_organizer_dataset',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='reference_data_organizer_dataset', to='web.OrganizerDataSet', verbose_name='Reference Data'),
),
migrations.AddField(
model_name='competitionphase',
name='scoring_program_organizer_dataset',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='scoring_program_organizer_dataset', to='web.OrganizerDataSet', verbose_name='Scoring Program'),
),
migrations.AddField(
model_name='competitionphase',
name='starting_kit_organizer_dataset',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='starting_kit_organizer_dataset', to='web.OrganizerDataSet', verbose_name='Starting Kit'),
),
migrations.AddField(
model_name='competitionparticipant',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='web.ParticipantStatus'),
),
migrations.AddField(
model_name='competitionparticipant',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='participation', to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='submissionscoreset',
unique_together=set([('key', 'competition')]),
),
migrations.AlterUniqueTogether(
name='submissionscoredefgroup',
unique_together=set([('scoredef', 'group')]),
),
migrations.AlterUniqueTogether(
name='submissionscoredef',
unique_together=set([('key', 'competition')]),
),
migrations.AlterUniqueTogether(
name='submissionscore',
unique_together=set([('result', 'scoredef')]),
),
migrations.AlterUniqueTogether(
name='submissionresultgroupphase',
unique_together=set([('group', 'phase')]),
),
migrations.AlterUniqueTogether(
name='phaseleaderboardentry',
unique_together=set([('board', 'result')]),
),
migrations.AlterUniqueTogether(
name='pagecontainer',
unique_together=set([('object_id', 'content_type')]),
),
migrations.AlterUniqueTogether(
name='page',
unique_together=set([('label', 'category', 'container')]),
),
migrations.AlterUniqueTogether(
name='competitionsubmission',
unique_together=set([('submission_number', 'phase', 'participant')]),
),
migrations.AlterUniqueTogether(
name='competitionparticipant',
unique_together=set([('user', 'competition')]),
),
]
| 68.347899
| 320
| 0.638773
|
862eac2deade1010bc4f3b878e1ec9fb215225b6
| 360
|
py
|
Python
|
backend/database/user/migrations/0004_auto_20200227_2017.py
|
HansErikHeum/tdt4140_2020_gruppe42
|
a916b5a003a174a343e0625ca501b542907cda72
|
[
"MIT"
] | null | null | null |
backend/database/user/migrations/0004_auto_20200227_2017.py
|
HansErikHeum/tdt4140_2020_gruppe42
|
a916b5a003a174a343e0625ca501b542907cda72
|
[
"MIT"
] | null | null | null |
backend/database/user/migrations/0004_auto_20200227_2017.py
|
HansErikHeum/tdt4140_2020_gruppe42
|
a916b5a003a174a343e0625ca501b542907cda72
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.3 on 2020-02-27 19:17
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('user', '0003_auto_20200227_2014'),
]
operations = [
migrations.RenameField(
model_name='kalas',
old_name='zipCode',
new_name='postal',
),
]
| 18.947368
| 47
| 0.580556
|
3630bb0baea2cf0372e579317d83d3f92f499b67
| 34,399
|
py
|
Python
|
bin/storm.py
|
Amauris86/Storm
|
d53615369b03a03362c629d9b3dc5fabfccf50ac
|
[
"Apache-2.0"
] | 3
|
2017-09-05T12:00:41.000Z
|
2018-11-27T03:38:46.000Z
|
bin/storm.py
|
Amauris86/Storm
|
d53615369b03a03362c629d9b3dc5fabfccf50ac
|
[
"Apache-2.0"
] | null | null | null |
bin/storm.py
|
Amauris86/Storm
|
d53615369b03a03362c629d9b3dc5fabfccf50ac
|
[
"Apache-2.0"
] | 1
|
2020-11-24T17:33:33.000Z
|
2020-11-24T17:33:33.000Z
|
#!/usr/bin/env python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import random
import re
import shlex
import tempfile
import uuid
import subprocess as sub
import json
import sys
try:
# python 3
from urllib.parse import quote_plus
except ImportError:
# python 2
from urllib import quote_plus
try:
# python 3
import configparser
except ImportError:
# python 2
import ConfigParser as configparser
def is_windows():
return sys.platform.startswith('win')
def identity(x):
return x
def cygpath(x):
command = ["cygpath", "-wp", x]
p = sub.Popen(command,stdout=sub.PIPE)
output, errors = p.communicate()
lines = output.split(os.linesep)
return lines[0]
def init_storm_env():
global CLUSTER_CONF_DIR
ini_file = os.path.join(CLUSTER_CONF_DIR, 'storm_env.ini')
if not os.path.isfile(ini_file):
return
config = configparser.ConfigParser()
config.optionxform = str
config.read(ini_file)
options = config.options('environment')
for option in options:
value = config.get('environment', option)
os.environ[option] = value
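# Illustrative storm_env.ini contents (the key names below are examples only;
# init_storm_env exports every option found under the [environment] section as
# an environment variable before the rest of this script reads its settings):
#
#   [environment]
#   JAVA_HOME=/usr/lib/jvm/default-java
#   STORM_JAR_JVM_OPTS=-Xmx1024m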
normclasspath = cygpath if sys.platform == 'cygwin' else identity
STORM_DIR = os.sep.join(os.path.realpath( __file__ ).split(os.sep)[:-2])
USER_CONF_DIR = os.path.expanduser("~" + os.sep + ".storm")
STORM_CONF_DIR = os.getenv('STORM_CONF_DIR', None)
if STORM_CONF_DIR == None:
CLUSTER_CONF_DIR = os.path.join(STORM_DIR, "conf")
else:
CLUSTER_CONF_DIR = STORM_CONF_DIR
if (not os.path.isfile(os.path.join(USER_CONF_DIR, "storm.yaml"))):
USER_CONF_DIR = CLUSTER_CONF_DIR
STORM_LIB_DIR = os.path.join(STORM_DIR, "lib")
STORM_BIN_DIR = os.path.join(STORM_DIR, "bin")
STORM_LOG4J2_CONF_DIR = os.path.join(STORM_DIR, "log4j2")
STORM_SUPERVISOR_LOG_FILE = os.getenv('STORM_SUPERVISOR_LOG_FILE', "supervisor.log")
init_storm_env()
CONFIG_OPTS = []
CONFFILE = ""
JAR_JVM_OPTS = shlex.split(os.getenv('STORM_JAR_JVM_OPTS', ''))
JAVA_HOME = os.getenv('JAVA_HOME', None)
JAVA_CMD = 'java' if not JAVA_HOME else os.path.join(JAVA_HOME, 'bin', 'java')
if JAVA_HOME and not os.path.exists(JAVA_CMD):
print("ERROR: JAVA_HOME is invalid. Could not find bin/java at %s." % JAVA_HOME)
sys.exit(1)
STORM_EXT_CLASSPATH = os.getenv('STORM_EXT_CLASSPATH', None)
STORM_EXT_CLASSPATH_DAEMON = os.getenv('STORM_EXT_CLASSPATH_DAEMON', None)
DEP_JARS_OPTS = []
DEP_ARTIFACTS_OPTS = []
def get_config_opts():
global CONFIG_OPTS
return "-Dstorm.options=" + ','.join(map(quote_plus,CONFIG_OPTS))
if not os.path.exists(STORM_LIB_DIR):
print("******************************************")
print("The storm client can only be run from within a release. You appear to be trying to run the client from a checkout of Storm's source code.")
print("\nYou can download a Storm release at http://storm.apache.org/downloads.html")
print("******************************************")
sys.exit(1)
def get_jars_full(adir):
files = []
if os.path.isdir(adir):
files = os.listdir(adir)
elif os.path.exists(adir):
files = [adir]
ret = []
for f in files:
if f.endswith(".jar"):
ret.append(os.path.join(adir, f))
return ret
def get_classpath(extrajars, daemon=True):
ret = get_jars_full(STORM_DIR)
ret.extend(get_jars_full(STORM_DIR + "/lib"))
ret.extend(get_jars_full(STORM_DIR + "/extlib"))
if daemon:
ret.extend(get_jars_full(STORM_DIR + "/extlib-daemon"))
if STORM_EXT_CLASSPATH != None:
for path in STORM_EXT_CLASSPATH.split(os.pathsep):
ret.extend(get_jars_full(path))
if daemon and STORM_EXT_CLASSPATH_DAEMON != None:
for path in STORM_EXT_CLASSPATH_DAEMON.split(os.pathsep):
ret.extend(get_jars_full(path))
ret.extend(extrajars)
return normclasspath(os.pathsep.join(ret))
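# Note on extending the classpath (values below are illustrative): get_classpath() also
# picks up jars from STORM_EXT_CLASSPATH (and STORM_EXT_CLASSPATH_DAEMON for daemons),
# where each os.pathsep-separated entry may be a directory of jars or a single jar, e.g.
#   export STORM_EXT_CLASSPATH=/opt/extra-jars:/opt/more/custom.jar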
def confvalue(name, extrapaths, daemon=True):
global CONFFILE
command = [
JAVA_CMD, "-client", get_config_opts(), "-Dstorm.conf.file=" + CONFFILE,
"-cp", get_classpath(extrapaths, daemon), "org.apache.storm.command.ConfigValue", name
]
p = sub.Popen(command, stdout=sub.PIPE)
output, errors = p.communicate()
# python 3
if not isinstance(output, str):
output = output.decode('utf-8')
lines = output.split(os.linesep)
for line in lines:
tokens = line.split(" ")
if tokens[0] == "VALUE:":
return " ".join(tokens[1:])
return ""
def resolve_dependencies(artifacts):
if len(artifacts) == 0:
return {}
print("Resolving dependencies on demand: artifacts (%s)" % artifacts)
sys.stdout.flush()
# TODO: should we move some external modules to outer place?
# storm-submit module doesn't rely on storm-core and relevant libs
extrajars = get_jars_full(STORM_DIR + "/external/storm-submit-tools")
classpath = normclasspath(os.pathsep.join(extrajars))
command = [
JAVA_CMD, "-client", "-cp", classpath, "org.apache.storm.submit.command.DependencyResolverMain",
",".join(artifacts)
]
p = sub.Popen(command, stdout=sub.PIPE)
output, errors = p.communicate()
if p.returncode != 0:
raise RuntimeError("dependency handler returns non-zero code: code<%s> syserr<%s>" % (p.returncode, errors))
# python 3
if not isinstance(output, str):
output = output.decode('utf-8')
# For debug purpose, uncomment when you need to debug DependencyResolver
# print("Resolved dependencies: %s" % output)
try:
out_dict = json.loads(output)
return out_dict
except:
raise RuntimeError("dependency handler returns non-json response: sysout<%s>", output)
def print_localconfvalue(name):
"""Syntax: [storm localconfvalue conf-name]
Prints out the value for conf-name in the local Storm configs.
The local Storm configs are the ones in ~/.storm/storm.yaml merged
in with the configs in defaults.yaml.
"""
print(name + ": " + confvalue(name, [USER_CONF_DIR]))
def print_remoteconfvalue(name):
"""Syntax: [storm remoteconfvalue conf-name]
Prints out the value for conf-name in the cluster's Storm configs.
The cluster's Storm configs are the ones in $STORM-PATH/conf/storm.yaml
merged in with the configs in defaults.yaml.
This command must be run on a cluster machine.
"""
print(name + ": " + confvalue(name, [CLUSTER_CONF_DIR]))
def parse_args(string):
"""Takes a string of whitespace-separated tokens and parses it into a list.
Whitespace inside tokens may be quoted with single quotes, double quotes or
backslash (similar to command-line arguments in bash).
>>> parse_args(r'''"a a" 'b b' c\ c "d'd" 'e"e' 'f\'f' "g\"g" "i""i" 'j''j' k" "k l' l' mm n\\n''')
['a a', 'b b', 'c c', "d'd", 'e"e', "f'f", 'g"g', 'ii', 'jj', 'k k', 'l l', 'mm', r'n\n']
"""
re_split = re.compile(r'''((?:
[^\s"'\\] |
"(?: [^"\\] | \\.)*" |
'(?: [^'\\] | \\.)*' |
\\.
)+)''', re.VERBOSE)
args = re_split.split(string)[1::2]
args = [re.compile(r'"((?:[^"\\]|\\.)*)"').sub('\\1', x) for x in args]
args = [re.compile(r"'((?:[^'\\]|\\.)*)'").sub('\\1', x) for x in args]
return [re.compile(r'\\(.)').sub('\\1', x) for x in args]
def exec_storm_class(klass, jvmtype="-server", jvmopts=[], extrajars=[], args=[], fork=False, daemon=True, daemonName=""):
global CONFFILE
storm_log_dir = confvalue("storm.log.dir",[CLUSTER_CONF_DIR])
if(storm_log_dir == None or storm_log_dir == "null"):
storm_log_dir = os.path.join(STORM_DIR, "logs")
all_args = [
JAVA_CMD, jvmtype,
"-Ddaemon.name=" + daemonName,
get_config_opts(),
"-Dstorm.home=" + STORM_DIR,
"-Dstorm.log.dir=" + storm_log_dir,
"-Djava.library.path=" + confvalue("java.library.path", extrajars, daemon),
"-Dstorm.conf.file=" + CONFFILE,
"-cp", get_classpath(extrajars, daemon),
] + jvmopts + [klass] + list(args)
print("Running: " + " ".join(all_args))
sys.stdout.flush()
exit_code = 0
if fork:
exit_code = os.spawnvp(os.P_WAIT, JAVA_CMD, all_args)
elif is_windows():
# handling whitespaces in JAVA_CMD
try:
ret = sub.check_output(all_args, stderr=sub.STDOUT)
print(ret)
        except sub.CalledProcessError as e:
sys.exit(e.returncode)
else:
os.execvp(JAVA_CMD, all_args)
return exit_code
def jar(jarfile, klass, *args):
"""Syntax: [storm jar topology-jar-path class ...]
Runs the main method of class with the specified arguments.
The storm jars and configs in ~/.storm are put on the classpath.
The process is configured so that StormSubmitter
(http://storm.apache.org/releases/current/javadocs/org/apache/storm/StormSubmitter.html)
will upload the jar at topology-jar-path when the topology is submitted.
    When you want to ship other jars which are not included in the application jar, you can pass them to the --jars option as a comma-separated string.
For example, --jars "your-local-jar.jar,your-local-jar2.jar" will load your-local-jar.jar and your-local-jar2.jar.
    When you want to ship Maven artifacts and their transitive dependencies, you can pass them to the --artifacts option as a comma-separated string.
    You can also exclude some dependencies, just as you would in a Maven pom.
    Add exclusion artifacts after the artifact as a '^'-separated string.
For example, --artifacts "redis.clients:jedis:2.9.0,org.apache.kafka:kafka_2.10:0.8.2.2^org.slf4j:slf4j-log4j12" will load jedis and kafka artifact and all of transitive dependencies but exclude slf4j-log4j12 from kafka.
Complete example of both options is here: `./bin/storm jar example/storm-starter/storm-starter-topologies-*.jar org.apache.storm.starter.RollingTopWords blobstore-remote2 remote --jars "./external/storm-redis/storm-redis-1.1.0.jar,./external/storm-kafka/storm-kafka-1.1.0.jar" --artifacts "redis.clients:jedis:2.9.0,org.apache.kafka:kafka_2.10:0.8.2.2^org.slf4j:slf4j-log4j12"`
    When you pass the jars and/or artifacts options, StormSubmitter will upload them when the topology is submitted, and they will be included in the classpath of both the process which runs the class and the workers for that topology.
"""
global DEP_JARS_OPTS, DEP_ARTIFACTS_OPTS
local_jars = DEP_JARS_OPTS
artifact_to_file_jars = resolve_dependencies(DEP_ARTIFACTS_OPTS)
transform_class = confvalue("client.jartransformer.class", [CLUSTER_CONF_DIR])
if (transform_class != None and transform_class != "null"):
tmpjar = os.path.join(tempfile.gettempdir(), uuid.uuid1().hex+".jar")
exec_storm_class("org.apache.storm.daemon.ClientJarTransformerRunner", args=[transform_class, jarfile, tmpjar], fork=True, daemon=False)
extra_jars = [tmpjar, USER_CONF_DIR, STORM_BIN_DIR]
extra_jars.extend(local_jars)
extra_jars.extend(artifact_to_file_jars.values())
topology_runner_exit_code = exec_storm_class(
klass,
jvmtype="-client",
extrajars=extra_jars,
args=args,
daemon=False,
fork=True,
jvmopts=JAR_JVM_OPTS + ["-Dstorm.jar=" + tmpjar] +
["-Dstorm.dependency.jars=" + ",".join(local_jars)] +
["-Dstorm.dependency.artifacts=" + json.dumps(artifact_to_file_jars)])
os.remove(tmpjar)
sys.exit(topology_runner_exit_code)
else:
extra_jars=[jarfile, USER_CONF_DIR, STORM_BIN_DIR]
extra_jars.extend(local_jars)
extra_jars.extend(artifact_to_file_jars.values())
exec_storm_class(
klass,
jvmtype="-client",
extrajars=extra_jars,
args=args,
daemon=False,
jvmopts=JAR_JVM_OPTS + ["-Dstorm.jar=" + jarfile] +
["-Dstorm.dependency.jars=" + ",".join(local_jars)] +
["-Dstorm.dependency.artifacts=" + json.dumps(artifact_to_file_jars)])
def sql(sql_file, topology_name):
"""Syntax: [storm sql sql-file topology-name], or [storm sql sql-file --explain] when activating explain mode
Compiles the SQL statements into a Trident topology and submits it to Storm.
    If the user activates explain mode, the SQL Runner analyzes each query statement and shows the query plan instead of submitting the topology.
    The --jars and --artifacts options available for the jar command also apply to the sql command.
    Please refer to "help jar" to see how to use the --jars and --artifacts options.
    You normally want to pass these options, since in many cases your SQL needs a data source that lives in external storage.
"""
global DEP_JARS_OPTS, DEP_ARTIFACTS_OPTS
local_jars = DEP_JARS_OPTS
artifact_to_file_jars = resolve_dependencies(DEP_ARTIFACTS_OPTS)
sql_core_jars = get_jars_full(STORM_DIR + "/external/sql/storm-sql-core")
sql_runtime_jars = get_jars_full(STORM_DIR + "/external/sql/storm-sql-runtime")
# include storm-sql-runtime jar(s) to local jar list
local_jars.extend(sql_runtime_jars)
extrajars=[USER_CONF_DIR, STORM_BIN_DIR]
extrajars.extend(local_jars)
extrajars.extend(artifact_to_file_jars.values())
# include this for running StormSqlRunner, but not for generated topology
extrajars.extend(sql_core_jars)
if topology_name == "--explain":
args = ["--file", sql_file, "--explain"]
else:
args = ["--file", sql_file, "--topology", topology_name]
exec_storm_class(
"org.apache.storm.sql.StormSqlRunner",
jvmtype="-client",
extrajars=extrajars,
args=args,
daemon=False,
jvmopts=["-Dstorm.dependency.jars=" + ",".join(local_jars)] +
["-Dstorm.dependency.artifacts=" + json.dumps(artifact_to_file_jars)])
def kill(*args):
"""Syntax: [storm kill topology-name [-w wait-time-secs]]
Kills the topology with the name topology-name. Storm will
first deactivate the topology's spouts for the duration of
the topology's message timeout to allow all messages currently
being processed to finish processing. Storm will then shutdown
the workers and clean up their state. You can override the length
of time Storm waits between deactivation and shutdown with the -w flag.
"""
if not args:
print_usage(command="kill")
sys.exit(2)
exec_storm_class(
"org.apache.storm.command.KillTopology",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def upload_credentials(*args):
"""Syntax: [storm upload_credentials topology-name [credkey credvalue]*]
Uploads a new set of credentials to a running topology
"""
if not args:
print_usage(command="upload_credentials")
sys.exit(2)
exec_storm_class(
"org.apache.storm.command.UploadCredentials",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def blobstore(*args):
"""Syntax: [storm blobstore cmd]
list [KEY...] - lists blobs currently in the blob store
cat [-f FILE] KEY - read a blob and then either write it to a file, or STDOUT (requires read access).
    create [-f FILE] [-a ACL ...] [--replication-factor NUMBER] KEY - create a new blob. Contents come from a FILE
         or STDIN. ACL is in the form [uo]:[username]:[r-][w-][a-] and can be a comma-separated list.
    update [-f FILE] KEY - update the contents of a blob. Contents come from
a FILE or STDIN (requires write access).
delete KEY - delete an entry from the blob store (requires write access).
    set-acl [-s ACL] KEY - ACL is in the form [uo]:[username]:[r-][w-][a-] and can be a comma
separated list (requires admin access).
replication --read KEY - Used to read the replication factor of the blob.
replication --update --replication-factor NUMBER KEY where NUMBER > 0. It is used to update the
replication factor of a blob.
For example, the following would create a mytopo:data.tgz key using the data
stored in data.tgz. User alice would have full access, bob would have
read/write access and everyone else would have read access.
storm blobstore create mytopo:data.tgz -f data.tgz -a u:alice:rwa,u:bob:rw,o::r
"""
exec_storm_class(
"org.apache.storm.command.Blobstore",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def heartbeats(*args):
"""Syntax: [storm heartbeats [cmd]]
    list PATH - lists heartbeat nodes under PATH currently in the ClusterState.
get PATH - Get the heartbeat data at PATH
"""
exec_storm_class(
"org.apache.storm.command.Heartbeats",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def activate(*args):
"""Syntax: [storm activate topology-name]
Activates the specified topology's spouts.
"""
if not args:
print_usage(command="activate")
sys.exit(2)
exec_storm_class(
"org.apache.storm.command.Activate",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def set_log_level(*args):
"""
Dynamically change topology log levels
Syntax: [storm set_log_level -l [logger name]=[log level][:optional timeout] -r [logger name] topology-name]
where log level is one of:
ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL, OFF
and timeout is integer seconds.
e.g.
./bin/storm set_log_level -l ROOT=DEBUG:30 topology-name
Set the root logger's level to DEBUG for 30 seconds
./bin/storm set_log_level -l com.myapp=WARN topology-name
Set the com.myapp logger's level to WARN for 30 seconds
./bin/storm set_log_level -l com.myapp=WARN -l com.myOtherLogger=ERROR:123 topology-name
        Set the com.myapp logger's level to WARN indefinitely, and com.myOtherLogger
to ERROR for 123 seconds
./bin/storm set_log_level -r com.myOtherLogger topology-name
Clears settings, resetting back to the original level
"""
exec_storm_class(
"org.apache.storm.command.SetLogLevel",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def listtopos(*args):
"""Syntax: [storm list]
List the running topologies and their statuses.
"""
exec_storm_class(
"org.apache.storm.command.ListTopologies",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def deactivate(*args):
"""Syntax: [storm deactivate topology-name]
Deactivates the specified topology's spouts.
"""
if not args:
print_usage(command="deactivate")
sys.exit(2)
exec_storm_class(
"org.apache.storm.command.Deactivate",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def rebalance(*args):
"""Syntax: [storm rebalance topology-name [-w wait-time-secs] [-n new-num-workers] [-e component=parallelism]*]
Sometimes you may wish to spread out where the workers for a topology
are running. For example, let's say you have a 10 node cluster running
4 workers per node, and then let's say you add another 10 nodes to
the cluster. You may wish to have Storm spread out the workers for the
running topology so that each node runs 2 workers. One way to do this
is to kill the topology and resubmit it, but Storm provides a "rebalance"
command that provides an easier way to do this.
Rebalance will first deactivate the topology for the duration of the
message timeout (overridable with the -w flag) and then redistribute
the workers evenly around the cluster. The topology will then return to
its previous state of activation (so a deactivated topology will still
be deactivated and an activated topology will go back to being activated).
The rebalance command can also be used to change the parallelism of a running topology.
Use the -n and -e switches to change the number of workers or number of executors of a component
respectively.
"""
if not args:
print_usage(command="rebalance")
sys.exit(2)
exec_storm_class(
"org.apache.storm.command.Rebalance",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
def get_errors(*args):
"""Syntax: [storm get-errors topology-name]
Get the latest error from the running topology. The returned result contains
the key value pairs for component-name and component-error for the components in error.
The result is returned in json format.
"""
if not args:
print_usage(command="get_errors")
sys.exit(2)
exec_storm_class(
"org.apache.storm.command.GetErrors",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, os.path.join(STORM_DIR, "bin")])
def healthcheck(*args):
"""Syntax: [storm node-health-check]
Run health checks on the local supervisor.
"""
exec_storm_class(
"org.apache.storm.command.HealthCheck",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, os.path.join(STORM_DIR, "bin")])
def kill_workers(*args):
"""Syntax: [storm kill_workers]
Kill the workers running on this supervisor. This command should be run
on a supervisor node. If the cluster is running in secure mode, then user needs
to have admin rights on the node to be able to successfully kill all workers.
"""
exec_storm_class(
"org.apache.storm.command.KillWorkers",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, os.path.join(STORM_DIR, "bin")])
def admin(*args):
"""Syntax: [storm admin cmd]
    This is a proxy for Nimbus that allows executing admin commands. As of now it supports
    a command to remove corrupt topologies.
    Nimbus doesn't clean up corrupted topologies automatically. This command cleans
    up corrupt topologies, i.e. topologies whose code is not available on the blobstore.
    In the future this command may support more admin commands.
Supported command
storm admin remove_corrupt_topologies
"""
exec_storm_class(
"org.apache.storm.command.AdminCommands",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, os.path.join(STORM_DIR, "bin")])
def shell(resourcesdir, command, *args):
"""Syntax: [storm shell resourcesdir command args]
    Archives resources into a jar, uploads the jar to Nimbus, and executes the following arguments locally. Useful for non-JVM languages.
eg: `storm shell resources/ python topology.py arg1 arg2`
"""
tmpjarpath = "stormshell" + str(random.randint(0, 10000000)) + ".jar"
os.system("jar cf %s %s" % (tmpjarpath, resourcesdir))
runnerargs = [tmpjarpath, command]
runnerargs.extend(args)
exec_storm_class(
"org.apache.storm.command.shell_submission",
args=runnerargs,
jvmtype="-client",
extrajars=[USER_CONF_DIR],
fork=True)
os.system("rm " + tmpjarpath)
def repl():
"""Syntax: [storm repl]
Opens up a Clojure REPL with the storm jars and configuration
on the classpath. Useful for debugging.
"""
cppaths = [CLUSTER_CONF_DIR]
exec_storm_class("clojure.main", jvmtype="-client", extrajars=cppaths)
def get_log4j2_conf_dir():
cppaths = [CLUSTER_CONF_DIR]
storm_log4j2_conf_dir = confvalue("storm.log4j2.conf.dir", cppaths)
if(storm_log4j2_conf_dir == None or storm_log4j2_conf_dir == "null"):
storm_log4j2_conf_dir = STORM_LOG4J2_CONF_DIR
elif(not os.path.isabs(storm_log4j2_conf_dir)):
storm_log4j2_conf_dir = os.path.join(STORM_DIR, storm_log4j2_conf_dir)
return storm_log4j2_conf_dir
def nimbus(klass="org.apache.storm.daemon.nimbus"):
"""Syntax: [storm nimbus]
Launches the nimbus daemon. This command should be run under
supervision with a tool like daemontools or monit.
See Setting up a Storm cluster for more information.
(http://storm.apache.org/documentation/Setting-up-a-Storm-cluster)
"""
cppaths = [CLUSTER_CONF_DIR]
jvmopts = parse_args(confvalue("nimbus.childopts", cppaths)) + [
"-Dlogfile.name=nimbus.log",
"-DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector",
"-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml"),
]
exec_storm_class(
klass,
jvmtype="-server",
daemonName="nimbus",
extrajars=cppaths,
jvmopts=jvmopts)
def pacemaker(klass="org.apache.storm.pacemaker.Pacemaker"):
"""Syntax: [storm pacemaker]
Launches the Pacemaker daemon. This command should be run under
supervision with a tool like daemontools or monit.
See Setting up a Storm cluster for more information.
(http://storm.apache.org/documentation/Setting-up-a-Storm-cluster)
"""
cppaths = [CLUSTER_CONF_DIR]
jvmopts = parse_args(confvalue("pacemaker.childopts", cppaths)) + [
"-Dlogfile.name=pacemaker.log",
"-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml"),
]
exec_storm_class(
klass,
jvmtype="-server",
daemonName="pacemaker",
extrajars=cppaths,
jvmopts=jvmopts)
def supervisor(klass="org.apache.storm.daemon.supervisor.Supervisor"):
"""Syntax: [storm supervisor]
Launches the supervisor daemon. This command should be run
under supervision with a tool like daemontools or monit.
See Setting up a Storm cluster for more information.
(http://storm.apache.org/documentation/Setting-up-a-Storm-cluster)
"""
cppaths = [CLUSTER_CONF_DIR]
jvmopts = parse_args(confvalue("supervisor.childopts", cppaths)) + [
"-Dlogfile.name=" + STORM_SUPERVISOR_LOG_FILE,
"-DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector",
"-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml"),
]
exec_storm_class(
klass,
jvmtype="-server",
daemonName="supervisor",
extrajars=cppaths,
jvmopts=jvmopts)
def ui():
"""Syntax: [storm ui]
Launches the UI daemon. The UI provides a web interface for a Storm
cluster and shows detailed stats about running topologies. This command
should be run under supervision with a tool like daemontools or monit.
See Setting up a Storm cluster for more information.
(http://storm.apache.org/documentation/Setting-up-a-Storm-cluster)
"""
cppaths = [CLUSTER_CONF_DIR]
jvmopts = parse_args(confvalue("ui.childopts", cppaths)) + [
"-Dlogfile.name=ui.log",
"-DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector",
"-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml")
]
exec_storm_class(
"org.apache.storm.ui.core",
jvmtype="-server",
daemonName="ui",
jvmopts=jvmopts,
extrajars=[STORM_DIR, CLUSTER_CONF_DIR])
def logviewer():
"""Syntax: [storm logviewer]
Launches the log viewer daemon. It provides a web interface for viewing
storm log files. This command should be run under supervision with a
tool like daemontools or monit.
See Setting up a Storm cluster for more information.
(http://storm.apache.org/documentation/Setting-up-a-Storm-cluster)
"""
cppaths = [CLUSTER_CONF_DIR]
jvmopts = parse_args(confvalue("logviewer.childopts", cppaths)) + [
"-Dlogfile.name=logviewer.log",
"-DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector",
"-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml")
]
exec_storm_class(
"org.apache.storm.daemon.logviewer",
jvmtype="-server",
daemonName="logviewer",
jvmopts=jvmopts,
extrajars=[STORM_DIR, CLUSTER_CONF_DIR])
def drpc():
"""Syntax: [storm drpc]
Launches a DRPC daemon. This command should be run under supervision
with a tool like daemontools or monit.
See Distributed RPC for more information.
(http://storm.apache.org/documentation/Distributed-RPC)
"""
cppaths = [CLUSTER_CONF_DIR]
jvmopts = parse_args(confvalue("drpc.childopts", cppaths)) + [
"-Dlogfile.name=drpc.log",
"-DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector",
"-Dlog4j.configurationFile=" + os.path.join(get_log4j2_conf_dir(), "cluster.xml")
]
exec_storm_class(
"org.apache.storm.daemon.drpc",
jvmtype="-server",
daemonName="drpc",
jvmopts=jvmopts,
extrajars=[CLUSTER_CONF_DIR])
def dev_zookeeper():
"""Syntax: [storm dev-zookeeper]
Launches a fresh Zookeeper server using "dev.zookeeper.path" as its local dir and
"storm.zookeeper.port" as its port. This is only intended for development/testing, the
Zookeeper instance launched is not configured to be used in production.
"""
cppaths = [CLUSTER_CONF_DIR]
exec_storm_class(
"org.apache.storm.command.DevZookeeper",
jvmtype="-server",
extrajars=[CLUSTER_CONF_DIR])
def version():
"""Syntax: [storm version]
Prints the version number of this Storm release.
"""
cppaths = [CLUSTER_CONF_DIR]
exec_storm_class(
"org.apache.storm.utils.VersionInfo",
jvmtype="-client",
extrajars=[CLUSTER_CONF_DIR])
def print_classpath():
"""Syntax: [storm classpath]
Prints the classpath used by the storm client when running commands.
"""
print(get_classpath([]))
def monitor(*args):
"""Syntax: [storm monitor topology-name [-i interval-secs] [-m component-id] [-s stream-id] [-w [emitted | transferred]]]
    Monitor the given topology's throughput interactively.
    One can specify poll-interval, component-id, stream-id, watch-item[emitted | transferred]
    By default,
    poll-interval is 4 seconds;
    all component-ids will be listed;
stream-id is 'default';
watch-item is 'emitted';
"""
exec_storm_class(
"org.apache.storm.command.Monitor",
args=args,
jvmtype="-client",
extrajars=[USER_CONF_DIR, STORM_BIN_DIR])
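# Hedged usage example (editor's addition), with a hypothetical topology and component name:
#
#   storm monitor wordCountTopology -i 10 -m wordSpout -s default -w emitted
#
# polls the wordSpout component's default stream every 10 seconds and reports its
# emitted count, per the options documented in the docstring above.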
def print_commands():
"""Print all client commands and link to documentation"""
print("Commands:\n\t" + "\n\t".join(sorted(COMMANDS.keys())))
print("\nHelp: \n\thelp \n\thelp <command>")
print("\nDocumentation for the storm client can be found at http://storm.apache.org/documentation/Command-line-client.html\n")
print("Configs can be overridden using one or more -c flags, e.g. \"storm list -c nimbus.host=nimbus.mycompany.com\"\n")
def print_usage(command=None):
"""Print one help message or list of available commands"""
    if command is not None:
if command in COMMANDS:
print(COMMANDS[command].__doc__ or
"No documentation provided for <%s>" % command)
else:
print("<%s> is not a valid command" % command)
else:
print_commands()
def unknown_command(*args):
print("Unknown command: [storm %s]" % ' '.join(sys.argv[1:]))
print_usage()
sys.exit(254)
COMMANDS = {"jar": jar, "kill": kill, "shell": shell, "nimbus": nimbus, "ui": ui, "logviewer": logviewer,
"drpc": drpc, "supervisor": supervisor, "localconfvalue": print_localconfvalue,
"remoteconfvalue": print_remoteconfvalue, "repl": repl, "classpath": print_classpath,
"activate": activate, "deactivate": deactivate, "rebalance": rebalance, "help": print_usage,
"list": listtopos, "dev-zookeeper": dev_zookeeper, "version": version, "monitor": monitor,
"upload-credentials": upload_credentials, "pacemaker": pacemaker, "heartbeats": heartbeats, "blobstore": blobstore,
"get-errors": get_errors, "set_log_level": set_log_level, "kill_workers": kill_workers,
"node-health-check": healthcheck, "sql": sql, "admin": admin}
def parse_config(config_list):
global CONFIG_OPTS
if len(config_list) > 0:
for config in config_list:
CONFIG_OPTS.append(config)
def parse_config_opts(args):
curr = args[:]
curr.reverse()
config_list = []
args_list = []
jars_list = []
artifacts_list = []
while len(curr) > 0:
token = curr.pop()
if token == "-c":
config_list.append(curr.pop())
elif token == "--config":
global CONFFILE
CONFFILE = curr.pop()
elif token == "--jars":
jars_list.extend(curr.pop().split(','))
elif token == "--artifacts":
artifacts_list.extend(curr.pop().split(','))
else:
args_list.append(token)
return config_list, jars_list, artifacts_list, args_list
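# Hedged example (editor's addition): for a hypothetical invocation, the helper above
# splits the raw argument list into (config overrides, --jars entries, --artifacts
# entries, remaining args), e.g.
#
#   parse_config_opts(["-c", "nimbus.host=nimbus.example.com",
#                      "--jars", "a.jar,b.jar", "jar", "topo.jar"])
#   # -> (["nimbus.host=nimbus.example.com"], ["a.jar", "b.jar"], [], ["jar", "topo.jar"])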
def main():
if len(sys.argv) <= 1:
print_usage()
sys.exit(-1)
global CONFIG_OPTS, DEP_JARS_OPTS, DEP_ARTIFACTS_OPTS
config_list, jars_list, artifacts_list, args = parse_config_opts(sys.argv[1:])
parse_config(config_list)
DEP_JARS_OPTS = jars_list
DEP_ARTIFACTS_OPTS = artifacts_list
COMMAND = args[0]
ARGS = args[1:]
(COMMANDS.get(COMMAND, unknown_command))(*ARGS)
if __name__ == "__main__":
main()
| 38.477629
| 381
| 0.671618
|
d97f6472ad36d99e6ab02a9f8589a32f42888ec5
| 723
|
py
|
Python
|
backend/comment/admin.py
|
PY-GZKY/django-miniprogram
|
0cc49736dbe7bfd64dbcb224c57d7df7b9c72d3a
|
[
"Apache-2.0"
] | 2
|
2021-11-29T01:26:24.000Z
|
2022-02-03T13:59:41.000Z
|
backend/comment/admin.py
|
PY-GZKY/django-miniprogram
|
0cc49736dbe7bfd64dbcb224c57d7df7b9c72d3a
|
[
"Apache-2.0"
] | null | null | null |
backend/comment/admin.py
|
PY-GZKY/django-miniprogram
|
0cc49736dbe7bfd64dbcb224c57d7df7b9c72d3a
|
[
"Apache-2.0"
] | null | null | null |
# from django.contrib import admin
# from django.db import models
# from django.forms import TextInput, Textarea
# from import_export.admin import ImportExportModelAdmin
#
# from comment.models import Comment
#
# @admin.register(Comment)
# class CommentAdmin(ImportExportModelAdmin):
# list_display = ('author', 'article', 'parent', 'content', 'created')
# list_per_page = 5
#
# fieldsets = (
# ('xxx', {
# 'fields': ('author', 'article', 'parent', 'content', 'created')
# }),
# )
#
# formfield_overrides = {
# models.CharField: {'widget': TextInput(attrs={'size': '59'})},
# models.TextField: {'widget': Textarea(attrs={'rows': 4, 'cols': 59})},
# }
#
| 30.125
| 80
| 0.614108
|
625d18ebee8093e27c08ae2cce43afde05a25725
| 95,502
|
py
|
Python
|
retrigger/retrigger.py
|
fixator10/Trusty-cogs
|
3d47a63f562cb64eb44da6bb53cfe9f8324026e7
|
[
"MIT"
] | 148
|
2017-04-23T19:57:50.000Z
|
2022-03-12T06:59:58.000Z
|
retrigger/retrigger.py
|
mina9999/Trusty-cogs
|
a47de7c233f3c1802effd29f4a86f8a9b0e2b34a
|
[
"MIT"
] | 155
|
2018-01-01T13:27:45.000Z
|
2022-03-12T05:17:51.000Z
|
retrigger/retrigger.py
|
mina9999/Trusty-cogs
|
a47de7c233f3c1802effd29f4a86f8a9b0e2b34a
|
[
"MIT"
] | 221
|
2017-04-02T00:26:08.000Z
|
2022-03-26T15:06:54.000Z
|
import asyncio
import logging
from multiprocessing.pool import Pool
from pathlib import Path
from typing import Optional, Union
import discord
from discord.ext import tasks
from redbot.core import Config, VersionInfo, checks, commands, modlog, version_info
from redbot.core.commands import TimedeltaConverter
from redbot.core.i18n import Translator, cog_i18n
# from redbot.core.utils import menus
from redbot.core.utils.chat_formatting import humanize_list, pagify
from redbot.core.utils.menus import start_adding_reactions
from redbot.core.utils.predicates import ReactionPredicate
from .converters import (
ChannelUserRole,
MultiResponse,
Trigger,
TriggerExists,
ValidEmoji,
ValidRegex,
)
from .menus import BaseMenu, ExplainReTriggerPages, ReTriggerMenu, ReTriggerPages
from .triggerhandler import TriggerHandler
log = logging.getLogger("red.trusty-cogs.ReTrigger")
_ = Translator("ReTrigger", __file__)
try:
import regex as re
except ImportError:
import re
@cog_i18n(_)
class ReTrigger(TriggerHandler, commands.Cog):
"""
Trigger bot events using regular expressions
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
__author__ = ["TrustyJAID"]
__version__ = "2.20.4"
def __init__(self, bot):
self.bot = bot
self.config = Config.get_conf(self, 964565433247, force_registration=True)
default_guild = {
"trigger_list": {},
"allow_multiple": False,
"modlog": "default",
"ban_logs": False,
"kick_logs": False,
"add_role_logs": False,
"remove_role_logs": False,
"filter_logs": False,
"bypass": False,
}
self.config.register_guild(**default_guild)
self.config.register_global(trigger_timeout=1)
self.re_pool = Pool()
self.triggers = {}
self.__unload = self.cog_unload
self.trigger_timeout = 1
self.save_loop.start()
def format_help_for_context(self, ctx: commands.Context) -> str:
"""
Thanks Sinbad!
"""
pre_processed = super().format_help_for_context(ctx)
return f"{pre_processed}\n\nCog Version: {self.__version__}"
def cog_unload(self):
if 218773382617890828 in self.bot.owner_ids:
try:
self.bot.remove_dev_env_value("retrigger")
except Exception:
log.exception("Error removing retrigger from dev environment.")
pass
log.debug("Closing process pools.")
self.re_pool.close()
self.bot.loop.run_in_executor(None, self.re_pool.join)
self.save_loop.cancel()
async def save_all_triggers(self):
for guild_id, triggers in self.triggers.items():
guild = self.bot.get_guild(guild_id)
if not guild:
continue
async with self.config.guild(guild).trigger_list() as trigger_list:
for trigger in triggers:
try:
trigger_list[trigger.name] = await trigger.to_json()
except KeyError:
continue
await asyncio.sleep(0.1)
@tasks.loop(seconds=120)
async def save_loop(self):
await self.save_all_triggers()
@save_loop.after_loop
async def after_save_loop(self):
if self.save_loop.is_being_cancelled():
await self.save_all_triggers()
@save_loop.before_loop
async def before_save_loop(self):
if version_info >= VersionInfo.from_str("3.2.0"):
await self.bot.wait_until_red_ready()
else:
await self.bot.wait_until_ready()
if 218773382617890828 in self.bot.owner_ids:
# This doesn't work on bot startup but that's fine
try:
self.bot.add_dev_env_value("retrigger", lambda x: self)
except Exception:
log.error("Error adding retrigger to dev environment.")
pass
self.trigger_timeout = await self.config.trigger_timeout()
data = await self.config.all_guilds()
for guild, settings in data.items():
self.triggers[guild] = []
for trigger in settings["trigger_list"].values():
try:
new_trigger = await Trigger.from_json(trigger)
except Exception:
log.exception("Error trying to compile regex pattern.")
# I might move this to DM the author of the trigger
# before this becomes actually breaking
self.triggers[guild].append(new_trigger)
@commands.group()
@commands.guild_only()
async def retrigger(self, ctx: commands.Context) -> None:
"""
Setup automatic triggers based on regular expressions
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
pass
@checks.is_owner()
@retrigger.command()
async def deleteallbyuser(self, ctx: commands.Context, user_id: int):
"""
Delete all triggers created by a specified user ID.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
await self.red_delete_data_for_user(requester="owner", user_id=user_id)
await ctx.tick()
@retrigger.group(name="blocklist", aliases=["blacklist"])
@checks.mod_or_permissions(manage_messages=True)
async def blacklist(self, ctx: commands.Context) -> None:
"""
Set blocklist options for retrigger
blocklisting supports channels, users, or roles
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
pass
@retrigger.group(name="allowlist", aliases=["whitelist"])
@checks.mod_or_permissions(manage_messages=True)
async def whitelist(self, ctx: commands.Context) -> None:
"""
Set allowlist options for retrigger
allowlisting supports channels, users, or roles
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
pass
@retrigger.group(name="modlog")
@checks.mod_or_permissions(manage_channels=True)
async def _modlog(self, ctx: commands.Context) -> None:
"""
Set which events to record in the modlog.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
pass
@retrigger.group(name="edit")
@checks.mod_or_permissions(manage_channels=True)
async def _edit(self, ctx: commands.Context) -> None:
"""
Edit various settings in a set trigger.
Note: Only the server owner, Bot owner, or original
author can edit a saved trigger. Multi triggers
cannot be edited.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
pass
@_modlog.command(name="settings", aliases=["list"])
async def modlog_settings(self, ctx: commands.Context) -> None:
"""
Show the current modlog settings for this server.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
guild_data = await self.config.guild(ctx.guild).all()
variables = {
"ban_logs": _("Bans"),
"kick_logs": _("Kicks"),
"add_role_logs": _("Add Roles"),
"remove_role_logs": _("Remove Roles"),
"filter_logs": _("Filtered Messages"),
"modlog": _("Channel"),
}
msg = ""
        for log_name, name in variables.items():
            msg += f"__**{name}**__: {guild_data[log_name]}\n"
await ctx.maybe_send_embed(msg)
@_modlog.command(name="bans", aliases=["ban"])
@checks.mod_or_permissions(manage_channels=True)
async def modlog_bans(self, ctx: commands.Context) -> None:
"""
Toggle custom ban messages in the modlog
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if await self.config.guild(ctx.guild).ban_logs():
await self.config.guild(ctx.guild).ban_logs.set(False)
msg = _("Custom ban events disabled.")
# await ctx.send(msg)
else:
await self.config.guild(ctx.guild).ban_logs.set(True)
msg = _("Custom ban events will now appear in the modlog if it's setup.")
await ctx.send(msg)
@_modlog.command(name="kicks", aliases=["kick"])
@checks.mod_or_permissions(manage_channels=True)
async def modlog_kicks(self, ctx: commands.Context) -> None:
"""
Toggle custom kick messages in the modlog
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if await self.config.guild(ctx.guild).kick_logs():
await self.config.guild(ctx.guild).kick_logs.set(False)
msg = _("Custom kick events disabled.")
# await ctx.send(msg)
else:
await self.config.guild(ctx.guild).kick_logs.set(True)
msg = _("Custom kick events will now appear in the modlog if it's setup.")
await ctx.send(msg)
@_modlog.command(name="filter", aliases=["delete", "filters", "deletes"])
@checks.mod_or_permissions(manage_channels=True)
async def modlog_filter(self, ctx: commands.Context) -> None:
"""
Toggle custom filter messages in the modlog
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if await self.config.guild(ctx.guild).filter_logs():
await self.config.guild(ctx.guild).filter_logs.set(False)
msg = _("Custom filter events disabled.")
# await ctx.send(msg)
else:
await self.config.guild(ctx.guild).filter_logs.set(True)
msg = _("Custom filter events will now appear in the modlog if it's setup.")
await ctx.send(msg)
@_modlog.command(name="addroles", aliases=["addrole"])
@checks.mod_or_permissions(manage_channels=True)
async def modlog_addroles(self, ctx: commands.Context) -> None:
"""
Toggle custom add role messages in the modlog
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if await self.config.guild(ctx.guild).add_role_logs():
await self.config.guild(ctx.guild).add_role_logs.set(False)
msg = _("Custom add role events disabled.")
# await ctx.send(msg)
else:
await self.config.guild(ctx.guild).add_role_logs.set(True)
msg = _("Custom add role events will now appear in the modlog if it's setup.")
await ctx.send(msg)
@_modlog.command(name="removeroles", aliases=["removerole", "remrole", "rolerem"])
@checks.mod_or_permissions(manage_channels=True)
async def modlog_removeroles(self, ctx: commands.Context) -> None:
"""
        Toggle custom remove role messages in the modlog
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if await self.config.guild(ctx.guild).remove_role_logs():
await self.config.guild(ctx.guild).remove_role_logs.set(False)
msg = _("Custom remove role events disabled.")
# await ctx.send(msg)
else:
await self.config.guild(ctx.guild).remove_role_logs.set(True)
msg = _("Custom remove role events will now appear in the modlog if it's setup.")
await ctx.send(msg)
@_modlog.command(name="channel")
@checks.mod_or_permissions(manage_channels=True)
async def modlog_channel(
self, ctx: commands.Context, channel: Union[discord.TextChannel, str, None]
) -> None:
"""
Set the modlog channel for filtered words
        `<channel>` The channel you would like filtered word notifications to go to
        Use `none` or `clear` to not show any modlogs
        Use `default` to use the built-in modlog channel
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if isinstance(channel, discord.TextChannel):
await self.config.guild(ctx.guild).modlog.set(channel.id)
else:
if channel in ["none", "clear"]:
channel = None
elif channel in ["default"]:
channel = "default"
try:
channel = await modlog.get_modlog_channel()
except RuntimeError:
msg = _(
"No modlog channel has been setup yet. "
"Do `[p]modlogset modlog #channel` to setup the default modlog channel"
)
return await ctx.send(msg)
else:
await ctx.send(_('Channel "{channel}" not found.').format(channel=channel))
return
await self.config.guild(ctx.guild).modlog.set(channel)
await ctx.send(_("Modlog set to {channel}").format(channel=channel))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
async def cooldown(
self, ctx: commands.Context, trigger: TriggerExists, time: int, style="guild"
) -> None:
"""
Set cooldown options for retrigger
`<trigger>` is the name of the trigger.
`<time>` is a time in seconds until the trigger will run again
set a time of 0 or less to remove the cooldown
`[style=guild]` must be either `guild`, `server`, `channel`, `user`, or `member`
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if style not in ["guild", "server", "channel", "user", "member"]:
msg = _("Style must be either `guild`, " "`server`, `channel`, `user`, or `member`.")
await ctx.send(msg)
return
msg = _("Cooldown of {time}s per {style} set for Trigger `{name}`.")
if style in ["user", "member"]:
style = "author"
if style in ["guild", "server"]:
cooldown = {"time": time, "style": style, "last": 0}
else:
cooldown = {"time": time, "style": style, "last": []}
if time <= 0:
cooldown = {}
msg = _("Cooldown for Trigger `{name}` reset.")
trigger_list = await self.config.guild(ctx.guild).trigger_list()
trigger.cooldown = cooldown
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
await self.config.guild(ctx.guild).trigger_list.set(trigger_list)
await ctx.send(msg.format(time=time, style=style, name=trigger.name))
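    # Hedged sketch (editor's addition): the cooldown payload stored above takes one of
    # a few shapes, read directly off the branches in this command (numbers illustrative):
    #
    #   {"time": 30, "style": "guild", "last": 0}      # guild/server: single shared timestamp
    #   {"time": 30, "style": "channel", "last": []}   # channel: tracked per channel
    #   {"time": 30, "style": "author", "last": []}    # user/member are stored as "author"
    #   {}                                             # time <= 0 removes the cooldown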
@whitelist.command(name="add")
@checks.mod_or_permissions(manage_messages=True)
async def whitelist_add(
self, ctx: commands.Context, trigger: TriggerExists, *channel_user_role: ChannelUserRole
) -> None:
"""
Add a channel, user, or role to triggers allowlist
`<trigger>` is the name of the trigger.
`[channel_user_role...]` is the channel, user or role to allowlist
(You can supply more than one of any at a time)
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if len(channel_user_role) < 1:
return await ctx.send(
_("You must supply 1 or more channels users or roles to be allowed")
)
for obj in channel_user_role:
if obj.id not in trigger.whitelist:
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger.whitelist.append(obj.id)
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} added `{list_type}` to its allowlist.")
list_type = humanize_list([c.name for c in channel_user_role])
await ctx.send(msg.format(list_type=list_type, name=trigger.name))
@whitelist.command(name="remove", aliases=["rem", "del"])
@checks.mod_or_permissions(manage_messages=True)
async def whitelist_remove(
self, ctx: commands.Context, trigger: TriggerExists, *channel_user_role: ChannelUserRole
) -> None:
"""
Remove a channel, user, or role from triggers allowlist
`<trigger>` is the name of the trigger.
`[channel_user_role...]` is the channel, user or role to remove from the allowlist
(You can supply more than one of any at a time)
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if len(channel_user_role) < 1:
return await ctx.send(
_(
"You must supply 1 or more channels users "
"or roles to be removed from the allowlist."
)
)
for obj in channel_user_role:
if obj.id in trigger.whitelist:
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger.whitelist.remove(obj.id)
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} removed `{list_type}` from its allowlist.")
list_type = humanize_list([c.name for c in channel_user_role])
await ctx.send(msg.format(list_type=list_type, name=trigger.name))
@blacklist.command(name="add")
@checks.mod_or_permissions(manage_messages=True)
async def blacklist_add(
self, ctx: commands.Context, trigger: TriggerExists, *channel_user_role: ChannelUserRole
) -> None:
"""
Add a channel, user, or role to triggers blocklist
`<trigger>` is the name of the trigger.
`[channel_user_role...]` is the channel, user or role to blocklist
(You can supply more than one of any at a time)
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if len(channel_user_role) < 1:
return await ctx.send(
_("You must supply 1 or more channels users or roles to be blocked.")
)
for obj in channel_user_role:
if obj.id not in trigger.blacklist:
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger.blacklist.append(obj.id)
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} added `{list_type}` to its blocklist.")
list_type = humanize_list([c.name for c in channel_user_role])
await ctx.send(msg.format(list_type=list_type, name=trigger.name))
@blacklist.command(name="remove", aliases=["rem", "del"])
@checks.mod_or_permissions(manage_messages=True)
async def blacklist_remove(
self, ctx: commands.Context, trigger: TriggerExists, *channel_user_role: ChannelUserRole
) -> None:
"""
Remove a channel, user, or role from triggers blocklist
`<trigger>` is the name of the trigger.
`[channel_user_role...]` is the channel, user or role to remove from the blocklist
(You can supply more than one of any at a time)
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if len(channel_user_role) < 1:
return await ctx.send(
_(
"You must supply 1 or more channels users or "
"roles to be removed from the blocklist."
)
)
for obj in channel_user_role:
if obj.id in trigger.blacklist:
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger.blacklist.remove(obj.id)
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} removed `{list_type}` from its blocklist.")
list_type = humanize_list([c.name for c in channel_user_role])
await ctx.send(msg.format(list_type=list_type, name=trigger.name))
@_edit.command(name="regex")
@checks.mod_or_permissions(manage_messages=True)
async def edit_regex(
self, ctx: commands.Context, trigger: TriggerExists, *, regex: ValidRegex
) -> None:
"""
Edit the regex of a saved trigger.
`<trigger>` is the name of the trigger.
`<regex>` The new regex pattern to use.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.regex = re.compile(regex)
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} regex changed to ```bf\n{regex}\n```")
await ctx.send(msg.format(name=trigger.name, regex=regex))
@_edit.command(name="ocr")
@commands.check(lambda ctx: TriggerHandler.ALLOW_OCR)
@checks.mod_or_permissions(manage_messages=True)
async def toggle_ocr_search(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Toggle whether to use Optical Character Recognition to search for text within images.
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.ocr_search = not trigger.ocr_search
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} OCR Search set to: {ocr_search}")
await ctx.send(msg.format(name=trigger.name, ocr_search=trigger.ocr_search))
@_edit.command(name="readfilenames", aliases=["filenames"])
@checks.mod_or_permissions(manage_messages=True)
async def toggle_filename_search(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Toggle whether to search message attachment filenames.
Note: This will append all attachments in a message to the message content. This **will not**
download and read file content using regex.
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.read_filenames = not trigger.read_filenames
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} read filenames set to: {read_filenames}")
await ctx.send(msg.format(name=trigger.name, read_filenames=trigger.read_filenames))
@_edit.command(name="reply", aliases=["replies"])
@checks.mod_or_permissions(manage_messages=True)
async def set_reply(
self, ctx: commands.Context, trigger: TriggerExists, set_to: Optional[bool] = None
) -> None:
"""
Set whether or not to reply to the triggered message
`<trigger>` is the name of the trigger.
        `[set_to]` `True` will reply with a notification, `False` will reply without a notification,
        leaving this blank will clear replies entirely.
        Note: This is only available for Red 3.4.6/discord.py 1.6.0 or greater.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.reply = set_to
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} replies set to: {set_to}")
await ctx.send(msg.format(name=trigger.name, set_to=trigger.reply))
@_edit.command(name="tts", aliases=["texttospeech", "text-to-speech"])
@checks.mod_or_permissions(manage_messages=True)
async def set_tts(self, ctx: commands.Context, trigger: TriggerExists, set_to: bool) -> None:
"""
Set whether or not to send the message with text-to-speech
`<trigger>` is the name of the trigger.
`[set_to]` either `true` or `false` on whether to send the text
reply with text-to-speech enabled.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.tts = set_to
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} text-to-speech set to: {set_to}")
await ctx.send(msg.format(name=trigger.name, set_to=trigger.tts))
@_edit.command(name="usermention", aliases=["userping"])
@checks.mod_or_permissions(manage_messages=True)
async def set_user_mention(
self, ctx: commands.Context, trigger: TriggerExists, set_to: bool
) -> None:
"""
        Set whether or not this trigger will mention users in the reply
`<trigger>` is the name of the trigger.
`[set_to]` either `true` or `false` on whether to allow this trigger
to actually ping the users in the message.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.user_mention = set_to
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} user mentions set to: {set_to}")
await ctx.send(msg.format(name=trigger.name, set_to=trigger.user_mention))
@_edit.command(name="everyonemention", aliases=["everyoneping"])
@checks.mod_or_permissions(manage_messages=True, mention_everyone=True)
async def set_everyone_mention(
self, ctx: commands.Context, trigger: TriggerExists, set_to: bool
) -> None:
"""
        Set whether or not this trigger will allow everyone mentions
`<trigger>` is the name of the trigger.
`[set_to]` either `true` or `false` on whether to allow this trigger
to actually ping everyone if the bot has correct permissions.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.everyone_mention = set_to
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} everyone mentions set to: {set_to}")
await ctx.send(msg.format(name=trigger.name, set_to=trigger.everyone_mention))
@_edit.command(name="rolemention", aliases=["roleping"])
@checks.mod_or_permissions(manage_messages=True, mention_everyone=True)
async def set_role_mention(
self, ctx: commands.Context, trigger: TriggerExists, set_to: bool
) -> None:
"""
        Set whether or not this trigger will allow role mentions
`<trigger>` is the name of the trigger.
`[set_to]` either `true` or `false` on whether to allow this trigger
to actually ping roles if the bot has correct permissions.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.role_mention = set_to
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} role mentions set to: {set_to}")
await ctx.send(msg.format(name=trigger.name, set_to=trigger.role_mention))
@_edit.command(name="edited")
@checks.mod_or_permissions(manage_messages=True)
async def toggle_check_edits(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Toggle whether the bot will listen to edited messages as well as on_message for
the specified trigger.
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.check_edits = not trigger.check_edits
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} check edits set to: {ignore_edits}")
await ctx.send(msg.format(name=trigger.name, ignore_edits=trigger.check_edits))
@_edit.command(name="text", aliases=["msg"])
@checks.mod_or_permissions(manage_messages=True)
async def edit_text(self, ctx: commands.Context, trigger: TriggerExists, *, text: str) -> None:
"""
Edit the text of a saved trigger.
`<trigger>` is the name of the trigger.
`<text>` The new text to respond with.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
if trigger.multi_payload:
return await ctx.send(_("You cannot edit multi triggers response."))
if "text" not in trigger.response_type:
return await ctx.send(_("That trigger cannot be edited this way."))
trigger.text = text
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} text changed to `{text}`")
await ctx.send(msg.format(name=trigger.name, text=text))
@_edit.command(name="chance", aliases=["chances"])
@checks.mod_or_permissions(manage_messages=True)
async def edit_chance(
self, ctx: commands.Context, trigger: TriggerExists, chance: int
) -> None:
"""
Edit the chance a trigger will execute.
`<trigger>` is the name of the trigger.
        `<chance>` The chance the trigger will execute, in the form of 1 in chance.
Set the `chance` to 0 to remove the chance and always perform the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
if chance < 0:
return await ctx.send(_("You cannot have a negative chance of triggers happening."))
trigger.chance = chance
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
if chance:
msg = _("Trigger {name} chance changed to `1 in {chance}`")
else:
msg = _("Trigger {name} chance changed to always.")
await ctx.send(msg.format(name=trigger.name, chance=str(chance)))
@_edit.command(name="deleteafter", aliases=["autodelete", "delete"])
@checks.mod_or_permissions(manage_messages=True)
async def edit_delete_after(
self,
ctx: commands.Context,
trigger: TriggerExists,
*,
delete_after: TimedeltaConverter = None,
) -> None:
"""
Edit the delete_after parameter of a saved text trigger.
`<trigger>` is the name of the trigger.
        `<delete_after>` The time until the message is deleted; must include units.
Example: `[p]retrigger edit deleteafter trigger 2 minutes`
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
if "text" not in trigger.response_type:
return await ctx.send(_("That trigger cannot be edited this way."))
if delete_after:
if delete_after.total_seconds() > 0:
delete_after_seconds = delete_after.total_seconds()
if delete_after.total_seconds() < 1:
return await ctx.send(_("`delete_after` must be greater than 1 second."))
else:
delete_after_seconds = None
trigger.delete_after = delete_after_seconds
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} will now delete after `{time}` seconds.")
await ctx.send(msg.format(name=trigger.name, time=delete_after_seconds))
@_edit.command(name="ignorecommands")
@checks.mod_or_permissions(manage_messages=True)
async def edit_ignore_commands(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Toggle the trigger ignoring command messages entirely.
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
trigger.ignore_commands = not trigger.ignore_commands
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} ignoring commands set to `{text}`")
await ctx.send(msg.format(name=trigger.name, text=trigger.ignore_commands))
@_edit.command(name="command", aliases=["cmd"])
@checks.mod_or_permissions(manage_messages=True)
async def edit_command(
self, ctx: commands.Context, trigger: TriggerExists, *, command: str
) -> None:
"""
        Edit the command of a saved trigger.
`<trigger>` is the name of the trigger.
`<command>` The new command for the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
if trigger.multi_payload:
return await ctx.send(_("You cannot edit multi triggers response."))
cmd_list = command.split(" ")
existing_cmd = self.bot.get_command(cmd_list[0])
if existing_cmd is None:
await ctx.send(
_("`{command}` doesn't seem to be an available command.").format(command=command)
)
return
if "command" not in trigger.response_type:
return await ctx.send(_("That trigger cannot be edited this way."))
trigger.text = command
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} command changed to `{command}`")
await ctx.send(msg.format(name=trigger.name, command=command))
@_edit.command(name="role", aliases=["roles"])
@checks.mod_or_permissions(manage_roles=True)
async def edit_roles(
self, ctx: commands.Context, trigger: TriggerExists, *roles: discord.Role
) -> None:
"""
Edit the added or removed roles of a saved trigger.
`<trigger>` is the name of the trigger.
        `<roles>` space-separated list of roles or IDs to edit on the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
if trigger.multi_payload:
return await ctx.send(_("You cannot edit multi triggers response."))
for role in roles:
if role >= ctx.me.top_role:
return await ctx.send(_("I can't assign roles higher than my own."))
if ctx.author.id == ctx.guild.owner_id:
continue
if role >= ctx.author.top_role:
return await ctx.send(
_("I can't assign roles higher than you are able to assign.")
)
role_ids = [r.id for r in roles]
if not any([t for t in trigger.response_type if t in ["add_role", "remove_role"]]):
return await ctx.send(_("That trigger cannot be edited this way."))
trigger.text = role_ids
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} role edits changed to `{roles}`")
await ctx.send(msg.format(name=trigger.name, roles=humanize_list([r.name for r in roles])))
@_edit.command(name="react", aliases=["emojis"])
@checks.mod_or_permissions(manage_messages=True)
async def edit_reactions(
self, ctx: commands.Context, trigger: TriggerExists, *emojis: ValidEmoji
) -> None:
"""
Edit the emoji reactions of a saved trigger.
`<trigger>` is the name of the trigger.
`<emojis>` The new emojis to be used in the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
if not await self.can_edit(ctx.author, trigger):
return await ctx.send(_("You are not authorized to edit this trigger."))
if "react" not in trigger.response_type:
return await ctx.send(_("That trigger cannot be edited this way."))
trigger.text = emojis
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} reactions changed to {emojis}")
emoji_s = [f"<{e}>" for e in emojis if len(e) > 5] + [e for e in emojis if len(e) < 5]
await ctx.send(msg.format(name=trigger.name, emojis=humanize_list(emoji_s)))
@retrigger.command(name="enable")
@checks.mod_or_permissions(manage_messages=True)
async def enable_trigger(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Enable a trigger that has been disabled either by command or automatically
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
trigger.enabled = True
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
self.triggers[ctx.guild.id].append(trigger)
msg = _("Trigger {name} has been enabled.")
await ctx.send(msg.format(name=trigger.name))
@retrigger.command(name="disable")
@checks.mod_or_permissions(manage_messages=True)
async def disable_trigger(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Disable a trigger
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
trigger.enabled = False
async with self.config.guild(ctx.guild).trigger_list() as trigger_list:
trigger_list[trigger.name] = await trigger.to_json()
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
msg = _("Trigger {name} has been disabled.")
await ctx.send(msg.format(name=trigger.name))
@retrigger.command(hidden=True)
@checks.is_owner()
async def timeout(self, ctx: commands.Context, timeout: int) -> None:
"""
Set the timeout period for searching triggers
        `<timeout>` is the number of seconds before a regex search is abandoned.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
        if 1 < timeout <= 10:
msg = await ctx.send(
_(
"Increasing this could cause the bot to become unstable or allow "
"bad regex patterns to continue to exist causing slow downs and "
"even fatal crashes on the bot. Do you wish to continue?"
)
)
start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS)
pred = ReactionPredicate.yes_or_no(msg, user=ctx.author)
try:
await ctx.bot.wait_for("reaction_add", check=pred, timeout=30)
except asyncio.TimeoutError:
return await ctx.send(_("Not changing regex timeout time."))
if pred.result:
await self.config.trigger_timeout.set(timeout)
self.trigger_timeout = timeout
await ctx.tick()
else:
await ctx.send(_("Not changing regex timeout time."))
elif timeout > 10:
return await ctx.send(
_(
"{timeout} seconds is too long, you may want to look at `{prefix}retrigger bypass`"
).format(timeout=timeout, prefix=ctx.clean_prefix)
)
else:
if timeout < 1:
timeout = 1
await self.config.trigger_timeout.set(timeout)
self.trigger_timeout = timeout
await ctx.send(_("Regex search timeout set to {timeout}").format(timeout=timeout))
@retrigger.command(hidden=True)
@checks.is_owner()
async def bypass(self, ctx: commands.Context, bypass: bool) -> None:
"""
Bypass patterns being kicked from memory until reload
**Warning:** Enabling this can allow mods and admins to create triggers
that cause catastrophic backtracking which can lead to the bot crashing
unexpectedly. Only enable in servers where you trust the admins not to
mess with the bot.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if bypass:
msg = await ctx.send(
_(
"Bypassing this could cause the bot to become unstable or allow "
"bad regex patterns to continue to exist causing slow downs and "
"even fatal crashes on the bot. Do you wish to continue?"
)
)
start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS)
pred = ReactionPredicate.yes_or_no(msg, user=ctx.author)
try:
await ctx.bot.wait_for("reaction_add", check=pred, timeout=30)
except asyncio.TimeoutError:
return await ctx.send(_("Not bypassing safe Regex search."))
if pred.result:
await self.config.guild(ctx.guild).bypass.set(bypass)
await ctx.tick()
else:
await ctx.send(_("Not bypassing safe Regex search."))
else:
await self.config.guild(ctx.guild).bypass.set(bypass)
await ctx.send(_("Safe Regex search re-enabled."))
@retrigger.command(usage="[trigger]")
@commands.bot_has_permissions(read_message_history=True, add_reactions=True)
async def list(
self, ctx: commands.Context, guild_id: Optional[int], trigger: TriggerExists = None
) -> None:
"""
List information about triggers.
`[trigger]` if supplied provides information about named trigger.
\N{BLACK RIGHT-POINTING TRIANGLE WITH DOUBLE VERTICAL BAR}\N{VARIATION SELECTOR-16} will toggle the displayed triggers active setting
\N{NEGATIVE SQUARED CROSS MARK} will toggle the displayed trigger to be not active
\N{WHITE HEAVY CHECK MARK} will toggle the displayed trigger to be active
\N{PUT LITTER IN ITS PLACE SYMBOL} will delete the displayed trigger
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
guild = ctx.guild
if guild_id and await ctx.bot.is_owner(ctx.author):
guild = ctx.bot.get_guild(guild_id)
if not guild:
guild = ctx.guild
index = 0
if guild.id not in self.triggers or not self.triggers[guild.id]:
msg = _("There are no triggers setup on this server.")
await ctx.send(msg)
return
if trigger:
if type(trigger) is str:
return await ctx.send(_("Trigger `{name}` doesn't exist.").format(name=trigger))
for t in self.triggers[guild.id]:
if t.name == trigger.name:
index = self.triggers[guild.id].index(t)
await ReTriggerMenu(
source=ReTriggerPages(
triggers=self.triggers[guild.id],
guild=guild,
),
delete_message_after=False,
clear_reactions_after=True,
timeout=60,
cog=self,
page_start=index,
).start(ctx=ctx)
@retrigger.command(aliases=["del", "rem", "delete"])
@checks.mod_or_permissions(manage_messages=True)
async def remove(self, ctx: commands.Context, trigger: TriggerExists) -> None:
"""
Remove a specified trigger
`<trigger>` is the name of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(trigger) is Trigger:
await self.remove_trigger(ctx.guild.id, trigger.name)
await self.remove_trigger_from_cache(ctx.guild.id, trigger)
await ctx.send(_("Trigger `") + trigger.name + _("` removed."))
else:
await ctx.send(_("Trigger `") + str(trigger) + _("` doesn't exist."))
@retrigger.command()
async def explain(self, ctx: commands.Context, page_num: Optional[int] = 1) -> None:
"""
Explain how to use retrigger
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
with open(Path(__file__).parent / "README.md", "r", encoding="utf8") as infile:
data = infile.read()
pages = []
for page in pagify(data, ["\n\n\n", "\n\n", "\n"], priority=True):
pages.append(re.sub(r"\[p\]", ctx.clean_prefix, page))
if page_num and (page_num > len(pages) or page_num < 0):
page_num = 1
await BaseMenu(
source=ExplainReTriggerPages(
pages=pages,
),
delete_message_after=False,
clear_reactions_after=True,
timeout=60,
cog=self,
page_start=int(page_num) - 1,
).start(ctx=ctx)
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
async def text(
self,
ctx: commands.Context,
name: TriggerExists,
regex: ValidRegex,
delete_after: Optional[TimedeltaConverter] = None,
*,
text: str,
) -> None:
"""
Add a text response trigger
`<name>` name of the trigger.
`<regex>` the regex that will determine when to respond.
        `[delete_after]` Optionally have the response auto-delete; must include units, e.g. `2m`.
`<text>` response of the trigger.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
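        # Validate the optional auto-delete window: values under one second are
        # rejected, and no value means the response is never auto-deleted.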
if delete_after:
if delete_after.total_seconds() > 0:
delete_after_seconds = delete_after.total_seconds()
if delete_after.total_seconds() < 1:
return await ctx.send(_("`delete_after` must be greater than 1 second."))
else:
delete_after_seconds = None
new_trigger = Trigger(
name,
regex,
["text"],
author,
text=text,
created_at=ctx.message.id,
delete_after=delete_after_seconds,
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command(aliases=["randomtext", "rtext"])
@checks.mod_or_permissions(manage_messages=True)
async def random(self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex) -> None:
"""
Add a random text response trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
text = await self.wait_for_multiple_responses(ctx)
if not text:
await ctx.send(_("No responses supplied"))
return
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["randtext"], author, text=text, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
async def dm(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *, text: str
) -> None:
"""
Add a dm response trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
`<text>` response of the trigger
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(name, regex, ["dm"], author, text=text, created_at=ctx.message.id)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
async def dmme(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *, text: str
) -> None:
"""
Add trigger to DM yourself
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
`<text>` response of the trigger
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(name, regex, ["dmme"], author, text=text, created_at=ctx.message.id)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_nicknames=True)
@checks.bot_has_permissions(manage_nicknames=True)
async def rename(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *, text: str
) -> None:
"""
Add trigger to rename users
`<name>` name of the trigger.
`<regex>` the regex that will determine when to respond.
        `<text>` the user's new nickname.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["rename"], author, text=text, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(attach_files=True)
async def image(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, image_url: str = None
) -> None:
"""
Add an image/file response trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
        `[image_url]` optional image URL; if none is provided the bot will ask for an image to be uploaded.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
if ctx.message.attachments != []:
attachment_url = ctx.message.attachments[0].url
filename = await self.save_image_location(attachment_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
elif image_url is not None:
filename = await self.save_image_location(image_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
else:
msg = await self.wait_for_image(ctx)
if not msg or not msg.attachments:
return
image_url = msg.attachments[0].url
filename = await self.save_image_location(image_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
new_trigger = Trigger(
name, regex, ["image"], author, image=filename, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command(aliases=["randimage", "randimg", "rimage", "rimg"])
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(attach_files=True)
async def randomimage(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex
) -> None:
"""
Add a random image/file response trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
filename = await self.wait_for_multiple_images(ctx)
new_trigger = Trigger(
name, regex, ["randimage"], author, image=filename, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(attach_files=True)
async def imagetext(
self,
ctx: commands.Context,
name: TriggerExists,
regex: ValidRegex,
text: str,
image_url: str = None,
) -> None:
"""
Add an image/file response with text trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
`<text>` the triggered text response
        `[image_url]` optional image URL; if none is provided the bot will ask for an image to be uploaded.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
        if ctx.message.attachments != []:
            attachment_url = ctx.message.attachments[0].url
            filename = await self.save_image_location(attachment_url, guild)
            if not filename:
                return await ctx.send(_("That is not a valid file link."))
        elif image_url is not None:
            filename = await self.save_image_location(image_url, guild)
            if not filename:
                return await ctx.send(_("That is not a valid file link."))
else:
msg = await self.wait_for_image(ctx)
if not msg or not msg.attachments:
return
image_url = msg.attachments[0].url
filename = await self.save_image_location(image_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
new_trigger = Trigger(
name, regex, ["image"], author, image=filename, text=text, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(attach_files=True)
@commands.check(lambda ctx: TriggerHandler.ALLOW_RESIZE)
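    # TriggerHandler.ALLOW_RESIZE presumably reflects whether the image libraries
    # needed for resizing are available; this check disables the command otherwise.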
async def resize(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, image_url: str = None
) -> None:
"""
Add an image to resize in response to a trigger
this will attempt to resize the image based on length of matching regex
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
        `[image_url]` optional image URL; if none is provided the bot will ask for an image to be uploaded.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
if ctx.message.attachments != []:
attachment_url = ctx.message.attachments[0].url
filename = await self.save_image_location(attachment_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
elif image_url is not None:
filename = await self.save_image_location(image_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
else:
msg = await self.wait_for_image(ctx)
if not msg or not msg.attachments:
return
image_url = msg.attachments[0].url
filename = await self.save_image_location(image_url, guild)
if not filename:
return await ctx.send(_("That is not a valid file link."))
new_trigger = Trigger(
name, regex, ["resize"], author, image=filename, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(ban_members=True)
@commands.bot_has_permissions(ban_members=True)
async def ban(self, ctx: commands.Context, name: TriggerExists, regex: str) -> None:
"""
Add a trigger to ban users for saying specific things found with regex
This respects hierarchy so ensure the bot role is lower in the list
than mods and admin so they don't get banned by accident
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["ban"], author, created_at=ctx.message.id, check_edits=True
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(kick_members=True)
@commands.bot_has_permissions(kick_members=True)
async def kick(self, ctx: commands.Context, name: TriggerExists, regex: str) -> None:
"""
Add a trigger to kick users for saying specific things found with regex
This respects hierarchy so ensure the bot role is lower in the list
than mods and admin so they don't get kicked by accident
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["kick"], author, created_at=ctx.message.id, check_edits=True
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(add_reactions=True)
async def react(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *emojis: ValidEmoji
) -> None:
"""
Add a reaction trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
        `emojis` the emojis to react with when triggered, separated by spaces
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["react"], author, text=emojis, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(add_reactions=True)
async def publish(self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex) -> None:
"""
Add a trigger to automatically publish content in news channels.
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(name, regex, ["publish"], author, created_at=ctx.message.id)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command(aliases=["cmd"])
@checks.mod_or_permissions(manage_messages=True)
async def command(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *, command: str
) -> None:
"""
Add a command trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
`<command>` the command that will be triggered, do not add [p] prefix
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
cmd_list = command.split(" ")
existing_cmd = self.bot.get_command(cmd_list[0])
if existing_cmd is None:
await ctx.send(command + _(" doesn't seem to be an available command."))
return
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["command"], author, text=command, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command(aliases=["cmdmock"], hidden=True)
@checks.admin_or_permissions(administrator=True)
async def mock(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *, command: str
) -> None:
"""
Add a trigger for command as if you used the command
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
`<command>` the command that will be triggered, do not add [p] prefix
**Warning:** This function can let other users run a command on your behalf,
use with caution.
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
msg = await ctx.send(
_(
"Mock commands can allow any user to run a command "
"as if you did, are you sure you want to add this?"
)
)
start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS)
pred = ReactionPredicate.yes_or_no(msg, ctx.author)
try:
await ctx.bot.wait_for("reaction_add", check=pred, timeout=15)
except asyncio.TimeoutError:
return await ctx.send(_("Not creating trigger."))
if not pred.result:
return await ctx.send(_("Not creating trigger."))
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
cmd_list = command.split(" ")
existing_cmd = self.bot.get_command(cmd_list[0])
if existing_cmd is None:
await ctx.send(command + _(" doesn't seem to be an available command."))
return
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["mock"], author, text=command, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command(aliases=["deletemsg"])
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(manage_messages=True)
async def filter(
self,
ctx: commands.Context,
name: TriggerExists,
check_filenames: Optional[bool] = False,
*,
regex: str,
) -> None:
"""
Add a trigger to delete a message
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
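        `[check_filenames]` optionally also check the names of uploaded files against the regex (defaults to False).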
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name,
regex,
["delete"],
author,
read_filenames=check_filenames,
created_at=ctx.message.id,
check_edits=True,
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_roles=True)
@commands.bot_has_permissions(manage_roles=True)
async def addrole(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *roles: discord.Role
) -> None:
"""
Add a trigger to add a role
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
        `[role...]` the roles to add when the regex pattern matches, separated by spaces
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
for role in roles:
if role >= ctx.me.top_role:
return await ctx.send(_("I can't assign roles higher than my own."))
if ctx.author.id == ctx.guild.owner_id:
continue
if role >= ctx.author.top_role:
return await ctx.send(
_("I can't assign roles higher than you are able to assign.")
)
role_ids = [r.id for r in roles]
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["add_role"], author, text=role_ids, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.mod_or_permissions(manage_roles=True)
@commands.bot_has_permissions(manage_roles=True)
async def removerole(
self, ctx: commands.Context, name: TriggerExists, regex: ValidRegex, *roles: discord.Role
) -> None:
"""
Add a trigger to remove a role
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
        `[role...]` the roles to remove when the regex pattern matches, separated by spaces
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
[For more details click here.](https://github.com/TrustyJAID/Trusty-cogs/blob/master/retrigger/README.md)
"""
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
for role in roles:
if role >= ctx.me.top_role:
return await ctx.send(_("I can't remove roles higher than my own."))
if ctx.author.id == ctx.guild.owner_id:
continue
if role >= ctx.author.top_role:
return await ctx.send(
_("I can't remove roles higher than you are able to remove.")
)
role_ids = [r.id for r in roles]
guild = ctx.guild
author = ctx.message.author.id
new_trigger = Trigger(
name, regex, ["remove_role"], author, text=role_ids, created_at=ctx.message.id
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
@retrigger.command()
@checks.admin_or_permissions(administrator=True)
async def multi(
self,
ctx: commands.Context,
name: TriggerExists,
regex: ValidRegex,
*multi_response: MultiResponse,
) -> None:
"""
Add a multiple response trigger
`<name>` name of the trigger
`<regex>` the regex that will determine when to respond
`[multi_response...]` the list of actions the bot will perform
        Each response starts with the name of the action, which must be one of
        the options listed below, followed by a `;` and its payload. Separate
        multiple responses with a space.
        To add or remove multiple roles, list them after the action separated
        by additional `;` characters.
e.g. `[p]retrigger multi test \\btest\\b \"dm;You said a bad word!\"
filter "remove_role;Regular Member" add_role;Timeout`
Will attempt to DM the user, delete their message, remove their
`@Regular Member` role and add the `@Timeout` role simultaneously.
Available options:
dm
dmme
remove_role
add_role
ban
kick
text
filter or delete
react
rename
command
See https://regex101.com/ for help building a regex pattern.
See `[p]retrigger explain` or click the link below for more details.
"""
# log.info(multi_response)
# return
if type(name) != str:
msg = _("{name} is already a trigger name").format(name=name.name)
return await ctx.send(msg)
guild = ctx.guild
author = ctx.message.author.id
if not [i[0] for i in multi_response]:
return await ctx.send(_("You have no actions provided for this trigger."))
new_trigger = Trigger(
name,
regex,
[i[0] for i in multi_response],
author,
multi_payload=multi_response,
created_at=ctx.message.id,
)
if ctx.guild.id not in self.triggers:
self.triggers[ctx.guild.id] = []
self.triggers[ctx.guild.id].append(new_trigger)
trigger_list = await self.config.guild(guild).trigger_list()
trigger_list[name] = await new_trigger.to_json()
await self.config.guild(guild).trigger_list.set(trigger_list)
await ctx.send(_("Trigger `{name}` set.").format(name=name))
| 47.022157
| 141
| 0.634542
|
17c8e4657c347da438d7797b9458730f7386b4e5
| 2,226
|
py
|
Python
|
lib/surface/iot/devices/credentials/update.py
|
kustodian/google-cloud-sdk
|
b6bae4137d4b58030adb3dcb1271216dfb19f96d
|
[
"Apache-2.0"
] | null | null | null |
lib/surface/iot/devices/credentials/update.py
|
kustodian/google-cloud-sdk
|
b6bae4137d4b58030adb3dcb1271216dfb19f96d
|
[
"Apache-2.0"
] | 11
|
2020-02-29T02:51:12.000Z
|
2022-03-30T23:20:08.000Z
|
lib/surface/iot/devices/credentials/update.py
|
kustodian/google-cloud-sdk
|
b6bae4137d4b58030adb3dcb1271216dfb19f96d
|
[
"Apache-2.0"
] | 1
|
2020-07-24T18:47:35.000Z
|
2020-07-24T18:47:35.000Z
|
# -*- coding: utf-8 -*- #
# Copyright 2017 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""`gcloud iot credentials describe` command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudiot import devices
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.iot import flags
from googlecloudsdk.command_lib.iot import resource_args
from googlecloudsdk.command_lib.iot import util
from googlecloudsdk.core import log
from googlecloudsdk.core.util import times
class Update(base.DescribeCommand):
"""Update a specific device credential."""
@staticmethod
def Args(parser):
resource_args.AddDeviceResourceArg(parser,
'for which to update credentials',
positional=False)
flags.GetIndexFlag('credential', 'to update').AddToParser(parser)
flags.AddDeviceCredentialFlagsToParser(parser, combine_flags=False,
only_modifiable=True)
def Run(self, args):
client = devices.DevicesClient()
device_ref = args.CONCEPTS.device.Parse()
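    # Device credentials can only be replaced as a whole list: fetch the current
    # list, modify the entry at the requested index, then patch the list back.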
credentials = client.Get(device_ref).credentials
try:
if args.expiration_time:
credentials[args.index].expirationTime = (
times.FormatDateTime(args.expiration_time))
except IndexError:
raise util.BadCredentialIndexError(device_ref.Name(), credentials,
args.index)
response = client.Patch(device_ref, credentials=credentials)
log.UpdatedResource(device_ref.Name(), 'credentials for device')
return response
| 38.37931
| 74
| 0.715633
|
957139817fe3d1a74c9726e1c96935aec85d727f
| 6,794
|
py
|
Python
|
src/blade/new_project.py
|
southbear-club/blade-build
|
3906806c49f2dd01ce57f4f4a924cab727e41710
|
[
"BSD-3-Clause"
] | null | null | null |
src/blade/new_project.py
|
southbear-club/blade-build
|
3906806c49f2dd01ce57f4f4a924cab727e41710
|
[
"BSD-3-Clause"
] | null | null | null |
src/blade/new_project.py
|
southbear-club/blade-build
|
3906806c49f2dd01ce57f4f4a924cab727e41710
|
[
"BSD-3-Clause"
] | 1
|
2021-10-10T11:58:43.000Z
|
2021-10-10T11:58:43.000Z
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
'''
@File : new_project.py
@Time : 2021/06/27 13:20:42
@Author : wotsen
@Version : 1.0.0
@Contact : astralrovers@outlook.com
@License : (C)Copyright 2020-2030, MIT
@Desc : None
'''
# here put the import lib
import os
import re
import time
import json
from blade import console
from blade import new_template
class NewProject(object):
project_cfg_json_template = {
"project info": {
'name': "",
"author": "",
"creator": "",
"email": "",
"contributor": {
"wotsen": "astralrovers@outlook.com"
},
"create date": "",
"copyrightTag": "",
"project tool version": "",
"project version": "",
"project language": "c++",
"project type": "lib",
"current platform": "x86",
"support platforms": {
"x86_64": "gcc"
}
}
}
@staticmethod
def check_project_name(name):
if not re.match(r'^[a-zA-Z][a-zA-Z0-9-_]+$', name):
raise NameError("ame not in rules: start whith a-z/A-Z,othor use a-z,A-Z,-9,-,_")
if os.path.exists(name):
            raise FileExistsError('%s already exists.' % name)
@staticmethod
def check_email(name):
return 0
@staticmethod
def new_project(blade_path, command, options, targets):
NewProject.project_cfg_json_template['project info']['name'] = options.project_name
NewProject.project_cfg_json_template["project info"]["author"] = options.author
NewProject.project_cfg_json_template["project info"]["creator"] = options.author
NewProject.project_cfg_json_template["project info"]["email"] = options.email
NewProject.project_cfg_json_template["project info"]["contributor"][options.author] = options.email
NewProject.project_cfg_json_template["project info"]["create date"] = time.strftime("%Y-%m-%d", time.localtime())
NewProject.project_cfg_json_template["project info"]["project version"] = "0.0.0"
NewProject.project_cfg_json_template["project info"]["project language"] = options.project_language
NewProject.project_cfg_json_template["project info"]["project type"] = options.project_type
NewProject.create_dir(options.project_name)
        NewProject.create_project_file(options.project_name)
@staticmethod
def create_dir(name):
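        # Lay out the standard project skeleton: sources, headers, build
        # artifacts, docs, tests, samples, third-party code and tooling.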
os.makedirs(name)
os.makedirs(os.path.join(name, '.blade'))
os.makedirs(os.path.join(name, 'include'))
os.makedirs(os.path.join(name, 'lib'))
os.makedirs(os.path.join(name, 'bin'))
os.makedirs(os.path.join(name, 'build'))
os.makedirs(os.path.join(name, 'dist'))
os.makedirs(os.path.join(name, 'target'))
os.makedirs(os.path.join(name, 'docs'))
os.makedirs(os.path.join(name, 'docs/dev-doc'))
os.makedirs(os.path.join(name, 'docs/user-doc'))
os.makedirs(os.path.join(name, 'mk'))
os.makedirs(os.path.join(name, 'src'))
os.makedirs(os.path.join(name, 'unittest'))
os.makedirs(os.path.join(name, 'samples'))
os.makedirs(os.path.join(name, 'thirdparty'))
os.makedirs(os.path.join(name, 'tool'))
@staticmethod
    def create_project_file(name):
with open(os.path.join(name, "BLADE_ROOT"), 'w', encoding='utf-8') as f:
f.write(new_template._BLADE_ROOT_)
with open(os.path.join(name, "ChangeLog"), 'w', encoding='utf-8') as f:
pass
if NewProject.project_cfg_json_template['project info']['project type'] == 'lib':
with open(os.path.join(name, "src/BUILD"), 'w', encoding='utf-8') as f:
f.write(new_template._CC_LIB_CONFIG_.substitute(name=NewProject.project_cfg_json_template["project info"]['name']))
with open(os.path.join(name, "unittest/BUILD"), 'w', encoding='utf-8') as f:
f.write(new_template._CC_TEST_CONFIG_.substitute(name=NewProject.project_cfg_json_template["project info"]['name']))
elif NewProject.project_cfg_json_template['project info']['project type'] == 'exec':
with open(os.path.join(name, "src/BUILD"), 'w', encoding='utf-8') as f:
f.write(new_template._CC_BIN_CONFIG_.substitute(name=NewProject.project_cfg_json_template["project info"]['name']))
else:
with open(os.path.join(name, "src/BUILD"), 'w', encoding='utf-8') as f:
f.write(new_template._CC_LIB_CONFIG_.substitute(name=NewProject.project_cfg_json_template["project info"]['name']))
f.write('\n')
f.write(new_template._CC_BIN_CONFIG_.substitute(name=NewProject.project_cfg_json_template["project info"]['name']))
with open(os.path.join(name, "unittest/BUILD"), 'w', encoding='utf-8') as f:
f.write(new_template._CC_TEST_CONFIG_.substitute(name=NewProject.project_cfg_json_template["project info"]['name']))
with open(os.path.join(name, "README.md") , 'w', encoding='utf-8') as f:
f.write(new_template._README_.substitute(author=NewProject.project_cfg_json_template["project info"]['author'],
email=NewProject.project_cfg_json_template["project info"]['email'],
date=NewProject.project_cfg_json_template["project info"]['create date']))
with open(os.path.join(name, "LICENSE.md"), 'w', encoding='utf-8') as f:
pass
with open(os.path.join(name, "AUTHORS"), 'w', encoding='utf-8') as f:
f.writelines([NewProject.project_cfg_json_template['project info']['author']])
with open(os.path.join(name, ".blade/config.json"), 'w', encoding='utf-8') as f:
json.dump(NewProject.project_cfg_json_template, f, indent=4)
with open(os.path.join(name, ".clang-format"), 'w', encoding='utf-8') as f:
f.write(new_template._CLANG_FORMAT_)
        console.output('[info]: ----------Project (%s) created successfully----------' % NewProject.project_cfg_json_template['project info']['name'])
console.output('[project name ]: %s' % name)
console.output('[project language]: %s' % NewProject.project_cfg_json_template['project info']['project language'])
console.output('[project type ]: %s' % NewProject.project_cfg_json_template['project info']['project type'])
console.output('[author ]: %s' % NewProject.project_cfg_json_template['project info']['author'])
console.output('[email ]: %s' % NewProject.project_cfg_json_template['project info']['email'])
console.output('[create date ]: %s' % NewProject.project_cfg_json_template["project info"]["create date"])
| 50.325926
| 144
| 0.626729
|
7842945e8905dbf5450c505f5ff34dfbc00915f4
| 25,121
|
py
|
Python
|
run_glue_no_trainer_ds_pp.py
|
drunkcoding/model-finetune
|
67c40b347ce41dc480dfd029bc74c9aa2501b6ee
|
[
"MIT"
] | null | null | null |
run_glue_no_trainer_ds_pp.py
|
drunkcoding/model-finetune
|
67c40b347ce41dc480dfd029bc74c9aa2501b6ee
|
[
"MIT"
] | null | null | null |
run_glue_no_trainer_ds_pp.py
|
drunkcoding/model-finetune
|
67c40b347ce41dc480dfd029bc74c9aa2501b6ee
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Finetuning a 🤗 Transformers model for sequence classification on GLUE."""
import argparse
import logging
import math
import os
import random
from pathlib import Path
from deepspeed.runtime.dataloader import RepeatingLoader
from helpers import get_optimizer_grouped_parameters
from scipy import stats
from tqdm import trange
from deepspeed.runtime.pipe.module import PipelineModule
from deepspeed.utils.groups import initialize_model_parallel
from partitioner import GPTModelPipe, get_loss_fn
import datasets
from datasets import load_dataset, load_metric
import torch
from torch.utils.data import DataLoader
from tqdm.auto import tqdm
import transformers
import deepspeed
# from megatron import mpu
from transformers import (
AdamW,
AutoConfig,
AutoModelForSequenceClassification,
AutoTokenizer,
DataCollatorWithPadding,
PretrainedConfig,
SchedulerType,
default_data_collator,
get_scheduler,
set_seed,
)
from transformers.file_utils import get_full_repo_name
from transformers.utils.versions import require_version
from transformers import BatchEncoding
from transformers.deepspeed import HfDeepSpeedConfig, HfTrainerDeepSpeedConfig
MP_SIZE = torch.cuda.device_count()
os.environ['TOKENIZERS_PARALLELISM'] = 'false'
logger = logging.getLogger(__name__)
require_version("datasets>=1.8.0",
"To fix: pip install -r examples/pytorch/text-classification/requirements.txt")
task_to_keys = {
"cola": ("sentence", None),
"mnli": ("premise", "hypothesis"),
"mrpc": ("sentence1", "sentence2"),
"qnli": ("question", "sentence"),
"qqp": ("question1", "question2"),
"rte": ("sentence1", "sentence2"),
"sst2": ("sentence", None),
"stsb": ("sentence1", "sentence2"),
"wnli": ("sentence1", "sentence2"),
}
def parse_args():
parser = argparse.ArgumentParser(
description="Finetune a transformers model on a text classification task")
parser.add_argument(
"--task_name",
type=str.lower,
default=None,
help="The name of the glue task to train on.",
choices=list(task_to_keys.keys()),
)
parser.add_argument(
"--train_file", type=str, default=None, help="A csv or a json file containing the training data."
)
parser.add_argument(
"--validation_file", type=str, default=None, help="A csv or a json file containing the validation data."
)
parser.add_argument(
"--max_length",
type=int,
default=128,
help=(
"The maximum total input sequence length after tokenization. Sequences longer than this will be truncated,"
" sequences shorter will be padded if `--pad_to_max_lengh` is passed."
),
)
parser.add_argument(
"--pad_to_max_length",
action="store_true",
help="If passed, pad all samples to `max_length`. Otherwise, dynamic padding is used.",
)
parser.add_argument(
"--model_name_or_path",
type=str,
help="Path to pretrained model or model identifier from huggingface.co/models.",
required=True,
)
parser.add_argument(
"--use_slow_tokenizer",
action="store_true",
help="If passed, will use a slow tokenizer (not backed by the 🤗 Tokenizers library).",
)
parser.add_argument(
"--per_device_train_batch_size",
type=int,
default=8,
help="Batch size (per device) for the training dataloader.",
)
parser.add_argument(
"--per_device_eval_batch_size",
type=int,
default=8,
help="Batch size (per device) for the evaluation dataloader.",
)
parser.add_argument(
"--learning_rate",
type=float,
default=5e-5,
help="Initial learning rate (after the potential warmup period) to use.",
)
parser.add_argument("--weight_decay", type=float,
default=0.0, help="Weight decay to use.")
parser.add_argument("--num_train_epochs", type=int, default=3,
help="Total number of training epochs to perform.")
parser.add_argument(
"--max_train_steps",
type=int,
default=None,
help="Total number of training steps to perform. If provided, overrides num_train_epochs.",
)
parser.add_argument(
"--gradient_accumulation_steps",
type=int,
default=1,
help="Number of updates steps to accumulate before performing a backward/update pass.",
)
parser.add_argument(
"--lr_scheduler_type",
type=SchedulerType,
default="linear",
help="The scheduler type to use.",
choices=["linear", "cosine", "cosine_with_restarts",
"polynomial", "constant", "constant_with_warmup"],
)
parser.add_argument(
"--num_warmup_steps", type=int, default=0, help="Number of steps for the warmup in the lr scheduler."
)
parser.add_argument("--output_dir", type=str, default=None,
help="Where to store the final model.")
parser.add_argument("--seed", type=int, default=None,
help="A seed for reproducible training.")
parser.add_argument("--push_to_hub", action="store_true",
help="Whether or not to push the model to the Hub.")
parser.add_argument(
"--hub_model_id", type=str, help="The name of the repository to keep in sync with the local `output_dir`."
)
parser.add_argument("--hub_token", type=str,
help="The token to use to push to the Model Hub.")
parser.add_argument(
"--local_rank",
type=int,
default=-1,
help="Deepspeed auto local rank.",
)
parser = deepspeed.add_config_arguments(parser)
parser = deepspeed.add_tuning_arguments(parser)
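    # add_config_arguments/add_tuning_arguments inject DeepSpeed's standard CLI
    # flags (e.g. --deepspeed_config), which main() reads below.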
args = parser.parse_args()
# Sanity checks
if args.task_name is None and args.train_file is None and args.validation_file is None:
raise ValueError(
"Need either a task name or a training/validation file.")
else:
if args.train_file is not None:
extension = args.train_file.split(".")[-1]
assert extension in [
"csv", "json"], "`train_file` should be a csv or a json file."
if args.validation_file is not None:
extension = args.validation_file.split(".")[-1]
assert extension in [
"csv", "json"], "`validation_file` should be a csv or a json file."
if args.push_to_hub:
assert args.output_dir is not None, "Need an `output_dir` to create a repo when `--push_to_hub` is passed."
return args
def main():
args = parse_args()
deepspeed_config = HfDeepSpeedConfig(args.deepspeed_config)
# mpu.get_model_parallel_world_size = lambda: torch.cuda.device_count()
# mpu.get_model_parallel_rank = lambda: args.local_rank
# mpu.initialize_model_parallel(torch.cuda.device_count())
# Initialize the accelerator. We will let the accelerator handle device placement for us in this example.
# accelerator = Accelerator()
# Make one log on every process with the configuration for debugging.
logging.basicConfig(
format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
datefmt="%m/%d/%Y %H:%M:%S",
level=logging.INFO,
)
# logger.info(accelerator.state)
# Setup logging, we only want one process per machine to log things on the screen.
# accelerator.is_local_main_process is only True for one process per machine.
logger.setLevel(logging.INFO if args.local_rank <= 0 else logging.ERROR)
if args.local_rank <= 0:
datasets.utils.logging.set_verbosity_warning()
transformers.utils.logging.set_verbosity_info()
else:
datasets.utils.logging.set_verbosity_error()
transformers.utils.logging.set_verbosity_error()
# If passed along, set the training seed now.
if args.seed is not None:
set_seed(args.seed)
# Get the datasets: you can either provide your own CSV/JSON training and evaluation files (see below)
# or specify a GLUE benchmark task (the dataset will be downloaded automatically from the datasets Hub).
# For CSV/JSON files, this script will use as labels the column called 'label' and as pair of sentences the
# sentences in columns called 'sentence1' and 'sentence2' if such column exists or the first two columns not named
# label if at least two columns are provided.
# If the CSVs/JSONs contain only one non-label column, the script does single sentence classification on this
# single column. You can easily tweak this behavior (see below)
# In distributed training, the load_dataset function guarantee that only one local process can concurrently
# download the dataset.
if args.task_name is not None:
# Downloading and loading a dataset from the hub.
raw_datasets = load_dataset("glue", args.task_name)
else:
# Loading the dataset from local csv or json file.
data_files = {}
if args.train_file is not None:
data_files["train"] = args.train_file
if args.validation_file is not None:
data_files["validation"] = args.validation_file
extension = (
            args.train_file if args.train_file is not None else args.validation_file).split(".")[-1]
raw_datasets = load_dataset(extension, data_files=data_files)
# See more about loading any type of standard or custom dataset at
# https://huggingface.co/docs/datasets/loading_datasets.html.
# Labels
if args.task_name is not None:
is_regression = args.task_name == "stsb"
if not is_regression:
label_list = raw_datasets["train"].features["label"].names
num_labels = len(label_list)
else:
num_labels = 1
else:
# Trying to have good defaults here, don't hesitate to tweak to your needs.
is_regression = raw_datasets["train"].features["label"].dtype in [
"float32", "float64"]
if is_regression:
num_labels = 1
else:
# A useful fast method:
# https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.unique
label_list = raw_datasets["train"].unique("label")
label_list.sort() # Let's sort it for determinism
num_labels = len(label_list)
# Load pretrained model and tokenizer
#
# In distributed training, the .from_pretrained methods guarantee that only one local process can concurrently
# download model & vocab.
config = AutoConfig.from_pretrained(
args.model_name_or_path, num_labels=num_labels, finetuning_task=args.task_name)
tokenizer = AutoTokenizer.from_pretrained(
args.model_name_or_path, use_fast=not args.use_slow_tokenizer)
model = AutoModelForSequenceClassification.from_pretrained(
args.model_name_or_path,
from_tf=bool(".ckpt" in args.model_name_or_path),
config=config,
)
# hack gpt2 token
if tokenizer.pad_token is None:
tokenizer.pad_token = tokenizer.eos_token
model.config.pad_token_id = 50256
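        # 50256 is GPT-2's <|endoftext|> token id; reusing it as the pad token
        # avoids adding new rows to the embedding matrix.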
# Preprocessing the datasets
if args.task_name is not None:
sentence1_key, sentence2_key = task_to_keys[args.task_name]
else:
# Again, we try to have some nice defaults but don't hesitate to tweak to your use case.
non_label_column_names = [
name for name in raw_datasets["train"].column_names if name != "label"]
if "sentence1" in non_label_column_names and "sentence2" in non_label_column_names:
sentence1_key, sentence2_key = "sentence1", "sentence2"
else:
if len(non_label_column_names) >= 2:
sentence1_key, sentence2_key = non_label_column_names[:2]
else:
sentence1_key, sentence2_key = non_label_column_names[0], None
# Some models have set the order of the labels to use, so let's make sure we do use it.
label_to_id = None
if (
model.config.label2id != PretrainedConfig(
num_labels=num_labels).label2id
and args.task_name is not None
and not is_regression
):
# Some have all caps in their config, some don't.
label_name_to_id = {
k.lower(): v for k, v in model.config.label2id.items()}
if list(sorted(label_name_to_id.keys())) == list(sorted(label_list)):
logger.info(
f"The configuration of the model provided the following label correspondence: {label_name_to_id}. "
"Using it!"
)
label_to_id = {
i: label_name_to_id[label_list[i]] for i in range(num_labels)}
else:
logger.warning(
"Your model seems to have been trained with labels, but they don't match the dataset: ",
f"model labels: {list(sorted(label_name_to_id.keys()))}, dataset labels: {list(sorted(label_list))}."
"\nIgnoring the model labels as a result.",
)
elif args.task_name is None:
label_to_id = {v: i for i, v in enumerate(label_list)}
if label_to_id is not None:
model.config.label2id = label_to_id
model.config.id2label = {
id: label for label, id in config.label2id.items()}
elif args.task_name is not None and not is_regression:
model.config.label2id = {l: i for i, l in enumerate(label_list)}
model.config.id2label = {
id: label for label, id in config.label2id.items()}
padding = "max_length" if args.pad_to_max_length else False
def preprocess_function(examples):
# Tokenize the texts
texts = (
(examples[sentence1_key],) if sentence2_key is None else (
examples[sentence1_key], examples[sentence2_key])
)
result = tokenizer(*texts, padding=padding,
max_length=args.max_length, truncation=True)
if "label" in examples:
if label_to_id is not None:
# Map labels to IDs (not necessary for GLUE tasks)
result["labels"] = [label_to_id[l] for l in examples["label"]]
else:
# In all cases, rename the column to labels because the model will expect that.
result["labels"] = examples["label"]
return result
processed_datasets = raw_datasets.map(
preprocess_function,
batched=True,
remove_columns=raw_datasets["train"].column_names,
desc="Running tokenizer on dataset",
)
train_dataset = processed_datasets["train"]
eval_dataset = processed_datasets["validation_matched" if args.task_name ==
"mnli" else "validation"]
# Log a few random samples from the training set:
for index in random.sample(range(len(train_dataset)), 3):
logger.info(
f"Sample {index} of the training set: {train_dataset[index]}.")
# DataLoaders creation:
if args.pad_to_max_length:
        # If padding was already done to max length, we use the default data collator that will just convert everything
# to tensors.
data_collator = default_data_collator
else:
# Otherwise, `DataCollatorWithPadding` will apply dynamic padding for us (by padding to the maximum length of
# the samples passed). When using mixed precision, we add `pad_to_multiple_of=8` to pad all tensors to multiple
# of 8s, which will enable the use of Tensor Cores on NVIDIA hardware with compute capability >= 7.5 (Volta).
data_collator = DataCollatorWithPadding(
tokenizer, pad_to_multiple_of=(8 if args.fp16 else None))
train_dataloader = DataLoader(
train_dataset, shuffle=True, collate_fn=data_collator, batch_size=args.per_device_train_batch_size, drop_last=True
)
eval_dataloader = DataLoader(
eval_dataset, collate_fn=data_collator, batch_size=args.per_device_eval_batch_size, drop_last=True)
# Optimizer
# Split weights in two groups, one with weight decay and the other not.
# no_decay = ["bias", "LayerNorm.weight"]
# optimizer_grouped_parameters = [
# {
# "params": [p for n, p in model.named_parameters() if not any(nd in n for nd in no_decay)],
# "weight_decay": args.weight_decay,
# },
# {
# "params": [p for n, p in model.named_parameters() if any(nd in n for nd in no_decay)],
# "weight_decay": 0.0,
# },
# ]
# optimizer = AdamW(optimizer_grouped_parameters, lr=args.learning_rate)
# Prepare everything with our `accelerator`.
# model, optimizer, train_dataloader, eval_dataloader = accelerator.prepare(
# model, optimizer, train_dataloader, eval_dataloader
# )
# create pipeline module
# model.train()
model_pipe = GPTModelPipe(model.config, "classification", model)
# # model_pipe.train()
# model_pipe.copy_weights(model)
# print(train_dataset[0])
loss_fn = get_loss_fn(model, next(iter(train_dataloader))['labels'])
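    # get_loss_fn is a local helper from partitioner; a single batch is peeked at
    # here, presumably so the loss function can be specialised to the label
    # shape/dtype expected by the pipeline's final stage.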
optimizer_grouped_parameters = get_optimizer_grouped_parameters(args, model_pipe)
deepspeed.init_distributed()
initialize_model_parallel(1)
# model = GPT2ModelPipe(model.config, "classification", loss_fn, model=model,
# partition_method="uniform", num_stages=torch.cuda.device_count())
model_pipe = PipelineModule(
model_pipe.to_layers(),
loss_fn=loss_fn,
num_stages=torch.cuda.device_count(),
partition_method="uniform",
)
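    # PipelineModule splits the layer list uniformly into one pipeline stage per
    # visible GPU and applies loss_fn to the output of the last stage.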
# model = model_pipe
engine, optimizer, _, _ = deepspeed.initialize(
args, model_pipe,
model_parameters=optimizer_grouped_parameters,
# training_data=train_dataset,
# collate_fn=data_collator
)
    # Note -> the training dataloader needs to be prepared before we grab its length below (because its length will be
    # shorter in multiprocess)
# Scheduler and math around the number of training steps.
num_update_steps_per_epoch = math.ceil(
len(train_dataloader) / args.gradient_accumulation_steps)
if args.max_train_steps is None:
args.max_train_steps = args.num_train_epochs * num_update_steps_per_epoch
else:
args.num_train_epochs = math.ceil(
args.max_train_steps / num_update_steps_per_epoch)
# Get the metric function
if args.task_name is not None:
metric = load_metric("glue", args.task_name)
else:
metric = load_metric("accuracy")
# Train!
total_batch_size = args.per_device_train_batch_size * \
torch.cuda.device_count() * args.gradient_accumulation_steps
logger.info("***** Running training *****")
logger.info(f" Num examples = {len(train_dataset)}")
logger.info(f" Num Epochs = {args.num_train_epochs}")
logger.info(
f" Instantaneous batch size per device = {args.per_device_train_batch_size}")
logger.info(
f" Total train batch size (w. parallel, distributed & accumulation) = {total_batch_size}")
logger.info(
f" Gradient Accumulation steps = {args.gradient_accumulation_steps}")
logger.info(f" Total optimization steps = {args.max_train_steps}")
# Only show the progress bar once on each machine.
progress_bar = tqdm(range(args.max_train_steps),
disable=not args.local_rank <= 0)
completed_steps = 0
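    # The DeepSpeed pipeline engine pulls its own data: it expects an iterator
    # yielding ((input_ids, attention_mask), labels) tuples rather than the
    # dict-style batches produced by the HF data collator.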
def batch_iterator(dataloader):
for batch in dataloader:
batch = BatchEncoding(batch).to(torch.cuda.current_device())
# print(batch)
yield (
(batch.get('input_ids'), batch.get('attention_mask')),
batch.get('labels')
)
train_dataloader = RepeatingLoader(train_dataloader)
train_data_iter = batch_iterator(train_dataloader)
eval_dataloader = RepeatingLoader(eval_dataloader)
eval_data_iter = batch_iterator(eval_dataloader)
# model.train()
m_bsz = deepspeed_config.get_value('train_micro_batch_size_per_gpu')
gas = deepspeed_config.get_value('gradient_accumulation_steps')
steps_per_epoch = len(train_dataset) // (gas * m_bsz)
eval_loss_list = []
train_loss_list = []
    for step in trange(steps_per_epoch * args.num_train_epochs, desc="Training"):
        train_loss = engine.train_batch(train_data_iter)
        train_loss_list.append(train_loss.detach().cpu().item())
        # at each epoch boundary: run a full evaluation pass, report loss stats, checkpoint
        if (step + 1) % steps_per_epoch == 0:
            for eval_step in trange(len(eval_dataset) // (gas * m_bsz), desc="Evaluating"):
                eval_loss = engine.eval_batch(eval_data_iter)
                eval_loss_list.append(eval_loss.detach().cpu().item())
            print("epoch (%s) train_loss %s, eval_loss %s" % (
                (step + 1) // steps_per_epoch, stats.describe(train_loss_list), stats.describe(eval_loss_list)))
            train_loss_list = []
            eval_loss_list = []
            engine.save_checkpoint(args.output_dir, tag=str(step))
return
for epoch in range(args.num_train_epochs):
train_loss_list = []
eval_loss_list = []
for step in trange(len(train_dataset) // (gas * m_bsz) , desc="Training"):
train_loss = model.train_batch(train_data_iter)
train_loss_list.append(train_loss.detach().cpu().item())
for step in trange(len(eval_dataset) // (gas * m_bsz), desc="Evaluating"):
eval_loss = model.eval_batch(eval_data_iter)
eval_loss_list.append(eval_loss.detach().cpu().item())
# print("========================================================")
print("epoch (%s) train_loss %s, eval_loss %s" % (
epoch+1, stats.describe(train_loss_list), stats.describe(eval_loss_list)))
return
print("========================================================")
for epoch in range(args.num_train_epochs):
model.train()
for step, batch in enumerate(train_dataloader):
batch = BatchEncoding(batch).to(torch.cuda.current_device())
            outputs = model(**batch)
            loss = outputs.loss
            loss = loss / args.gradient_accumulation_steps
model.backward(loss)
if step % args.gradient_accumulation_steps == 0 or step == len(train_dataloader) - 1:
optimizer.step()
optimizer.zero_grad()
progress_bar.update(1)
completed_steps += 1
if completed_steps >= args.max_train_steps:
break
model.eval()
with torch.no_grad():
for step, batch in enumerate(eval_dataloader):
batch = BatchEncoding(batch).to(torch.cuda.current_device())
outputs = model(**batch)
predictions = outputs.logits.argmax(
dim=-1) if not is_regression else outputs.logits.squeeze()
metric.add_batch(
predictions=predictions,
references=batch["labels"],
)
eval_metric = metric.compute()
logger.info(f"epoch {epoch}: {eval_metric}")
if args.output_dir is not None:
model.save_checkpoint(args.output_dir, save_latest=True)
if args.local_rank <= 0:
tokenizer.save_pretrained(args.output_dir)
if args.task_name == "mnli":
# Final evaluation on mismatched validation set
eval_dataset = processed_datasets["validation_mismatched"]
eval_dataloader = DataLoader(
eval_dataset, collate_fn=data_collator, batch_size=args.per_device_eval_batch_size
)
# eval_dataloader = accelerator.prepare(eval_dataloader)
model.eval()
with torch.no_grad():
for step, batch in enumerate(eval_dataloader):
batch = BatchEncoding(batch).to(torch.cuda.current_device())
outputs = model(**batch)
predictions = outputs.logits.argmax(dim=-1)
metric.add_batch(
predictions=predictions,
references=batch["labels"],
)
eval_metric = metric.compute()
logger.info(f"mnli-mm: {eval_metric}")
if __name__ == "__main__":
main()
| 40.322632
| 158
| 0.649536
|
28b8d4c47bd9985ba99b32ae2a99cdd5d3905606
| 489
|
py
|
Python
|
bioviz/exceptions.py
|
BioWiz/msa
|
634a99b2a36393dbec75ff008997de0ebd6cb2cb
|
[
"BSD-3-Clause"
] | 1
|
2021-04-01T05:50:44.000Z
|
2021-04-01T05:50:44.000Z
|
bioviz/exceptions.py
|
BioWiz/msa
|
634a99b2a36393dbec75ff008997de0ebd6cb2cb
|
[
"BSD-3-Clause"
] | null | null | null |
bioviz/exceptions.py
|
BioWiz/msa
|
634a99b2a36393dbec75ff008997de0ebd6cb2cb
|
[
"BSD-3-Clause"
] | null | null | null |
class InvalidColorMapException(Exception):
    def __init__(self, *args):
        if len(args) == 1 and isinstance(args[0], str):
            self.message = args[0]
        else:
            self.message = "Invalid colormap."
        super(InvalidColorMapException, self).__init__(self.message)
class InvalidFileFormatException(Exception):
    def __init__(self, *args):
        if len(args) == 1 and isinstance(args[0], str):
            self.message = args[0]
        else:
            self.message = "Invalid fileformat for this type of diagram."
        super(InvalidFileFormatException, self).__init__(self.message)
| 30.5625
| 73
| 0.597137
|
0e851fc61b3596d7f982daa5493433558ea98213
| 2,352
|
py
|
Python
|
utilities/permissions.py
|
BenitzCoding/Utility-Bot
|
a3959f5fc9a13e2e282d68ce6dd3b0266fc2b1b2
|
[
"MIT"
] | null | null | null |
utilities/permissions.py
|
BenitzCoding/Utility-Bot
|
a3959f5fc9a13e2e282d68ce6dd3b0266fc2b1b2
|
[
"MIT"
] | null | null | null |
utilities/permissions.py
|
BenitzCoding/Utility-Bot
|
a3959f5fc9a13e2e282d68ce6dd3b0266fc2b1b2
|
[
"MIT"
] | null | null | null |
import discord
from utils import default
from discord.ext import commands
owners = default.get("config.json").dev_ids
def is_owner(ctx):
return ctx.author.id in owners
async def check_permissions(ctx, perms, *, check=all):
if ctx.author.id in owners:
return True
resolved = ctx.channel.permissions_for(ctx.author)
return check(getattr(resolved, name, None) == value for name, value in perms.items())
def has_permissions(*, check=all, **perms):
async def pred(ctx):
return await check_permissions(ctx, perms, check=check)
return commands.check(pred)
async def check_priv(ctx, member):
try:
# Self checks
if member == ctx.author:
return await ctx.send(f"You can't {ctx.command.name} yourself")
if member.id == ctx.bot.user.id:
return await ctx.send("So that's what you think of me huh..? sad ;-;")
# Check if user bypasses
if ctx.author.id == ctx.guild.owner.id:
return False
# Now permission check
if member.id in owners:
if ctx.author.id not in owners:
return await ctx.send(f"I can't {ctx.command.name} my creator ;-;")
else:
pass
if member.id == ctx.guild.owner.id:
return await ctx.send(f"You can't {ctx.command.name} the owner, lol")
if ctx.author.top_role == member.top_role:
return await ctx.send(f"You can't {ctx.command.name} someone who has the same permissions as you...")
if ctx.author.top_role < member.top_role:
return await ctx.send(f"Nope, you can't {ctx.command.name} someone higher than yourself.")
except Exception:
pass
def can_send(ctx):
return isinstance(ctx.channel, discord.DMChannel) or ctx.channel.permissions_for(ctx.guild.me).send_messages
def can_embed(ctx):
return isinstance(ctx.channel, discord.DMChannel) or ctx.channel.permissions_for(ctx.guild.me).embed_links
def can_upload(ctx):
return isinstance(ctx.channel, discord.DMChannel) or ctx.channel.permissions_for(ctx.guild.me).attach_files
def can_react(ctx):
return isinstance(ctx.channel, discord.DMChannel) or ctx.channel.permissions_for(ctx.guild.me).add_reactions
def is_nsfw(ctx):
return isinstance(ctx.channel, discord.DMChannel) or ctx.channel.is_nsfw()
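# Hypothetical usage sketch (not part of the original module): the cog and
# command below are illustrative only, showing how has_permissions() and
# check_priv() are meant to be combined inside a discord.py command.
class ExampleModeration(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
    @commands.command()
    @has_permissions(kick_members=True)
    async def kick(self, ctx, member: discord.Member, *, reason: str = None):
        # check_priv() sends a refusal message (truthy return) when the action
        # is not allowed, so the command simply bails out in that case.
        if await check_priv(ctx, member):
            return
        await member.kick(reason=reason)
        await ctx.send(f"Kicked {member}")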
| 32.219178
| 113
| 0.671344
|
da8f86503b58036ab1b3aecab6a973cb9d95018e
| 347
|
py
|
Python
|
src/perspective.py
|
jamesfulford/better-lane-finding
|
8b17f25b208b062694ced081fad89873b4ff2356
|
[
"MIT"
] | null | null | null |
src/perspective.py
|
jamesfulford/better-lane-finding
|
8b17f25b208b062694ced081fad89873b4ff2356
|
[
"MIT"
] | null | null | null |
src/perspective.py
|
jamesfulford/better-lane-finding
|
8b17f25b208b062694ced081fad89873b4ff2356
|
[
"MIT"
] | null | null | null |
import cv2
def get_transformers(src, dst):
M = cv2.getPerspectiveTransform(src, dst)
Minv = cv2.getPerspectiveTransform(dst, src)
return (
# Transform
lambda i: cv2.warpPerspective(i, M, (i.shape[1], i.shape[0])),
# Undo transform
lambda i: cv2.warpPerspective(i, Minv, (i.shape[1], i.shape[0])),
)
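# Minimal usage sketch (an assumption, not part of the original file): the
# source/destination quadrilaterals below are placeholder pixel coordinates.
if __name__ == "__main__":
    import numpy as np
    # cv2.getPerspectiveTransform expects four float32 (x, y) points per array.
    src = np.float32([[200, 720], [580, 460], [700, 460], [1080, 720]])
    dst = np.float32([[300, 720], [300, 0], [980, 0], [980, 720]])
    warp, unwarp = get_transformers(src, dst)
    frame = np.zeros((720, 1280, 3), dtype=np.uint8)  # dummy road image
    birds_eye = warp(frame)
    restored = unwarp(birds_eye)
    print(birds_eye.shape, restored.shape)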
| 31.545455
| 73
| 0.622478
|
918369b6bab2d73ca520367e3cf1e3932903cffc
| 2,233
|
py
|
Python
|
lib/clr.py
|
HarveyYan/RNAonGraph
|
0056cc465f7bc4a89c4955d2cee88d6a858cef71
|
[
"MIT"
] | 15
|
2020-06-27T08:08:20.000Z
|
2022-02-22T03:29:45.000Z
|
lib/clr.py
|
HarveyYan/RNAonGraph
|
0056cc465f7bc4a89c4955d2cee88d6a858cef71
|
[
"MIT"
] | 2
|
2020-08-10T00:46:06.000Z
|
2020-11-20T21:25:27.000Z
|
lib/clr.py
|
HarveyYan/RNAonGraph
|
0056cc465f7bc4a89c4955d2cee88d6a858cef71
|
[
"MIT"
] | 6
|
2020-02-19T16:04:00.000Z
|
2021-12-09T22:32:23.000Z
|
'''
Downloaded from https://github.com/mhmoodlan/cyclic-learning-rate
Thanks Mahmoud!
'''
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.eager import context
def cyclic_learning_rate(global_step,
learning_rate,
max_lr,
step_size,
gamma=0.99994,
mode='triangular',
name=None):
if global_step is None:
raise ValueError("global_step is required for cyclic_learning_rate.")
with ops.name_scope(name, "CyclicLearningRate",
[learning_rate, global_step]) as name:
learning_rate = ops.convert_to_tensor(learning_rate, name="learning_rate")
dtype = learning_rate.dtype
global_step = math_ops.cast(global_step, dtype)
step_size = math_ops.cast(step_size, dtype)
def cyclic_lr():
"""Helper to recompute learning rate; most helpful in eager-mode."""
# computing: cycle = floor( 1 + global_step / ( 2 * step_size ) )
double_step = math_ops.multiply(2., step_size)
global_div_double_step = math_ops.divide(global_step, double_step)
cycle = math_ops.floor(math_ops.add(1., global_div_double_step))
            # computing: x = abs( global_step / step_size - 2 * cycle + 1 )
double_cycle = math_ops.multiply(2., cycle)
global_div_step = math_ops.divide(global_step, step_size)
tmp = math_ops.subtract(global_div_step, double_cycle)
x = math_ops.abs(math_ops.add(1., tmp))
            # computing: clr = learning_rate + ( max_lr - learning_rate ) * max( 0, 1 - x )
a1 = math_ops.maximum(0., math_ops.subtract(1., x))
a2 = math_ops.subtract(max_lr, learning_rate)
clr = math_ops.multiply(a1, a2)
if mode == 'triangular2':
clr = math_ops.divide(clr, math_ops.cast(math_ops.pow(2, math_ops.cast(
cycle-1, tf.int32)), tf.float32))
if mode == 'exp_range':
clr = math_ops.multiply(math_ops.pow(gamma, global_step), clr)
return math_ops.add(clr, learning_rate, name=name)
if not context.executing_eagerly():
cyclic_lr = cyclic_lr()
return cyclic_lr
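# Minimal usage sketch (an assumption, not part of the downloaded file):
# TF1.x-style graph execution, where the optimizer increments global_step and
# the returned tensor supplies the current cyclic learning rate.
if __name__ == "__main__":
    tf.compat.v1.disable_eager_execution()
    global_step = tf.compat.v1.train.get_or_create_global_step()
    lr = cyclic_learning_rate(global_step, learning_rate=1e-4, max_lr=1e-3,
                              step_size=20., mode='triangular2')
    weight = tf.compat.v1.get_variable("w", shape=[], initializer=tf.compat.v1.zeros_initializer())
    loss = tf.square(weight - 3.0)
    train_op = tf.compat.v1.train.GradientDescentOptimizer(lr).minimize(
        loss, global_step=global_step)
    with tf.compat.v1.Session() as sess:
        sess.run(tf.compat.v1.global_variables_initializer())
        for _ in range(5):
            _, current_lr = sess.run([train_op, lr])
            print("lr:", current_lr)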
| 45.571429
| 85
| 0.652933
|
c343c69daa12a382203ff3c282c1160e402f0c18
| 4,729
|
py
|
Python
|
chromecast/tools/build/generate_test_lists.py
|
Wzzzx/chromium-crosswalk
|
768dde8efa71169f1c1113ca6ef322f1e8c9e7de
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2
|
2019-01-28T08:09:58.000Z
|
2021-11-15T15:32:10.000Z
|
chromecast/tools/build/generate_test_lists.py
|
Wzzzx/chromium-crosswalk
|
768dde8efa71169f1c1113ca6ef322f1e8c9e7de
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
chromecast/tools/build/generate_test_lists.py
|
Wzzzx/chromium-crosswalk
|
768dde8efa71169f1c1113ca6ef322f1e8c9e7de
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 6
|
2020-09-23T08:56:12.000Z
|
2021-11-18T03:40:49.000Z
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Helper script to generate unit test lists for the Chromecast build scripts.
"""
import glob
import optparse
import sys
def CombineList(test_files_dir, list_output_file, include_filters,
additional_runtime_options):
"""Writes a unit test file in a format compatible for Chromecast scripts.
If include_filters is True, uses filters to create a test runner list
and also include additional options, if any.
Otherwise, creates a list only of the tests to build.
Args:
test_files_dir: Path to the intermediate directory containing tests/filters.
list_output_file: Path to write the unit test file out to.
include_filters: Whether or not to include the filters when generating
the test list.
additional_runtime_options: Arguments to be applied to all tests. These are
applied before filters (so test-specific filters take precedence).
"""
# GYP targets may provide a numbered priority for the filename. Sort to
# use that priority.
test_files = sorted(glob.glob(test_files_dir + "/*.tests"))
filter_files = sorted(glob.glob(test_files_dir + "/*.filters"))
test_bin_set = set()
for test_filename in test_files:
with open(test_filename, "r") as test_file:
for test_file_line in test_file:
# Binary name may be a simple test target (cast_net_unittests) or be a
# qualified gyp path (../base.gyp:base_unittests).
test_binary_name = test_file_line.split(":")[-1].strip()
test_bin_set.add(test_binary_name)
test_filters = {}
if include_filters:
for filter_filename in filter_files:
with open(filter_filename, "r") as filter_file:
for filter_line in filter_file:
(test_binary_name, filter) = filter_line.strip().split(" ", 1)
if test_binary_name not in test_bin_set:
raise Exception("Filter found for unknown target: " +
test_binary_name)
if test_binary_name in test_filters:
test_filters[test_binary_name] += " " + filter
else:
test_filters[test_binary_name] = filter
test_binaries = [
binary + " " + (additional_runtime_options or "")
+ (" " + test_filters[binary] if binary in test_filters else "")
for binary in test_bin_set]
with open(list_output_file, "w") as f:
f.write("\n".join(sorted(test_binaries)))
def CreateList(inputs, list_output_file):
with open(list_output_file, "w") as f:
f.write("\n".join(inputs))
def DoMain(argv):
"""Main method. Runs helper commands for generating unit test lists."""
parser = optparse.OptionParser(
"""usage: %prog [<options>] <command> [<test names>]
Valid commands:
create_list prints all given test names/args to a file, one line
per string
pack_build packs all test files from the given output directory
into a single test list file
pack_run packs all test and filter files from the given
output directory into a single test list file
""")
parser.add_option("-o", action="store", dest="list_output_file",
help="Output path in which to write the test list.")
parser.add_option("-t", action="store", dest="test_files_dir",
help="Intermediate test list directory.")
parser.add_option("-a", action="store", dest="additional_runtime_options",
help="Additional options applied to all tests.")
options, inputs = parser.parse_args(argv)
list_output_file = options.list_output_file
test_files_dir = options.test_files_dir
additional_runtime_options = options.additional_runtime_options
if len(inputs) < 1:
parser.error("No command given.\n")
command = inputs[0]
test_names = inputs[1:]
if not list_output_file:
parser.error("Output path (-o) is required.\n")
if command == "create_list":
return CreateList(test_names, list_output_file)
if command == "pack_build":
if not test_files_dir:
parser.error("pack_build require a test files directory (-t).\n")
return CombineList(test_files_dir, list_output_file, False, None)
if command == "pack_run":
if not test_files_dir:
parser.error("pack_run require a test files directory (-t).\n")
return CombineList(test_files_dir, list_output_file, True,
additional_runtime_options)
parser.error("Invalid command specified.")
if __name__ == "__main__":
DoMain(sys.argv[1:])
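# Hypothetical invocation sketch (not part of the original script); output
# paths and test target names below are illustrative only.
def _example_usage():
  # Write an explicit list of test targets, one per line:
  DoMain(["-o", "tests_to_build.list", "create_list", "base_unittests", "net_unittests"])
  # Pack per-target .tests/.filters files into a single runtime list:
  DoMain(["-o", "tests_to_run.list", "-t", "test_lists_dir", "pack_run"])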
| 37.23622
| 80
| 0.67879
|
e2b7a02e428f4d991efacfb37fa306f80c1637b6
| 1,132
|
py
|
Python
|
Machine_Learning/Introduction/03-Basic/Module_4/DenseNetX/image_input_fn.py
|
mkolod/Vitis-Tutorials
|
33d6cf9686398ef1179778dc0da163291c68b465
|
[
"Apache-2.0"
] | 1
|
2020-09-19T00:58:05.000Z
|
2020-09-19T00:58:05.000Z
|
Machine_Learning/Introduction/03-Basic/Module_4/DenseNetX/image_input_fn.py
|
mkolod/Vitis-Tutorials
|
33d6cf9686398ef1179778dc0da163291c68b465
|
[
"Apache-2.0"
] | null | null | null |
Machine_Learning/Introduction/03-Basic/Module_4/DenseNetX/image_input_fn.py
|
mkolod/Vitis-Tutorials
|
33d6cf9686398ef1179778dc0da163291c68b465
|
[
"Apache-2.0"
] | 2
|
2021-10-18T01:28:29.000Z
|
2022-01-07T16:20:51.000Z
|
'''
Copyright 2020 Xilinx Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import os
import cv2
calib_image_list = './build/quantize/images/calib_list.txt'
calib_batch_size = 100
def calib_input(iter):
images = []
    # read the list of calibration image paths once and close the file promptly
    with open(calib_image_list) as list_file:
        lines = list_file.readlines()
    for index in range(0, calib_batch_size):
        curline = lines[iter * calib_batch_size + index]
calib_image_name = curline.strip()
# open image as BGR
image = cv2.imread(calib_image_name)
# change to RGB
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# normalize
image = image/255.0
images.append(image)
return {"input_1": images}
| 26.325581
| 73
| 0.729682
|
15fcf3db6ffd93e92bdd1d31c4a5c71ace595964
| 2,745
|
py
|
Python
|
copilot/frame.py
|
aphistic/copilot
|
d434a801f321dfd0865448751a26f755b41f9037
|
[
"MIT"
] | null | null | null |
copilot/frame.py
|
aphistic/copilot
|
d434a801f321dfd0865448751a26f755b41f9037
|
[
"MIT"
] | 1
|
2016-04-12T01:19:52.000Z
|
2016-04-12T01:19:52.000Z
|
copilot/frame.py
|
aphistic/copilot
|
d434a801f321dfd0865448751a26f755b41f9037
|
[
"MIT"
] | null | null | null |
from tkinter import Toplevel, VERTICAL
from tkinter.ttk import Frame, Button, Label, Scrollbar
class CopilotBaseFrame(Frame):
def __init__(self, master, config):
super(CopilotBaseFrame, self).__init__(master)
self._master = master
self._config = config
def _make_full(self, root):
w, h = root.winfo_screenwidth(), root.winfo_screenheight()
root.overrideredirect(1)
root.geometry("%dx%d+0+0" % (w, h))
def _new_window_cb(self, frame_type):
def _cb(cb_self, cb_type):
new_window = Toplevel(cb_self._master)
cb_type(new_window, self._config)
return lambda: _cb(self, frame_type)
def _new_state_window(self, frame_type, state):
new_window = Toplevel(self._master)
frame_type(new_window, self._config, state)
def _new_state_window_cb(self, frame_type, state):
def _cb(cb_self, cb_type, cb_state):
new_window = Toplevel(cb_self._master)
cb_type(new_window, self._config, cb_state)
return lambda: _cb(self, frame_type, state)
class CopilotMainFrame(CopilotBaseFrame):
def __init__(self, master, config):
super(CopilotMainFrame, self).__init__(master, config)
if config.full_screen:
self._make_full(master)
class CopilotInnerFrame(CopilotBaseFrame):
def __init__(self, master, config):
super(CopilotInnerFrame, self).__init__(master, config)
if config.full_screen:
self._make_full(master)
self.master.grid_rowconfigure(1, weight=1)
self.master.grid_columnconfigure(1, weight=1)
self._create_header()
self._sb = Scrollbar(self._master, orient=VERTICAL)
self._sb.grid(row=1, column=3, sticky='nse')
self._next_hidden = False
def _cmd_back(self):
self._master.destroy()
def _create_header(self):
self.back_btn = Button(
self._master,
text='< Back',
command=self._cmd_back
)
self.back_btn.grid(row=0, column=0, sticky='w')
self._frame_lbl = Label(
self.master,
text='',
anchor='center',
font=self._config.item_font
)
self._frame_lbl.grid(row=0, column=1, sticky='ew')
self._next_btn = Button(
self.master,
text='Next >'
)
self._next_btn.grid(row=0, column=2, sticky='e')
def _hide_next(self):
if not self._next_hidden:
self._next_btn.grid_remove()
self._next_hidden = True
def _show_next(self):
if self._next_hidden:
self._next_btn.grid(row=0, column=2, sticky='e')
self._next_hidden = False
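# Hypothetical usage sketch (not part of the original module): a minimal
# subclass wiring up the shared header; the config object is assumed to
# expose the full_screen and item_font attributes used above.
class ExampleListFrame(CopilotInnerFrame):
    def __init__(self, master, config):
        super(ExampleListFrame, self).__init__(master, config)
        self._frame_lbl['text'] = 'Example Items'
        # No follow-up screen here, so hide the Next button.
        self._hide_next()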
| 30.164835
| 66
| 0.621129
|
a1ab54ba301ea9cccc7d216e622e5e4b98de6155
| 35
|
py
|
Python
|
pulp/tests/__init__.py
|
atomassi/pulp
|
ec7105f515c0dc8058493677061b6d8d8c4b5be8
|
[
"MIT"
] | 1,418
|
2015-04-23T12:33:11.000Z
|
2022-03-31T22:26:43.000Z
|
pulp/tests/__init__.py
|
atomassi/pulp
|
ec7105f515c0dc8058493677061b6d8d8c4b5be8
|
[
"MIT"
] | 360
|
2015-04-14T19:09:47.000Z
|
2022-03-20T13:05:14.000Z
|
pulp/tests/__init__.py
|
atomassi/pulp
|
ec7105f515c0dc8058493677061b6d8d8c4b5be8
|
[
"MIT"
] | 352
|
2015-04-16T15:45:41.000Z
|
2022-03-12T09:16:04.000Z
|
from .run_tests import pulpTestAll
| 17.5
| 34
| 0.857143
|
1f80b8c91dbf6adcb703e93c9726eb9dac64373f
| 25,368
|
py
|
Python
|
sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py
|
aalapatirvbd/azure-sdk-for-python
|
aae867a31f53286b123cf008a43cf0cd3746f8ba
|
[
"MIT"
] | null | null | null |
sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py
|
aalapatirvbd/azure-sdk-for-python
|
aae867a31f53286b123cf008a43cf0cd3746f8ba
|
[
"MIT"
] | null | null | null |
sdk/search/azure-search-documents/azure/search/documents/indexes/_internal/aio/_search_indexer_client.py
|
aalapatirvbd/azure-sdk-for-python
|
aae867a31f53286b123cf008a43cf0cd3746f8ba
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core import MatchConditions
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError
from .._generated.aio import SearchServiceClient as _SearchServiceClient
from .._generated.models import SearchIndexerSkillset
from .._utils import (
get_access_conditions,
normalize_endpoint,
pack_search_indexer_data_source,
unpack_search_indexer_data_source,
)
from ...._headers_mixin import HeadersMixin
from ...._version import SDK_MONIKER
if TYPE_CHECKING:
# pylint:disable=unused-import,ungrouped-imports
from .._generated.models import SearchIndexer, SearchIndexerStatus
from typing import Any, Optional, Sequence
from azure.core.credentials import AzureKeyCredential
class SearchIndexerClient(HeadersMixin): # pylint: disable=R0904
"""A client to interact with Azure search service Indexers.
"""
_ODATA_ACCEPT = "application/json;odata.metadata=minimal" # type: str
def __init__(self, endpoint, credential, **kwargs):
# type: (str, AzureKeyCredential, **Any) -> None
self._endpoint = normalize_endpoint(endpoint) # type: str
self._credential = credential # type: AzureKeyCredential
self._client = _SearchServiceClient(
endpoint=endpoint, sdk_moniker=SDK_MONIKER, **kwargs
) # type: _SearchServiceClient
async def __aenter__(self):
# type: () -> SearchIndexersClient
await self._client.__aenter__() # pylint:disable=no-member
return self
async def __aexit__(self, *args):
# type: (*Any) -> None
return await self._client.__aexit__(*args) # pylint:disable=no-member
async def close(self):
# type: () -> None
"""Close the :class:`~azure.search.documents.aio.SearchIndexerClient` session.
"""
return await self._client.close()
@distributed_trace_async
async def create_indexer(self, indexer, **kwargs):
# type: (SearchIndexer, **Any) -> SearchIndexer
"""Creates a new SearchIndexer.
:param indexer: The definition of the indexer to create.
:type indexer: ~azure.search.documents.SearchIndexer
:return: The created SearchIndexer
:rtype: ~azure.search.documents.indexes.models.SearchIndexer
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START create_indexer_async]
:end-before: [END create_indexer_async]
:language: python
:dedent: 4
:caption: Create a SearchIndexer
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.indexers.create(indexer, **kwargs)
return result
@distributed_trace_async
async def create_or_update_indexer(self, indexer, name=None, **kwargs):
# type: (SearchIndexer, Optional[str], **Any) -> SearchIndexer
"""Creates a new indexer or updates a indexer if it already exists.
:param name: The name of the indexer to create or update.
:type name: str
:param indexer: The definition of the indexer to create or update.
:type indexer: ~azure.search.documents.SearchIndexer
:return: The created SearchIndexer
:rtype: ~azure.search.documents.indexes.models.SearchIndexer
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map, access_condition = get_access_conditions(
indexer, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
if not name:
name = indexer.name
result = await self._client.indexers.create_or_update(
indexer_name=name, indexer=indexer, error_map=error_map, **kwargs
)
return result
@distributed_trace_async
async def get_indexer(self, name, **kwargs):
# type: (str, **Any) -> SearchIndexer
"""Retrieves a indexer definition.
:param name: The name of the indexer to retrieve.
:type name: str
:return: The SearchIndexer that is fetched.
:rtype: ~azure.search.documents.indexes.models.SearchIndexer
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START get_indexer_async]
:end-before: [END get_indexer_async]
:language: python
:dedent: 4
:caption: Retrieve a SearchIndexer
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.indexers.get(name, **kwargs)
return result
@distributed_trace_async
async def get_indexers(self, **kwargs):
# type: (**Any) -> Sequence[SearchIndexer]
"""Lists all indexers available for a search service.
:return: List of all the SearchIndexers.
:rtype: `list[~azure.search.documents.indexes.models.SearchIndexer]`
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START list_indexer_async]
:end-before: [END list_indexer_async]
:language: python
:dedent: 4
:caption: List all the SearchIndexers
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.indexers.list(**kwargs)
return result.indexers
@distributed_trace_async
async def get_indexer_names(self, **kwargs):
# type: (**Any) -> Sequence[str]
"""Lists all indexer names available for a search service.
:return: List of all the SearchIndexer names.
:rtype: `list[str]`
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.indexers.list(**kwargs)
return [x.name for x in result.indexers]
@distributed_trace_async
async def delete_indexer(self, indexer, **kwargs):
# type: (Union[str, SearchIndexer], **Any) -> None
"""Deletes an indexer. To use access conditions, the SearchIndexer model
must be provided instead of the name. It is enough to provide
the name of the indexer to delete unconditionally.
:param name: The name of the indexer to delete.
:type name: str
:keyword match_condition: The match condition to use upon the etag
:type match_condition: ~azure.core.MatchConditions
:return: None
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START delete_indexer_async]
:end-before: [END delete_indexer_async]
:language: python
:dedent: 4
:caption: Delete a SearchIndexer
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map, access_condition = get_access_conditions(
indexer, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
try:
name = indexer.name
except AttributeError:
name = indexer
await self._client.indexers.delete(name, error_map=error_map, **kwargs)
@distributed_trace_async
async def run_indexer(self, name, **kwargs):
# type: (str, **Any) -> None
"""Run an indexer.
:param name: The name of the indexer to run.
:type name: str
:return: None
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START run_indexer_async]
:end-before: [END run_indexer_async]
:language: python
:dedent: 4
:caption: Run a SearchIndexer
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
await self._client.indexers.run(name, **kwargs)
@distributed_trace_async
async def reset_indexer(self, name, **kwargs):
# type: (str, **Any) -> None
"""Resets the change tracking state associated with an indexer.
:param name: The name of the indexer to reset.
:type name: str
:return: None
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START reset_indexer_async]
:end-before: [END reset_indexer_async]
:language: python
:dedent: 4
:caption: Reset a SearchIndexer's change tracking state
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
await self._client.indexers.reset(name, **kwargs)
@distributed_trace_async
async def get_indexer_status(self, name, **kwargs):
# type: (str, **Any) -> SearchIndexerStatus
"""Get the status of the indexer.
:param name: The name of the indexer to fetch the status.
:type name: str
:return: SearchIndexerStatus
:rtype: SearchIndexerStatus
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_indexer_operations_async.py
:start-after: [START get_indexer_status_async]
:end-before: [END get_indexer_status_async]
:language: python
:dedent: 4
:caption: Get a SearchIndexer's status
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
return await self._client.indexers.get_status(name, **kwargs)
@distributed_trace_async
async def create_data_source_connection(self, data_source_connection, **kwargs):
# type: (SearchIndexerDataSourceConnection, **Any) -> SearchIndexerDataSourceConnection
"""Creates a new data source connection.
:param data_source_connection: The definition of the data source connection to create.
:type data_source_connection: ~search.models.SearchIndexerDataSourceConnection
:return: The created SearchIndexerDataSourceConnection
:rtype: ~search.models.SearchIndexerDataSourceConnection
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py
:start-after: [START create_data_source_connection_async]
:end-before: [END create_data_source_connection_async]
:language: python
:dedent: 4
:caption: Create a SearchIndexerDataSourceConnection
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
packed_data_source = pack_search_indexer_data_source(data_source_connection)
result = await self._client.data_sources.create(packed_data_source, **kwargs)
return unpack_search_indexer_data_source(result)
@distributed_trace_async
async def create_or_update_data_source_connection(self, data_source_connection, name=None, **kwargs):
# type: (SearchIndexerDataSourceConnection, Optional[str], **Any) -> SearchIndexerDataSourceConnection
"""Creates a new data source connection or updates a data source connection if it already exists.
:param name: The name of the data source connection to create or update.
:type name: str
:param data_source_connection: The definition of the data source connection to create or update.
:type data_source_connection: ~search.models.SearchIndexerDataSourceConnection
:keyword match_condition: The match condition to use upon the etag
:type match_condition: ~azure.core.MatchConditions
:return: The created SearchIndexerDataSourceConnection
:rtype: ~search.models.SearchIndexerDataSourceConnection
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map, access_condition = get_access_conditions(
data_source_connection, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
if not name:
name = data_source_connection.name
packed_data_source = pack_search_indexer_data_source(data_source_connection)
result = await self._client.data_sources.create_or_update(
data_source_name=name,
data_source=packed_data_source,
error_map=error_map,
**kwargs
)
return unpack_search_indexer_data_source(result)
@distributed_trace_async
async def delete_data_source_connection(self, data_source_connection, **kwargs):
# type: (Union[str, SearchIndexerDataSourceConnection], **Any) -> None
"""Deletes a data source connection. To use access conditions, the
SearchIndexerDataSourceConnection model must be provided instead of the name.
It is enough to provide the name of the data source connection to delete unconditionally
:param data_source_connection: The data source connection to delete.
:type data_source_connection: str or ~search.models.SearchIndexerDataSourceConnection
:keyword match_condition: The match condition to use upon the etag
:type match_condition: ~azure.core.MatchConditions
:return: None
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py
:start-after: [START delete_data_source_async]
:end-before: [END delete_data_source_async]
:language: python
:dedent: 4
:caption: Delete a SearchIndexerDataSourceConnection
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map, access_condition = get_access_conditions(
data_source_connection, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
try:
name = data_source_connection.name
except AttributeError:
name = data_source_connection
await self._client.data_sources.delete(
data_source_name=name, error_map=error_map, **kwargs
)
@distributed_trace_async
async def get_data_source_connection(self, name, **kwargs):
# type: (str, **Any) -> SearchIndexerDataSourceConnection
"""Retrieves a data source connection definition.
:param name: The name of the data source connection to retrieve.
:type name: str
:return: The SearchIndexerDataSourceConnection that is fetched.
:rtype: ~search.models.SearchIndexerDataSourceConnection
.. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py
:start-after: [START get_data_source_connection_async]
:end-before: [END get_data_source_connection_async]
:language: python
:dedent: 4
:caption: Retrieve a SearchIndexerDataSourceConnection
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.data_sources.get(name, **kwargs)
return unpack_search_indexer_data_source(result)
@distributed_trace_async
async def get_data_source_connections(self, **kwargs):
# type: (**Any) -> Sequence[SearchIndexerDataSourceConnection]
"""Lists all data source connections available for a search service.
:return: List of all the data source connections.
:rtype: `list[~search.models.SearchIndexerDataSourceConnection]`
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_data_source_operations_async.py
:start-after: [START list_data_source_connection_async]
:end-before: [END list_data_source_connection_async]
:language: python
:dedent: 4
:caption: List all SearchIndexerDataSourceConnections
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.data_sources.list(**kwargs)
return [unpack_search_indexer_data_source(x) for x in result.data_sources]
@distributed_trace_async
async def get_data_source_connection_names(self, **kwargs):
# type: (**Any) -> Sequence[str]
"""Lists all data source connection names available for a search service.
:return: List of all the data source connection names.
:rtype: `list[str]`
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.data_sources.list(**kwargs)
return [x.name for x in result.data_sources]
@distributed_trace_async
async def get_skillsets(self, **kwargs):
# type: (**Any) -> List[SearchIndexerSkillset]
"""List the SearchIndexerSkillsets in an Azure Search service.
:return: List of SearchIndexerSkillsets
:rtype: list[~azure.search.documents.indexes.models.SearchIndexerSkillset]
:raises: ~azure.core.exceptions.HttpResponseError
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_skillset_operations_async.py
:start-after: [START get_skillsets]
:end-before: [END get_skillsets]
:language: python
:dedent: 4
:caption: List SearchIndexerSkillsets
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.skillsets.list(**kwargs)
return result.skillsets
@distributed_trace_async
async def get_skillset_names(self, **kwargs):
# type: (**Any) -> List[str]
"""List the SearchIndexerSkillset names in an Azure Search service.
:return: List of SearchIndexerSkillset names
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
result = await self._client.skillsets.list(**kwargs)
return [x.name for x in result.skillsets]
@distributed_trace_async
async def get_skillset(self, name, **kwargs):
# type: (str, **Any) -> SearchIndexerSkillset
"""Retrieve a named SearchIndexerSkillset in an Azure Search service
:param name: The name of the SearchIndexerSkillset to get
:type name: str
:return: The retrieved SearchIndexerSkillset
:rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset
:raises: :class:`~azure.core.exceptions.ResourceNotFoundError`
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_skillset_operations_async.py
:start-after: [START get_skillset]
:end-before: [END get_skillset]
:language: python
:dedent: 4
:caption: Get a SearchIndexerSkillset
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
return await self._client.skillsets.get(name, **kwargs)
@distributed_trace_async
async def delete_skillset(self, skillset, **kwargs):
# type: (Union[str, SearchIndexerSkillset], **Any) -> None
"""Delete a named SearchIndexerSkillset in an Azure Search service. To use access conditions,
the SearchIndexerSkillset model must be provided instead of the name. It is enough to provide
the name of the skillset to delete unconditionally
:param name: The SearchIndexerSkillset to delete
:type name: str or ~search.models.SearchIndexerSkillset
:keyword match_condition: The match condition to use upon the etag
:type match_condition: ~azure.core.MatchConditions
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_skillset_operations_async.py
:start-after: [START delete_skillset]
:end-before: [END delete_skillset]
:language: python
:dedent: 4
:caption: Delete a SearchIndexerSkillset
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map, access_condition = get_access_conditions(
skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
try:
name = skillset.name
except AttributeError:
name = skillset
await self._client.skillsets.delete(name, error_map=error_map, **kwargs)
@distributed_trace_async
async def create_skillset(self, name, skills, description, **kwargs):
# type: (str, Sequence[SearchIndexerSkill], str, **Any) -> SearchIndexerSkillset
"""Create a new SearchIndexerSkillset in an Azure Search service
:param name: The name of the SearchIndexerSkillset to create
:type name: str
:param skills: A list of Skill objects to include in the SearchIndexerSkillset
        :type skills: List[SearchIndexerSkill]
:param description: A description for the SearchIndexerSkillset
:type description: Optional[str]
:return: The created SearchIndexerSkillset
:rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset
.. admonition:: Example:
.. literalinclude:: ../samples/async_samples/sample_skillset_operations_async.py
:start-after: [START create_skillset]
:end-before: [END create_skillset]
:language: python
:dedent: 4
:caption: Create a SearchIndexerSkillset
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
skillset = SearchIndexerSkillset(
name=name, skills=list(skills), description=description
)
return await self._client.skillsets.create(skillset, **kwargs)
@distributed_trace_async
async def create_or_update_skillset(self, name, **kwargs):
# type: (str, **Any) -> SearchIndexerSkillset
"""Create a new SearchIndexerSkillset in an Azure Search service, or update an
existing one. The skillset param must be provided to perform the
operation with access conditions.
:param name: The name of the SearchIndexerSkillset to create or update
:type name: str
:keyword skills: A list of Skill objects to include in the SearchIndexerSkillset
:type skills: List[SearchIndexerSkill]
:keyword description: A description for the SearchIndexerSkillset
:type description: Optional[str]
:keyword skillset: A SearchIndexerSkillset to create or update.
:type skillset: :class:`~azure.search.documents.SearchIndexerSkillset`
:keyword match_condition: The match condition to use upon the etag
:type match_condition: ~azure.core.MatchConditions
:return: The created or updated SearchIndexerSkillset
:rtype: ~azure.search.documents.indexes.models.SearchIndexerSkillset
        If a `skillset` is passed in, any optional `skills` or
        `description` parameter values will override it.
"""
kwargs["headers"] = self._merge_client_headers(kwargs.get("headers"))
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError}
access_condition = None
if "skillset" in kwargs:
skillset = kwargs.pop("skillset")
error_map, access_condition = get_access_conditions(
skillset, kwargs.pop("match_condition", MatchConditions.Unconditionally)
)
kwargs.update(access_condition)
skillset = SearchIndexerSkillset.deserialize(skillset.serialize())
skillset.name = name
for param in ("description", "skills"):
if param in kwargs:
setattr(skillset, param, kwargs.pop(param))
else:
skillset = SearchIndexerSkillset(
name=name,
description=kwargs.pop("description", None),
skills=kwargs.pop("skills", None),
)
return await self._client.skillsets.create_or_update(
skillset_name=name, skillset=skillset, error_map=error_map, **kwargs
)
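# Hypothetical usage sketch (not part of the original module): endpoint and
# key values are placeholders; applications normally import this client from
# the public azure.search.documents.indexes.aio namespace instead.
async def _example_list_indexer_names(endpoint: str, api_key: str):
    from azure.core.credentials import AzureKeyCredential
    client = SearchIndexerClient(endpoint, AzureKeyCredential(api_key))
    async with client:
        return await client.get_indexer_names()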
| 42.99661
| 110
| 0.654762
|
ddfb1bc39fc47fb4be3bd1dd75efbbfdc8e62294
| 11,421
|
py
|
Python
|
nemo_text_processing/text_normalization/en/taggers/tokenize_and_classify_with_audio.py
|
sudhakarsingh27/NeMo
|
9d75710f58d59d778e88aa4ddbaf28a022801620
|
[
"Apache-2.0"
] | 4,145
|
2019-09-13T08:29:43.000Z
|
2022-03-31T18:31:44.000Z
|
nemo_text_processing/text_normalization/en/taggers/tokenize_and_classify_with_audio.py
|
sudhakarsingh27/NeMo
|
9d75710f58d59d778e88aa4ddbaf28a022801620
|
[
"Apache-2.0"
] | 2,031
|
2019-09-17T16:51:39.000Z
|
2022-03-31T23:52:41.000Z
|
nemo_text_processing/text_normalization/en/taggers/tokenize_and_classify_with_audio.py
|
sudhakarsingh27/NeMo
|
9d75710f58d59d778e88aa4ddbaf28a022801620
|
[
"Apache-2.0"
] | 1,041
|
2019-09-13T10:08:21.000Z
|
2022-03-30T06:37:38.000Z
|
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
# Copyright 2015 and onwards Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from nemo_text_processing.text_normalization.en.graph_utils import (
NEMO_CHAR,
NEMO_DIGIT,
NEMO_NOT_SPACE,
NEMO_WHITE_SPACE,
GraphFst,
delete_extra_space,
delete_space,
generator_main,
)
from nemo_text_processing.text_normalization.en.taggers.abbreviation import AbbreviationFst
from nemo_text_processing.text_normalization.en.taggers.cardinal import CardinalFst
from nemo_text_processing.text_normalization.en.taggers.date import DateFst
from nemo_text_processing.text_normalization.en.taggers.decimal import DecimalFst
from nemo_text_processing.text_normalization.en.taggers.electronic import ElectronicFst
from nemo_text_processing.text_normalization.en.taggers.fraction import FractionFst
from nemo_text_processing.text_normalization.en.taggers.measure import MeasureFst
from nemo_text_processing.text_normalization.en.taggers.money import MoneyFst
from nemo_text_processing.text_normalization.en.taggers.ordinal import OrdinalFst
from nemo_text_processing.text_normalization.en.taggers.punctuation import PunctuationFst
from nemo_text_processing.text_normalization.en.taggers.roman import RomanFst
from nemo_text_processing.text_normalization.en.taggers.telephone import TelephoneFst
from nemo_text_processing.text_normalization.en.taggers.time import TimeFst
from nemo_text_processing.text_normalization.en.taggers.whitelist import WhiteListFst
from nemo_text_processing.text_normalization.en.taggers.word import WordFst
from nemo_text_processing.text_normalization.en.verbalizers.abbreviation import AbbreviationFst as vAbbreviation
from nemo_text_processing.text_normalization.en.verbalizers.cardinal import CardinalFst as vCardinal
from nemo_text_processing.text_normalization.en.verbalizers.date import DateFst as vDate
from nemo_text_processing.text_normalization.en.verbalizers.decimal import DecimalFst as vDecimal
from nemo_text_processing.text_normalization.en.verbalizers.electronic import ElectronicFst as vElectronic
from nemo_text_processing.text_normalization.en.verbalizers.fraction import FractionFst as vFraction
from nemo_text_processing.text_normalization.en.verbalizers.measure import MeasureFst as vMeasure
from nemo_text_processing.text_normalization.en.verbalizers.money import MoneyFst as vMoney
from nemo_text_processing.text_normalization.en.verbalizers.ordinal import OrdinalFst as vOrdinal
from nemo_text_processing.text_normalization.en.verbalizers.roman import RomanFst as vRoman
from nemo_text_processing.text_normalization.en.verbalizers.telephone import TelephoneFst as vTelephone
from nemo_text_processing.text_normalization.en.verbalizers.time import TimeFst as vTime
from nemo.utils import logging
try:
import pynini
from pynini.lib import pynutil
PYNINI_AVAILABLE = True
except (ModuleNotFoundError, ImportError):
PYNINI_AVAILABLE = False
class ClassifyFst(GraphFst):
"""
Final class that composes all other classification grammars. This class can process an entire sentence including punctuation.
    For deployment, this grammar will be compiled and exported to an OpenFst Finite State Archive (FAR) file.
    More details on deployment can be found at NeMo/tools/text_processing_deployment.
Args:
input_case: accepting either "lower_cased" or "cased" input.
deterministic: if True will provide a single transduction option,
for False multiple options (used for audio-based normalization)
cache_dir: path to a dir with .far grammar file. Set to None to avoid using cache.
overwrite_cache: set to True to overwrite .far files
whitelist: path to a file with whitelist replacements
"""
def __init__(
self,
input_case: str,
deterministic: bool = True,
cache_dir: str = None,
overwrite_cache: bool = True,
whitelist: str = None,
):
super().__init__(name="tokenize_and_classify", kind="classify", deterministic=deterministic)
far_file = None
if cache_dir is not None and cache_dir != 'None':
os.makedirs(cache_dir, exist_ok=True)
whitelist_file = os.path.basename(whitelist) if whitelist else ""
far_file = os.path.join(
cache_dir, f"_{input_case}_en_tn_{deterministic}_deterministic{whitelist_file}.far"
)
if not overwrite_cache and far_file and os.path.exists(far_file):
self.fst = pynini.Far(far_file, mode='r')['tokenize_and_classify']
no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
self.fst_no_digits = pynini.compose(self.fst, no_digits).optimize()
logging.info(f'ClassifyFst.fst was restored from {far_file}.')
else:
logging.info(f'Creating ClassifyFst grammars. This might take some time...')
# TAGGERS
cardinal = CardinalFst(deterministic=deterministic)
cardinal_graph = cardinal.fst
ordinal = OrdinalFst(cardinal=cardinal, deterministic=deterministic)
ordinal_graph = ordinal.fst
decimal = DecimalFst(cardinal=cardinal, deterministic=deterministic)
decimal_graph = decimal.fst
fraction = FractionFst(deterministic=deterministic, cardinal=cardinal)
fraction_graph = fraction.fst
measure = MeasureFst(cardinal=cardinal, decimal=decimal, fraction=fraction, deterministic=deterministic)
measure_graph = measure.fst
date_graph = DateFst(cardinal=cardinal, deterministic=deterministic).fst
word_graph = WordFst(deterministic=deterministic).graph
time_graph = TimeFst(cardinal=cardinal, deterministic=deterministic).fst
telephone_graph = TelephoneFst(deterministic=deterministic).fst
electronic_graph = ElectronicFst(deterministic=deterministic).fst
money_graph = MoneyFst(cardinal=cardinal, decimal=decimal, deterministic=deterministic).fst
whitelist = WhiteListFst(input_case=input_case, deterministic=deterministic, input_file=whitelist)
whitelist_graph = whitelist.graph
punct_graph = PunctuationFst(deterministic=deterministic).graph
# VERBALIZERS
cardinal = vCardinal(deterministic=deterministic)
v_cardinal_graph = cardinal.fst
decimal = vDecimal(cardinal=cardinal, deterministic=deterministic)
v_decimal_graph = decimal.fst
ordinal = vOrdinal(deterministic=deterministic)
v_ordinal_graph = ordinal.fst
fraction = vFraction(deterministic=deterministic)
v_fraction_graph = fraction.fst
v_telephone_graph = vTelephone(deterministic=deterministic).fst
v_electronic_graph = vElectronic(deterministic=deterministic).fst
measure = vMeasure(decimal=decimal, cardinal=cardinal, fraction=fraction, deterministic=deterministic)
v_measure_graph = measure.fst
v_time_graph = vTime(deterministic=deterministic).fst
v_date_graph = vDate(ordinal=ordinal, deterministic=deterministic).fst
v_money_graph = vMoney(decimal=decimal, deterministic=deterministic).fst
v_roman_graph = vRoman(deterministic=deterministic).fst
v_abbreviation = vAbbreviation(deterministic=deterministic).fst
classify_and_verbalize = (
pynutil.add_weight(whitelist_graph, 1.01)
| pynutil.add_weight(pynini.compose(time_graph, v_time_graph), 1.1)
| pynutil.add_weight(pynini.compose(decimal_graph, v_decimal_graph), 1.1)
| pynutil.add_weight(pynini.compose(measure_graph, v_measure_graph), 1.1)
| pynutil.add_weight(pynini.compose(cardinal_graph, v_cardinal_graph), 1.1)
| pynutil.add_weight(pynini.compose(ordinal_graph, v_ordinal_graph), 1.1)
| pynutil.add_weight(pynini.compose(telephone_graph, v_telephone_graph), 1.1)
| pynutil.add_weight(pynini.compose(electronic_graph, v_electronic_graph), 1.1)
| pynutil.add_weight(pynini.compose(fraction_graph, v_fraction_graph), 1.1)
| pynutil.add_weight(pynini.compose(money_graph, v_money_graph), 1.1)
| pynutil.add_weight(word_graph, 100)
| pynutil.add_weight(pynini.compose(date_graph, v_date_graph), 1.09)
).optimize()
if not deterministic:
roman_graph = RomanFst(deterministic=deterministic).fst
# the weight matches the word_graph weight for "I" cases in long sentences with multiple semiotic tokens
classify_and_verbalize |= pynutil.add_weight(pynini.compose(roman_graph, v_roman_graph), 100)
abbreviation_graph = AbbreviationFst(whitelist=whitelist, deterministic=deterministic).fst
classify_and_verbalize |= pynutil.add_weight(pynini.compose(abbreviation_graph, v_abbreviation), 100)
punct_only = pynutil.add_weight(punct_graph, weight=2.1)
punct = pynini.closure(
pynini.compose(pynini.closure(NEMO_WHITE_SPACE, 1), delete_extra_space)
| (pynutil.insert(" ") + punct_only),
1,
)
token_plus_punct = (
pynini.closure(punct + pynutil.insert(" "))
+ classify_and_verbalize
+ pynini.closure(pynutil.insert(" ") + punct)
)
graph = token_plus_punct + pynini.closure(
(
pynini.compose(pynini.closure(NEMO_WHITE_SPACE, 1), delete_extra_space)
| (pynutil.insert(" ") + punct + pynutil.insert(" "))
)
+ token_plus_punct
)
graph |= punct_only + pynini.closure(punct)
graph = delete_space + graph + delete_space
remove_extra_spaces = pynini.closure(NEMO_NOT_SPACE, 1) + pynini.closure(
delete_extra_space + pynini.closure(NEMO_NOT_SPACE, 1)
)
remove_extra_spaces |= (
pynini.closure(pynutil.delete(" "), 1)
+ pynini.closure(NEMO_NOT_SPACE, 1)
+ pynini.closure(delete_extra_space + pynini.closure(NEMO_NOT_SPACE, 1))
)
graph = pynini.compose(graph.optimize(), remove_extra_spaces).optimize()
self.fst = graph
no_digits = pynini.closure(pynini.difference(NEMO_CHAR, NEMO_DIGIT))
self.fst_no_digits = pynini.compose(graph, no_digits).optimize()
if far_file:
generator_main(far_file, {"tokenize_and_classify": self.fst})
logging.info(f'ClassifyFst grammars are saved to {far_file}.')
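# Hypothetical usage sketch (not part of the original module): building the
# combined tagger+verbalizer grammar requires pynini to be installed, and it
# may take a while; normalization candidates can then be extracted from
# classify.fst with the pynini rewrite/shortest-path helpers.
if __name__ == "__main__":
    classify = ClassifyFst(input_case="cased", deterministic=False,
                           cache_dir=None, overwrite_cache=True, whitelist=None)
    logging.info(f"grammar built with {classify.fst.num_states()} states")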
| 53.619718
| 129
| 0.713773
|
e98bcd7c72803fcaa77f127e5682b22ad690110a
| 15,296
|
py
|
Python
|
scripts/sct_analyze_texture.py
|
valosekj/spinalcordtoolbox
|
266bfc88d6eb6e96a2c2f1ec88c2e185c6f88e09
|
[
"MIT"
] | 1
|
2020-05-17T00:39:47.000Z
|
2020-05-17T00:39:47.000Z
|
scripts/sct_analyze_texture.py
|
valosekj/spinalcordtoolbox
|
266bfc88d6eb6e96a2c2f1ec88c2e185c6f88e09
|
[
"MIT"
] | null | null | null |
scripts/sct_analyze_texture.py
|
valosekj/spinalcordtoolbox
|
266bfc88d6eb6e96a2c2f1ec88c2e185c6f88e09
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Analyse texture
#
# Copyright (c) 2017 Polytechnique Montreal <www.neuro.polymtl.ca>
# Author: Charley
# Modified: 2017-06-22
#
# About the license: see the file LICENSE.TXT
from __future__ import absolute_import
import os
import shutil
import sys
import numpy as np
import itertools
import argparse
import tqdm
from skimage.feature import greycomatrix, greycoprops
import sct_utils as sct
import spinalcordtoolbox.image as msct_image
from spinalcordtoolbox.image import Image
from spinalcordtoolbox.utils import Metavar, SmartFormatter, ActionCreateFolder
def get_parser():
# Initialize the parser
parser = argparse.ArgumentParser(
        description='Extraction of grey level co-occurrence matrix (GLCM) texture features from an image within a given '
                    'mask. The texture features are those defined in the scikit-image implementation: '
                    'http://scikit-image.org/docs/dev/api/skimage.feature.html#greycoprops. This function outputs '
                    'one nifti file per texture metric (' + ParamGLCM().feature + ') and per orientation called '
                    'fnameInput_feature_distance_angle.nii.gz. Also, a file averaging each metric across the angles, '
                    'called fnameInput_feature_distance_mean.nii.gz, is output.',
add_help=None,
formatter_class=SmartFormatter,
prog=os.path.basename(__file__).strip(".py")
)
mandatoryArguments = parser.add_argument_group("\nMANDATORY ARGUMENTS")
mandatoryArguments.add_argument(
"-i",
required=True,
help='Image to analyze. Example: t2.nii.gz',
metavar=Metavar.file,
)
mandatoryArguments.add_argument(
"-m",
required=True,
metavar=Metavar.file,
        help='Image mask. Example: t2_seg.nii.gz',
)
    optional = parser.add_argument_group("\nOPTIONAL ARGUMENTS")
optional.add_argument(
"-h",
"--help",
action="help",
help="Show this help message and exit")
optional.add_argument(
"-feature",
metavar=Metavar.str,
help='List of GLCM texture features (separate arguments with ",").',
required=False,
default=ParamGLCM().feature)
optional.add_argument(
"-distance",
metavar=Metavar.int,
help='Distance offset for GLCM computation, in pixel (suggested distance values between 1 and 5). Example: 1',
required=False,
default=ParamGLCM().distance)
optional.add_argument(
"-angle",
metavar=Metavar.list,
        help='List of angles for GLCM computation, separate arguments with ",", in degrees (suggested angle values '
             'between 0 and 179). Example: 0,90',
required=False,
default=ParamGLCM().angle)
optional.add_argument(
"-dim",
help="Compute the texture on the axial (ax), sagittal (sag) or coronal (cor) slices.",
required=False,
choices=('ax', 'sag', 'cor'),
default=Param().dim)
optional.add_argument(
"-ofolder",
metavar=Metavar.folder,
help='Output folder. Example: /my_texture/',
action=ActionCreateFolder,
required=False,
default=Param().path_results)
optional.add_argument(
"-igt",
metavar=Metavar.str,
help="File name of ground-truth texture metrics.",
required=False)
optional.add_argument(
"-r",
help="Remove temporary files.",
required=False,
type=int,
choices=(0, 1),
default=int(Param().rm_tmp))
optional.add_argument(
"-v",
help="Verbose: 0 = nothing, 1 = classic, 2 = expended.",
required=False,
type=int,
choices=(0, 1, 2),
default=Param().verbose)
return parser
class ExtractGLCM:
def __init__(self, param=None, param_glcm=None):
self.param = param if param is not None else Param()
self.param_glcm = param_glcm if param_glcm is not None else ParamGLCM()
# create tmp directory
self.tmp_dir = sct.tmp_create(verbose=self.param.verbose) # path to tmp directory
if self.param.dim == 'ax':
self.orientation_extraction = 'RPI'
elif self.param.dim == 'sag':
self.orientation_extraction = 'IPR'
else:
self.orientation_extraction = 'IRP'
# metric_lst=['property_distance_angle']
self.metric_lst = []
for m in list(itertools.product(self.param_glcm.feature.split(','), self.param_glcm.angle.split(','))):
text_name = m[0] if m[0].upper() != 'asm'.upper() else m[0].upper()
self.metric_lst.append(text_name + '_' + str(self.param_glcm.distance) + '_' + str(m[1]))
# dct_im_seg{'im': list_of_axial_slice, 'seg': list_of_axial_masked_slice}
self.dct_im_seg = {'im': None, 'seg': None}
# to re-orient the data at the end if needed
self.orientation_im = Image(self.param.fname_im).orientation
self.fname_metric_lst = {}
def extract(self):
self.ifolder2tmp()
# fill self.dct_metric --> for each key_metric: create an Image with zero values
# self.init_metric_im()
# fill self.dct_im_seg --> extract axial slices from self.param.fname_im and self.param.fname_seg
self.extract_slices()
# compute texture
self.compute_texture()
# reorient data
if self.orientation_im != self.orientation_extraction:
self.reorient_data()
# mean across angles
self.mean_angle()
# save results to ofolder
self.tmp2ofolder()
return [os.path.join(self.param.path_results, self.fname_metric_lst[f]) for f in self.fname_metric_lst]
def tmp2ofolder(self):
os.chdir(self.curdir) # go back to original directory
sct.printv('\nSave resulting files...', self.param.verbose, 'normal')
for f in self.fname_metric_lst: # Copy from tmp folder to ofolder
sct.copy(os.path.join(self.tmp_dir, self.fname_metric_lst[f]),
os.path.join(self.param.path_results, self.fname_metric_lst[f]))
def ifolder2tmp(self):
self.curdir = os.getcwd()
# copy input image
if self.param.fname_im is not None:
sct.copy(self.param.fname_im, self.tmp_dir)
self.param.fname_im = ''.join(sct.extract_fname(self.param.fname_im)[1:])
else:
sct.printv('ERROR: No input image', self.param.verbose, 'error')
# copy masked image
if self.param.fname_seg is not None:
sct.copy(self.param.fname_seg, self.tmp_dir)
self.param.fname_seg = ''.join(sct.extract_fname(self.param.fname_seg)[1:])
else:
sct.printv('ERROR: No mask image', self.param.verbose, 'error')
os.chdir(self.tmp_dir) # go to tmp directory
def mean_angle(self):
im_metric_lst = [self.fname_metric_lst[f].split('_' + str(self.param_glcm.distance) + '_')[0] + '_' for f in self.fname_metric_lst]
im_metric_lst = list(set(im_metric_lst))
sct.printv('\nMean across angles...', self.param.verbose, 'normal')
extension = sct.extract_fname(self.param.fname_im)[2]
for im_m in im_metric_lst: # Loop across GLCM texture properties
# List images to mean
im2mean_lst = [im_m + str(self.param_glcm.distance) + '_' + a + extension for a in self.param_glcm.angle.split(',')]
# Average across angles and save it as wrk_folder/fnameIn_feature_distance_mean.extension
fname_out = im_m + str(self.param_glcm.distance) + '_mean' + extension
sct.run('sct_image -i ' + ' '.join(im2mean_lst) + ' -concat t -o ' + fname_out)
sct.run('sct_maths -i ' + fname_out + ' -mean t -o ' + fname_out)
self.fname_metric_lst[im_m + str(self.param_glcm.distance) + '_mean'] = fname_out
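    # Hedged illustration (hypothetical helper, never called by the pipeline):
    # the two shell calls above concatenate the per-angle volumes along a new
    # axis and average them; on in-memory numpy arrays the equivalent would be
    # a stack followed by a mean along that axis.
    def _demo_mean_across_angles(self, per_angle_arrays):
        return np.mean(np.stack(per_angle_arrays, axis=-1), axis=-1)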
def extract_slices(self):
# open image and re-orient it to RPI if needed
im, seg = Image(self.param.fname_im), Image(self.param.fname_seg)
if self.orientation_im != self.orientation_extraction:
im.change_orientation(self.orientation_extraction)
seg.change_orientation(self.orientation_extraction)
# extract axial slices in self.dct_im_seg
self.dct_im_seg['im'], self.dct_im_seg['seg'] = [im.data[:, :, z] for z in range(im.dim[2])], [seg.data[:, :, z] for z in range(im.dim[2])]
# def init_metric_im(self):
# # open image and re-orient it to RPI if needed
# im_tmp = Image(self.param.fname_im)
# if self.orientation_im != self.orientation_extraction:
# im_tmp = msct_image.change_orientation(im_tmp, self.orientation_extraction)
# # create Image objects with zeros values for each output image needed
# for m in self.metric_lst:
# im_2save = msct_image.zeros_like(im_tmp, dtype=np.float64)
# fname_out = sct.add_suffix(''.join(sct.extract_fname(self.param.fname_im)[1:]), '_' + m)
# im_2save.save(fname_out)
# self.fname_metric_lst[m] = fname_out
def compute_texture(self):
offset = int(self.param_glcm.distance)
sct.printv('\nCompute texture metrics...', self.param.verbose, 'normal')
# open image and re-orient it to RPI if needed
im_tmp = Image(self.param.fname_im)
if self.orientation_im != self.orientation_extraction:
im_tmp.change_orientation(self.orientation_extraction)
dct_metric = {}
for m in self.metric_lst:
im_2save = msct_image.zeros_like(im_tmp, dtype='float64')
dct_metric[m] = im_2save
# dct_metric[m] = Image(self.fname_metric_lst[m])
with tqdm.tqdm() as pbar:
for im_z, seg_z, zz in zip(self.dct_im_seg['im'], self.dct_im_seg['seg'], range(len(self.dct_im_seg['im']))):
for xx in range(im_z.shape[0]):
for yy in range(im_z.shape[1]):
if not seg_z[xx, yy]:
continue
if xx < offset or yy < offset:
continue
if xx > (im_z.shape[0] - offset - 1) or yy > (im_z.shape[1] - offset - 1):
continue # to check if the whole glcm_window is in the axial_slice
if False in np.unique(seg_z[xx - offset: xx + offset + 1, yy - offset: yy + offset + 1]):
continue # to check if the whole glcm_window is in the mask of the axial_slice
glcm_window = im_z[xx - offset: xx + offset + 1, yy - offset: yy + offset + 1]
glcm_window = glcm_window.astype(np.uint8)
dct_glcm = {}
for a in self.param_glcm.angle.split(','): # compute the GLCM for self.param_glcm.distance and for each self.param_glcm.angle
dct_glcm[a] = greycomatrix(glcm_window,
[self.param_glcm.distance], [np.radians(int(a))],
symmetric=self.param_glcm.symmetric,
normed=self.param_glcm.normed)
for m in self.metric_lst: # compute the GLCM property (m.split('_')[0]) of the voxel xx,yy,zz
dct_metric[m].data[xx, yy, zz] = greycoprops(dct_glcm[m.split('_')[2]], m.split('_')[0])[0][0]
pbar.set_postfix(pos="{}/{}".format(zz, len(self.dct_im_seg["im"])))
pbar.update(1)
for m in self.metric_lst:
fname_out = sct.add_suffix("".join(sct.extract_fname(self.param.fname_im)[1:]), '_' + m)
dct_metric[m].save(fname_out)
self.fname_metric_lst[m] = fname_out
def reorient_data(self):
for f in self.fname_metric_lst:
os.rename(self.fname_metric_lst[f], sct.add_suffix("".join(sct.extract_fname(self.param.fname_im)[1:]), '_2reorient'))
im = Image(sct.add_suffix("".join(sct.extract_fname(self.param.fname_im)[1:]), '_2reorient')) \
.change_orientation(self.orientation_im) \
.save(self.fname_metric_lst[f])
class Param:
def __init__(self):
self.fname_im = None
self.fname_seg = None
self.path_results = './texture/'
self.verbose = 1
self.dim = 'ax'
self.rm_tmp = True
class ParamGLCM(object):
    def __init__(self, symmetric=True, normed=True, feature='contrast,dissimilarity,homogeneity,energy,correlation,ASM', distance=1, angle='0,45,90,135'):
        self.symmetric = symmetric  # If True, the output matrix P[:, :, d, theta] is symmetric.
        self.normed = normed  # If True, normalize each matrix P[:, :, d, theta] by dividing by the total number of accumulated co-occurrences for the given offset. The elements of the resulting matrix sum to 1.
        self.feature = feature  # The property formulae are detailed here: http://scikit-image.org/docs/dev/api/skimage.feature.html#greycoprops
        self.distance = distance  # Size of the window: distance = 1 --> a reference pixel and its immediate neighbor
        self.angle = angle  # Rotation angles for co-occurrence matrix
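# Hedged illustration (not part of the original pipeline): a minimal sketch of
# how one texture value is obtained for a single window, mirroring the calls in
# ExtractGLCM.compute_texture(). It assumes greycomatrix, greycoprops and numpy
# are imported at the top of this script (they are used above); the window
# values and the helper name are made up for the example and never used.
def _demo_glcm_single_window():
    window = np.array([[0, 0, 1, 1],
                       [0, 0, 1, 1],
                       [0, 2, 2, 2],
                       [2, 2, 3, 3]], dtype=np.uint8)
    # GLCM for distance 1 and angle 0 degrees, i.e. the ParamGLCM defaults
    glcm = greycomatrix(window, [1], [np.radians(0)], symmetric=True, normed=True)
    # Scalar texture property for this window, e.g. 'contrast'
    return greycoprops(glcm, 'contrast')[0][0]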
def main(args=None):
"""
Main function
:param args:
:return:
"""
# get parser args
if args is None:
args = None if sys.argv[1:] else ['--help']
parser = get_parser()
arguments = parser.parse_args(args=args)
# create param object
param = Param()
param_glcm = ParamGLCM()
    # set param arguments as provided by the user
param.fname_im = arguments.i
param.fname_seg = arguments.m
if arguments.ofolder is not None:
param.path_results = arguments.ofolder
if not os.path.isdir(param.path_results) and os.path.exists(param.path_results):
sct.printv("ERROR output directory %s is not a valid directory" % param.path_results, 1, 'error')
if not os.path.exists(param.path_results):
os.makedirs(param.path_results)
if arguments.feature is not None:
param_glcm.feature = arguments.feature
if arguments.distance is not None:
param_glcm.distance = arguments.distance
if arguments.angle is not None:
param_glcm.angle = arguments.angle
if arguments.dim is not None:
param.dim = arguments.dim
if arguments.r is not None:
param.rm_tmp = bool(arguments.r)
verbose = arguments.v
sct.init_sct(log_level=verbose, update=True) # Update log level
# create the GLCM constructor
glcm = ExtractGLCM(param=param, param_glcm=param_glcm)
# run the extraction
fname_out_lst = glcm.extract()
# remove tmp_dir
if param.rm_tmp:
sct.rmtree(glcm.tmp_dir)
sct.printv('\nDone! To view results, type:', param.verbose)
    sct.printv('fsleyes ' + arguments.i + ' ' + ' -cm red-yellow -a 70.0 '.join(fname_out_lst) + ' -cm red-yellow -a 70.0 & \n', param.verbose, 'info')
if __name__ == "__main__":
sct.init_sct()
main()
| avg_line_length: 41.11828 | max_line_length: 209 | alphanum_fraction: 0.620358 |

| hexsha: 1a004c3a867509e5c9c83f68e7dfe55dcb2b3ce7 | size: 552 | ext: py | lang: Python |
| max_stars_repo_path: LD2/Python/Hadamard matrix.py | max_stars_repo_name: 0x4C4A/SS-2014 | max_stars_repo_head_hexsha: 54bd34da340cba7b12717b87c5cd4efa3d93e35e | max_stars_repo_licenses: ["CC0-1.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null |
| max_issues_repo_path: LD2/Python/Hadamard matrix.py | max_issues_repo_name: 0x4C4A/SS-2014 | max_issues_repo_head_hexsha: 54bd34da340cba7b12717b87c5cd4efa3d93e35e | max_issues_repo_licenses: ["CC0-1.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null |
| max_forks_repo_path: LD2/Python/Hadamard matrix.py | max_forks_repo_name: 0x4C4A/SS-2014 | max_forks_repo_head_hexsha: 54bd34da340cba7b12717b87c5cd4efa3d93e35e | max_forks_repo_licenses: ["CC0-1.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null |
import math

# Build a Hadamard matrix of order 2^5 using Sylvester's construction:
# starting from H(1) = [1], each stage doubles the matrix as [[H, H], [H, -H]].
matrix_size = pow(2, 5)

# Allocate a matrix_size x matrix_size array of zeros
matrix = [[0 for x in range(matrix_size)] for x in range(matrix_size)]
matrix[0][0] = 1
for stage in range(0, int(math.log(matrix_size, 2))):
    block_edge = pow(2, stage)
    for x in range(0, block_edge):
        for y in range(0, block_edge):
            # Copy the current block to the right and below, and negate it
            # in the bottom-right quadrant
            matrix[x + block_edge][y] = matrix[x][y]
            matrix[x][y + block_edge] = matrix[x][y]
            matrix[x + block_edge][y + block_edge] = -matrix[x][y]

# Print the matrix, one row per line, with right-aligned entries
for y in range(0, matrix_size):
    for x in range(0, matrix_size):
        print('{:>2}'.format(matrix[x][y]), end=' ')
    print('\n')
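# Hedged sanity check (illustrative addition, assumes numpy is installed; the
# original script has no such dependency): a Hadamard matrix H of order n
# satisfies H @ H.T == n * I.
import numpy as np
H = np.array(matrix)
assert (H @ H.T == matrix_size * np.eye(matrix_size, dtype=int)).all(), "matrix is not Hadamard"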
| avg_line_length: 25.090909 | max_line_length: 73 | alphanum_fraction: 0.655797 |