code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1 value |
|---|---|---|---|---|---|
from __future__ import annotations
import logging
import os
from dataclasses import dataclass, field
from typing import Iterable
import numpy as np
from sample_id import util
logger = logging.getLogger(__name__)
def from_file(audio_path, id, sr, hop_length=512, feature="sift", dedupe=False, **kwargs) -> Fingerprint:
    """Generate a fingerprint from an audio file.

    Args:
        audio_path: Path of the audio file to fingerprint.
        id: Identifier stored on the resulting fingerprint.
        sr: Sample rate to load the audio at.
        hop_length: Spectrogram hop length, forwarded to the feature extractor.
        feature: Keypoint/descriptor algorithm; only "sift" is implemented.
        dedupe: If True, collapse near-duplicate keypoints after extraction.
        **kwargs: Passed through to the feature implementation.

    Returns:
        A Fingerprint built by the selected feature implementation.

    Raises:
        NotImplementedError: If ``feature`` names an unsupported algorithm.
    """
    if feature != "sift":
        # Name the unsupported feature so the failure is actionable
        # (matches sift.from_file, which raises NotImplementedError(implementation)).
        raise NotImplementedError(feature)
    from . import sift

    fp = sift.from_file(audio_path, id, sr, hop_length=hop_length, **kwargs)
    if dedupe:
        fp.remove_similar_keypoints()
    return fp
def load(filepath: str) -> Fingerprint:
    """Load a fingerprint from file.

    Reads an npz archive written by Fingerprint.save: the constructor
    argument names are introspected from Fingerprint.__init__ and looked up
    as keys in the archive, so the two must stay in sync.
    """
    with np.load(filepath) as data:
        # co_varnames starts with (self, arg1, ...); drop self.
        constructor_args = Fingerprint.__init__.__code__.co_varnames[1:]
        # TODO: replace this hack to be backwards compatible with misnamed hop arg
        if "hop" in data.keys():
            constructor_args = ["hop" if arg == "hop_length" else arg for arg in constructor_args]
        # Missing keys become None (data.get), matching save()'s None-stripping.
        arg_data = tuple(data.get(arg) for arg in constructor_args)
        # Unwrap 0-d arrays back into Python scalars via .item().
        arg_values = [arg if arg is None or arg.shape else arg.item() for arg in arg_data]
        return Fingerprint(*arg_values)
class Fingerprint:
    """A set of spectrogram keypoints plus row-aligned descriptors for one track."""

    # Placeholder; subclasses that keep the spectrogram may assign it.
    spectrogram = NotImplemented

    def __init__(self, keypoints, descriptors, id, sr, hop_length, is_deduped=False, octave_bins=None):
        """
        Args:
            keypoints: Array of keypoints, one row per keypoint.
            descriptors: Descriptor array, row-aligned with ``keypoints``.
            id: External identifier for the fingerprinted audio.
            sr: Sample rate the audio was loaded at.
            hop_length: Spectrogram hop length (frames -> samples).
            is_deduped: Whether near-duplicate keypoints were already removed.
            octave_bins: Frequency bins per octave of the source spectrogram, if known.
        """
        self.keypoints = keypoints
        self.descriptors = descriptors
        self.id = id
        self.sr = sr
        self.hop_length = hop_length
        self.is_deduped = is_deduped
        # Keypoint count at construction time; NOT updated by remove_similar_keypoints.
        self.size = len(keypoints)
        self.octave_bins = octave_bins

    def remove_similar_keypoints(self, rounding_factor: float = 10.0):
        """Drop keypoints whose descriptors collide after quantisation.

        Descriptors are rounded to multiples of ``rounding_factor``; of each
        set of colliding rows only the first (original order) survives.
        """
        if len(self.descriptors) > 0:
            logger.info(f"{self.id}: Removing duplicate/similar keypoints...")
            rounded = (self.descriptors / rounding_factor).round() * rounding_factor
            # np.unique(..., return_index=True) yields the first index of each distinct row.
            _, idx = np.unique(rounded, axis=0, return_index=True)
            # sorted(idx) preserves the original keypoint ordering.
            deduped_descriptors = self.descriptors[sorted(idx)]
            deduped_keypoints = self.keypoints[sorted(idx)]
            logger.info(f"{self.id}: Removed {self.keypoints.shape[0] - idx.shape[0]} duplicate keypoints")
            self.keypoints = deduped_keypoints
            self.descriptors = deduped_descriptors
            self.is_deduped = True

    def keypoint_ms(self, kp) -> int:
        """Convert a keypoint's frame offset (kp[0]) to milliseconds."""
        return int(kp[0] * self.hop_length * 1000.0 / self.sr)

    def keypoint_index_ids(self):
        """Return this fingerprint's id repeated once per keypoint."""
        return np.repeat(self.id, self.keypoints.shape[0])

    def keypoint_index_ms(self):
        """Millisecond offset of every keypoint, as a uint32 array."""
        return np.array([self.keypoint_ms(kp) for kp in self.keypoints], dtype=np.uint32)

    def save_to_dir(self, dir: str, compress: bool = True):
        """Save under ``dir`` using the fingerprint id as the filename."""
        filepath = os.path.join(dir, self.id)
        self.save(filepath, compress=compress)

    def save(self, filepath: str, compress: bool = True):
        """Serialise constructor arguments to an npz archive at ``filepath``."""
        save_fn = np.savez_compressed if compress else np.savez
        # save all attributes used in constructor
        # Collect names from both the (possibly overriding) subclass __init__
        # and the base __init__, so subclass fingerprints round-trip via load().
        constructor_arg_names = self.__init__.__code__.co_varnames[1:]
        constructor_arg_names = constructor_arg_names + Fingerprint.__init__.__code__.co_varnames[1:]
        constructor_kwargs = {name: getattr(self, name, None) for name in constructor_arg_names}
        # np.savez cannot store None values; drop them (load() fills None back in).
        constructor_kwargs = {key: value for key, value in constructor_kwargs.items() if value is not None}
        save_fn(filepath, **constructor_kwargs)

    def __repr__(self):
        return util.class_repr(self)
def save_fingerprints(fingerprints: Iterable[Fingerprint], filepath: str, compress=True):
    """Stack keypoints/descriptors from many fingerprints into one npz archive.

    Args:
        fingerprints: Fingerprints to combine; any iterable (including a
            one-shot generator) is accepted.
        filepath: Destination path for the npz archive.
        compress: Use np.savez_compressed when True, np.savez otherwise.
    """
    # Materialize first: this function iterates the input several times, and
    # the original code silently produced empty/crashing second passes when
    # handed a generator.
    fingerprints = list(fingerprints)
    # TODO: try structured arrays: https://docs.scipy.org/doc/numpy-1.13.0/user/basics.rec.html
    keypoints = np.vstack([fp.keypoints for fp in fingerprints])
    descriptors = np.vstack([fp.descriptors for fp in fingerprints])
    index_to_id = np.hstack([fp.keypoint_index_ids() for fp in fingerprints])
    # index_to_ms = np.hstack([fp.keypoint_index_ms() for fp in fingerprints])
    # Assumes all fingerprints share one sr/hop_length; the first one wins.
    sr = next(fp.sr for fp in fingerprints)
    hop_length = next(fp.hop_length for fp in fingerprints)
    save_fn = np.savez_compressed if compress else np.savez
    save_fn(
        filepath,
        keypoints=keypoints,
        descriptors=descriptors,
        index_to_id=index_to_id,
        # index_to_ms=index_to_ms,
        sr=sr,
        # NOTE: key is intentionally "hop" (not "hop_length") for backwards
        # compatibility with existing archives; see the hack in load().
        hop=hop_length,
    )
def load_fingerprints(filepath: str) -> Fingerprints:
    """Load a combined-fingerprint npz archive into a Fingerprints container.

    ``index_to_ms`` is optional: save_fingerprints currently does not write it
    (the computation is commented out there), so a hard ``data["index_to_ms"]``
    lookup made every archive it produced unloadable. Missing -> None.
    """
    with np.load(filepath) as data:
        index_to_ms = data["index_to_ms"] if "index_to_ms" in data else None
        return Fingerprints(
            data["keypoints"], data["descriptors"], data["index_to_id"], index_to_ms
        )
class Fingerprints:
    """In-memory container pairing stacked keypoints/descriptors with their
    per-keypoint sample-id and millisecond-offset indexes."""

    def __init__(self, keypoints, descriptors, index_to_id, index_to_ms):
        # Store everything exactly as provided; no copies or validation.
        for name, value in (
            ("keypoints", keypoints),
            ("descriptors", descriptors),
            ("index_to_id", index_to_id),
            ("index_to_ms", index_to_ms),
        ):
            setattr(self, name, value)
class LazyFingerprints(Fingerprints):
    """Fingerprints backed by an on-disk npz archive instead of attributes.

    NOTE(review): np.load's mmap_mode appears to apply only to plain .npy
    files, not .npz archives — confirm; as written, every property access
    re-extracts its array from the archive rather than memory-mapping it.
    """

    def __init__(self, npz_filepath: str):
        # Intentionally does NOT call super().__init__: the arrays are exposed
        # lazily through the read-only properties below.
        self.data = np.load(npz_filepath, mmap_mode="r")

    @property
    def keypoints(self):
        return self.data["keypoints"]

    @property
    def descriptors(self):
        return self.data["descriptors"]

    @property
    def index_to_id(self):
        return self.data["index_to_id"]

    @property
    def index_to_ms(self):
        # Only present if the archive was written with index_to_ms included.
        return self.data["index_to_ms"]
@dataclass(unsafe_hash=True)
class Keypoint:
    """A fingerprint keypoint.

    Wraps a raw 4-vector and exposes its components as named scalar fields.
    Equality and hashing use the derived scalars (``kp`` has compare=False).
    """

    kp: np.ndarray[np.float32] = field(repr=False, compare=False)
    x: float = field(init=False)
    y: float = field(init=False)
    scale: float = field(init=False)
    orientation: float = field(init=False)

    def __post_init__(self):
        # Unpack the raw vector into Python floats: [x, y, scale, orientation].
        self.x, self.y, self.scale, self.orientation = (
            self.kp[i].item() for i in range(4)
        )
import logging
from typing import Optional, Tuple
import librosa
import numpy as np
from sample_id import audio, fingerprint
logger = logging.getLogger(__name__)
def from_file(audio_path, id, sr, hop_length=512, implementation="vlfeat", **kwargs):
    """Build a SIFT fingerprint from an audio file via the chosen backend.

    Raises:
        NotImplementedError: If ``implementation`` is not "vlfeat".
    """
    # Guard clause: fail fast on unknown backends.
    if implementation != "vlfeat":
        raise NotImplementedError(implementation)
    from sample_id.fingerprint.sift import vlfeat

    return vlfeat.SiftVlfeat(audio_path, id, sr, hop_length, **kwargs)
class SiftFingerprint(fingerprint.Fingerprint):
    """Fingerprint whose keypoints/descriptors come from SIFT run over a
    constant-Q spectrogram of the audio.

    Subclasses must implement sift_spectrogram; this class handles audio
    loading, spectrogram generation and edge-keypoint trimming.
    (Fix: removed dataset-extraction residue fused onto the final line.)
    """

    def __init__(self, audio_path, id, sr, hop_length, octave_bins=36, **kwargs):
        # id is set early so sift_file's log lines can reference it.
        self.id = id
        kp, desc, s = self.sift_file(audio_path, sr, hop_length, octave_bins=octave_bins, **kwargs)
        super().__init__(kp, desc, id, sr, hop_length, octave_bins=octave_bins)
        # self.spectrogram = s

    def sift_spectrogram(self, s, id, **kwargs):
        """Extract (keypoints, descriptors) from a spectrogram; backend-specific."""
        raise NotImplementedError

    def sift_file(self, audio_path, sr, hop_length, octave_bins=36, n_octaves=6, fmin=41.2, **kwargs):
        """Load audio, build a CQT spectrogram, run SIFT, and trim edge keypoints.

        Returns:
            Tuple of (keypoints, descriptors, spectrogram).
        """
        logger.info("{}: Loading signal into memory...".format(audio_path.encode("ascii", "ignore")))
        y, sr = librosa.load(audio_path, sr=sr)
        # logger.info('{}: Trimming silence...'.format(audio_path))
        # y = np.concatenate([[0], np.trim_zeros(y), [0]])
        logger.info(f"{self.id}: Generating Spectrogram...")
        specgram = audio.cqtgram(y, sr, hop_length=hop_length, octave_bins=octave_bins, n_octaves=n_octaves, fmin=fmin)
        # s = audio.chromagram(y, hop_length=256, n_fft=4096, n_chroma=36)
        keypoints, descriptors = self.sift_spectrogram(specgram, id=self.id, **kwargs)
        keypoints, descriptors = self.remove_edge_keypoints(keypoints, descriptors, specgram, octave_bins * n_octaves)
        return keypoints, descriptors, specgram

    def remove_edge_keypoints(self, keypoints, descriptors, specgram, height):
        """Drop keypoints falling in the (mostly silent) left/right margins.

        A frame counts as non-silent when more than half its bins exceed the
        spectrogram minimum; the first/last such frames bound the usable
        region, which is then shrunk by height/16 on the left and height/4
        on the right before filtering keypoints by their frame index (kp[0]).
        """
        logger.info(f"{self.id}: Removing edge keypoints...")
        min_value = np.min(specgram)
        start = next(
            (index for index, frame in enumerate(specgram.T) if sum(value > min_value for value in frame) > height / 2),
            0,
        )
        end = specgram.shape[1] - next(
            (
                index
                for index, frame in enumerate(reversed(specgram.T))
                if sum(value > min_value for value in frame) > height / 2
            ),
            0,
        )
        start = start + height / 16
        end = end - height / 4
        out_kp = []
        out_desc = []
        for keypoint, descriptor in zip(keypoints, descriptors):
            # Skip keypoints on the left and right edges of spectrogram
            if start < keypoint[0] < end:
                out_kp.append(keypoint)
                out_desc.append(descriptor)
        logger.info(f"{self.id}: Edge keypoints removed: {len(keypoints) - len(out_kp)}, remaining: {len(out_kp)}")
        return np.array(out_kp), np.array(out_desc)
import logging
import numpy as np
logger = logging.getLogger(__name__)
# Algorithm names accepted by each backend. NOTE: FLANN_ALGS and CV_ALGS are
# identical lists; the FLANN path is currently disabled in train_matcher.
FLANN_ALGS = ["kdtree", "kmeans", "composite", "lsh", "autotuned"]
CV_ALGS = ["kdtree", "kmeans", "composite", "lsh", "autotuned"]
SKLEARN_ALGS = ["kd_tree", "ball_tree", "brute", "auto"]


def train_matcher(data, algorithm="kdtree"):
    """Build an (approximate) nearest-neighbour matcher over ``data``.

    Args:
        data: Descriptor matrix to index.
        algorithm: One of CV_ALGS, SKLEARN_ALGS, "lshf" or "annoy".

    Returns:
        A fitted matcher whose concrete type depends on ``algorithm``.

    Raises:
        ValueError: If ``algorithm`` names no known backend.
    """
    # Initialise so an unknown algorithm reaches the explicit ValueError below;
    # previously `matcher` was unbound and this raised UnboundLocalError.
    matcher = None
    # if algorithm in FLANN_ALGS:
    #     matcher = fit_flann(data, algorithm)
    # el
    if algorithm in CV_ALGS:
        matcher = fit_cv2(data, algorithm)
    elif algorithm in SKLEARN_ALGS:
        matcher = fit_sklearn(data, algorithm)
    elif algorithm == "lshf":
        matcher = fit_lshf(data)
    elif algorithm == "annoy":
        matcher = fit_annoy(data)
    if matcher is None:
        # `is None` rather than truthiness: a valid matcher object could be falsy.
        raise ValueError("Invalid matching algorithm: {}".format(algorithm))
    return matcher
def find_neighbors(matcher, data, algorithm="lshf", k=2):
    """Query ``matcher`` for the k nearest neighbours of each row of ``data``.

    Returns:
        Tuple of (distances, indices), shapes/types depending on the backend.

    Raises:
        ValueError: If ``algorithm`` names no known backend (previously this
            fell through and raised UnboundLocalError on the return line).
    """
    logger.info("Finding (approximate) nearest neighbors...")
    if algorithm in FLANN_ALGS:
        # NOTE(review): "kdtree" etc. are in BOTH FLANN_ALGS and CV_ALGS, so a
        # cv2-trained matcher reaches this branch first; pyflann's nn_index
        # also returns (indices, dists) arrays, not cv2-style match objects,
        # making this unpacking suspect — confirm before relying on this path.
        matches = matcher.nn_index(np.float32(data), k=k)
        distances, indices = zip(*(((n1.distance, n2.distance), (n1.trainIdx, n2.trainIdx)) for n1, n2 in matches))
    elif algorithm in CV_ALGS:
        matches = matcher.knnMatch(np.float32(data), k=k)
        distances, indices = zip(*(((n1.distance, n2.distance), (n1.trainIdx, n2.trainIdx)) for n1, n2 in matches))
    elif algorithm in SKLEARN_ALGS:
        distances, indices = matcher.kneighbors(data, n_neighbors=k)
    elif algorithm == "lshf":
        distances, indices = matcher.kneighbors(data, n_neighbors=k)
    elif algorithm == "annoy":
        indices = []
        distances = []
        for d in data:
            index, distance = matcher.get_nns_by_vector(d, k, include_distances=True)
            indices.append(index)
            distances.append(distance)
    else:
        # Fail loudly instead of returning unbound locals.
        raise ValueError("Invalid matching algorithm: {}".format(algorithm))
    return distances, indices
def nearest_neighbors(test, train, algorithm="lshf", k=2):
    """Fit a matcher on ``train`` and return (distances, indices) of the k
    nearest neighbours for every row of ``test``."""
    matcher = train_matcher(train, algorithm)
    return find_neighbors(matcher, test, algorithm, k=k)
def fit_cv2(data, algorithm):
    """Build and train a cv2 FlannBasedMatcher (KD-tree index) over ``data``.

    Note: the ``algorithm`` argument is currently unused; the index is always
    a KD-tree forest of 5 trees.
    """
    logger.info("Fitting cv2 FLANN...")
    from cv2 import FlannBasedMatcher

    KDTREE = 0
    index_params = {
        "algorithm": KDTREE,
        "trees": 5,
        #'target_precision': 0.9,
        #'build_weight': 0.01,
        #'memory_weight': 0,
        #'sample_fraction': 0.1,
    }
    search_params = {"checks": 5}
    matcher = FlannBasedMatcher(index_params, search_params)
    matcher.add(np.float32(data))
    matcher.train()
    return matcher
def fit_flann(data, algorithm):
    """Build a pyflann index over ``data`` with the requested algorithm."""
    logger.info("Fitting FLANN...")
    from pyflann import FLANN

    # All tuning knobs gathered in one place before construction.
    flann_options = dict(
        algorithm=algorithm,
        checks=32,
        eps=0.0,
        cb_index=0.5,
        trees=1,
        leaf_max_size=4,
        branching=32,
        iterations=5,
        centers_init="random",
        target_precision=0.9,
        build_weight=0.01,
        memory_weight=0.0,
        sample_fraction=0.1,
        log_level="warning",
        random_seed=-1,
    )
    index = FLANN(**flann_options)
    index.build_index(data)
    return index
def fit_sklearn(data, algorithm):
    """Fit a scikit-learn NearestNeighbors matcher over ``data``."""
    logger.info(f"Fitting Sklearn Matcher: {algorithm}...")
    from sklearn.neighbors import NearestNeighbors

    nn_options = dict(
        algorithm=algorithm,
        n_neighbors=2,
        radius=1.0,
        leaf_size=30,
        metric="minkowski",
        p=2,  # Minkowski p=2 == Euclidean distance
        metric_params=None,
        n_jobs=-1,
    )
    nn = NearestNeighbors(**nn_options)
    nn.fit(data)
    return nn
def fit_annoy(data, n_trees=-1):
    """Build an Annoy index (euclidean metric) from the rows of ``data``."""
    logger.info("Fitting Annoy Matcher...")
    from annoy import AnnoyIndex

    logger.info("Building Annoy index...")
    index = AnnoyIndex(data.shape[1], metric="euclidean")
    for item_id, vector in enumerate(data):
        index.add_item(item_id, vector)
    logger.info("Building Annoy Matcher...")
    index.build(n_trees)
    return index
def load_annoy(path, n_features=128):
    """Load a previously saved Annoy index (euclidean metric) from ``path``."""
    logger.info(f"Loading Annoy Index {path}...")
    from annoy import AnnoyIndex

    index = AnnoyIndex(n_features, metric="euclidean")
    index.load(path)
    return index
def fit_lshf(data):
    """Fit an LSH Forest matcher over ``data``.

    NOTE(review): sklearn.neighbors.LSHForest was deprecated and removed from
    modern scikit-learn releases; this path only works on old versions
    (this whole module is already marked deprecated).
    """
    logger.info("Fitting LSHForest...")
    from sklearn.neighbors import LSHForest

    forest = LSHForest(
        n_estimators=20,
        min_hash_match=4,
        n_candidates=200,
        n_neighbors=2,
        radius=1.0,
        radius_cutoff_ratio=0.9,
        random_state=None,
    )
    forest.fit(data)
    return forest
def _message_with_path(msg, path_to_item):
    """Suffix ``msg`` with a rendered item path when one is provided."""
    if path_to_item:
        return "{0} at {1}".format(msg, render_path(path_to_item))
    return msg


class OpenApiException(Exception):
    """The base exception class for all OpenAPIExceptions"""


class ApiTypeError(OpenApiException, TypeError):
    def __init__(self, msg, path_to_item=None, valid_classes=None,
                 key_type=None):
        """Raises an exception for TypeErrors

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (list): keys/indices leading to the offending item,
                None if unset
            valid_classes (tuple): the primitive classes the current item
                should be an instance of, None if unset
            key_type (bool): True if the error is on a dict key, False for a
                dict value or list item, None if unset
        """
        self.path_to_item = path_to_item
        self.valid_classes = valid_classes
        self.key_type = key_type
        super().__init__(_message_with_path(msg, path_to_item))


class ApiValueError(OpenApiException, ValueError):
    def __init__(self, msg, path_to_item=None):
        """
        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (list): path to the exception in the received_data
                dict, None if unset
        """
        self.path_to_item = path_to_item
        super().__init__(_message_with_path(msg, path_to_item))


class ApiAttributeError(OpenApiException, AttributeError):
    def __init__(self, msg, path_to_item=None):
        """Raised when an attribute reference or assignment fails.

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (None/list): path to the exception in the
                received_data dict
        """
        self.path_to_item = path_to_item
        super().__init__(_message_with_path(msg, path_to_item))


class ApiKeyError(OpenApiException, KeyError):
    def __init__(self, msg, path_to_item=None):
        """
        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (None/list): path to the exception in the
                received_data dict
        """
        self.path_to_item = path_to_item
        super().__init__(_message_with_path(msg, path_to_item))


class ApiException(OpenApiException):
    def __init__(self, status=None, reason=None, http_resp=None):
        """Wrap an HTTP error response, or an explicit status/reason pair."""
        if http_resp:
            # Details from a real HTTP response take precedence.
            status = http_resp.status
            reason = http_resp.reason
            body = http_resp.data
            headers = http_resp.getheaders()
        else:
            body = None
            headers = None
        self.status = status
        self.reason = reason
        self.body = body
        self.headers = headers

    def __str__(self):
        """Custom error messages for exception"""
        parts = [
            "({0})\n".format(self.status),
            "Reason: {0}\n".format(self.reason),
        ]
        if self.headers:
            parts.append("HTTP response headers: {0}\n".format(self.headers))
        if self.body:
            parts.append("HTTP response body: {0}\n".format(self.body))
        return "".join(parts)
class NotFoundException(ApiException):
    """ApiException subclass raised for not-found API responses."""
    # The boilerplate __init__ that only forwarded to super() was removed;
    # the inherited ApiException.__init__ has the identical signature.


class UnauthorizedException(ApiException):
    """ApiException subclass raised for unauthorized API responses."""


class ForbiddenException(ApiException):
    """ApiException subclass raised for forbidden API responses."""


class ServiceException(ApiException):
    """ApiException subclass raised for server-side service errors."""
def render_path(path_to_item):
    """Return a bracketed string representation of a path.

    Integer components render as list indices (``[0]``), everything else as
    quoted dict keys (``['name']``).
    """
    return "".join(
        "[{0}]".format(part) if isinstance(part, int) else "['{0}']".format(part)
        for part in path_to_item
    )
import asyncio
import logging
import re
import shlex
from functools import reduce
from io import StringIO
from itertools import groupby
from typing import Dict, List, Optional, Any, Tuple, Union
import click
from sample_metadata.model.sample_type import SampleType
from sample_metadata.model.sequence_status import SequenceStatus
from sample_metadata.model.sequence_type import SequenceType
from sample_metadata.parser.generic_parser import (
GenericParser,
GroupedRow,
ParticipantMetaGroup,
SampleMetaGroup,
SequenceMetaGroup,
SingleRow,
run_as_sync,
) # noqa
__DOC = """
Parse CSV / TSV manifest of arbitrary format.
This script allows you to specify HOW you want the manifest
to be mapped onto individual data.
This script loads the WHOLE file into memory
It groups rows by the sample ID, and collapses metadata from rows.
EG:
Sample ID sample-collection-date depth qc_quality Fastqs
<sample-id> 2021-09-16 30x 0.997 <sample-id>.filename-R1.fastq.gz,<sample-id>.filename-R2.fastq.gz
# OR
<sample-id2> 2021-09-16 30x 0.997 <sample-id2>.filename-R1.fastq.gz
<sample-id2> 2021-09-16 30x 0.997 <sample-id2>.filename-R2.fastq.gz
Given the files are in a bucket called 'gs://cpg-upload-bucket/collaborator',
and we want to achieve the following:
- Import this manifest into the "$dataset" project of SM
- Map the following to `sample.meta`:
- "sample-collection-date" -> "collection_date"
- Map the following to `sequence.meta`:
- "depth" -> "depth"
- "qc_quality" -> "qc.quality" (ie: {"qc": {"quality": 0.997}})
- Add a qc analysis object with the following mapped `analysis.meta`:
- "qc_quality" -> "quality"
python parse_generic_metadata.py \
--project $dataset \
--sample-name-column "Sample ID" \
--reads-column "Fastqs" \
--sample-meta-field-map "sample-collection-date" "collection_date" \
--sequence-meta-field "depth" \
--sequence-meta-field-map "qc_quality" "qc.quality" \
--qc-meta-field-map "qc_quality" "quality" \
--search-path "gs://cpg-upload-bucket/collaborator" \
<manifest-path>
"""
logger = logging.getLogger(__file__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)
RE_FILENAME_SPLITTER = re.compile('[,;]')
class GenericMetadataParser(GenericParser):
"""Parser for GenericMetadataParser"""
def __init__(
    self,
    search_locations: List[str],
    participant_meta_map: Dict[str, str],
    sample_meta_map: Dict[str, str],
    sequence_meta_map: Dict[str, str],
    qc_meta_map: Dict[str, str],
    project: str,
    sample_name_column: str,
    participant_column: Optional[str] = None,
    reported_sex_column: Optional[str] = None,
    reported_gender_column: Optional[str] = None,
    karyotype_column: Optional[str] = None,
    reads_column: Optional[str] = None,
    checksum_column: Optional[str] = None,
    seq_type_column: Optional[str] = None,
    gvcf_column: Optional[str] = None,
    meta_column: Optional[str] = None,
    seq_meta_column: Optional[str] = None,
    batch_number: Optional[str] = None,
    reference_assembly_location_column: Optional[str] = None,
    default_reference_assembly_location: Optional[str] = None,
    default_sequence_type='genome',
    default_sequence_status='uploaded',
    default_sample_type='blood',
    allow_extra_files_in_search_path=False,
    **kwargs,
):
    """Configure how manifest columns map onto SM metadata.

    The ``*_meta_map`` arguments map a manifest column name to a destination
    key; dotted destinations nest (e.g. {"qc_quality": "qc.quality"} becomes
    {"qc": {"quality": ...}}). The ``*_column`` arguments name the manifest
    columns carrying each piece of identifying data; each is optional except
    ``sample_name_column``.

    Raises:
        ValueError: If ``sample_name_column`` is falsy.
    """
    super().__init__(
        path_prefix=None,
        search_paths=search_locations,
        project=project,
        default_sequence_type=default_sequence_type,
        default_sequence_status=default_sequence_status,
        default_sample_type=default_sample_type,
        **kwargs,
    )
    if not sample_name_column:
        raise ValueError('A sample name column MUST be provided')
    # Fixed column name under which internal CPG IDs may appear in a manifest.
    self.cpg_id_column = 'Internal CPG Sample ID'
    self.sample_name_column = sample_name_column
    self.participant_column = participant_column
    self.reported_sex_column = reported_sex_column
    self.reported_gender_column = reported_gender_column
    self.karyotype_column = karyotype_column
    self.seq_type_column = seq_type_column
    self.reference_assembly_location_column = reference_assembly_location_column
    self.default_reference_assembly_location = default_reference_assembly_location
    # Meta maps default to empty dicts so collapse_arbitrary_meta is a no-op.
    self.participant_meta_map = participant_meta_map or {}
    self.sample_meta_map = sample_meta_map or {}
    self.sequence_meta_map = sequence_meta_map or {}
    self.qc_meta_map = qc_meta_map or {}
    self.reads_column = reads_column
    self.checksum_column = checksum_column
    self.gvcf_column = gvcf_column
    self.meta_column = meta_column
    self.seq_meta_column = seq_meta_column
    # When False, files found in the search path but not referenced by any
    # manifest row cause validation errors instead of warnings.
    self.allow_extra_files_in_search_path = allow_extra_files_in_search_path
    self.batch_number = batch_number
def get_sample_id(self, row: SingleRow) -> Optional[str]:
    """Return the external sample ID for this row, or None if absent."""
    return row.get(self.sample_name_column)
async def get_cpg_sample_id_from_row(self, row: SingleRow) -> Optional[str]:
    """Return the internal CPG sample ID stored in the row, or None if absent."""
    return row.get(self.cpg_id_column)
def get_sample_type(self, row: GroupedRow) -> SampleType:
    """Sample type is not derived from the row; always the configured default."""
    return SampleType(self.default_sample_type)
def get_sequence_types(self, row: GroupedRow) -> List[SequenceType]:
    """
    Get sequence types from grouped row
    if SingleRow: return sequence type
    if GroupedRow: return sequence types for all rows
    """
    # A bare dict is a single row; delegate to the single-row accessor.
    if isinstance(row, dict):
        return [self.get_sequence_type(row)]
    # NOTE(review): unlike get_sequence_type, this list path uses raw column
    # values without the wgs/wes/mt alias normalisation — confirm intended.
    return [
        SequenceType(r.get(self.seq_type_column, self.default_sequence_type))
        for r in row
    ]
def get_sequence_type(self, row: SingleRow) -> SequenceType:
    """Read the sequence type for one row, normalising common aliases."""
    raw = (row.get(self.seq_type_column, None) or self.default_sequence_type).lower()
    if raw == 'wgs':
        return SequenceType('genome')
    if raw == 'wes':
        return SequenceType('exome')
    if 'mt' in raw:
        return SequenceType('mtseq')
    return SequenceType(raw)
def get_sequence_status(self, row: GroupedRow) -> SequenceStatus:
    """Sequence status is not derived from the row; always the configured default."""
    return SequenceStatus(self.default_sequence_status)
def get_participant_id(self, row: SingleRow) -> Optional[str]:
    """Return the external participant ID; raise when the column is
    unconfigured or missing from the row."""
    if self.participant_column and self.participant_column in row:
        return row[self.participant_column]
    raise ValueError('Participant column does not exist')
def get_reported_sex(self, row: GroupedRow) -> Optional[int]:
    """Map the first row's reported-sex value to 1 (male) / 2 (female).

    Returns None when no column is configured or the value is missing/empty;
    raises ValueError for anything unrecognised.
    """
    if not self.reported_sex_column:
        return None
    reported_sex = row[0].get(self.reported_sex_column, None)
    # Missing and empty values both mean "not reported".
    if reported_sex is None or reported_sex == '':
        return None
    code = {'female': 2, 'male': 1}.get(reported_sex.lower())
    if code is not None:
        return code
    raise ValueError(
        f'{reported_sex} could not be identified as an input for reported_sex'
    )
def get_reported_gender(self, row: GroupedRow) -> Optional[str]:
    """Get reported gender from the first row of a grouped row.

    Guards against an unconfigured column (consistent with get_reported_sex);
    previously this looked up the literal key None in the row dict.
    """
    if not self.reported_gender_column:
        return None
    return row[0].get(self.reported_gender_column, None)
def get_karyotype(self, row: GroupedRow) -> Optional[str]:
    """Get karyotype from the first row of a grouped row.

    Guards against an unconfigured column (consistent with get_reported_sex);
    previously this looked up the literal key None in the row dict.
    """
    if not self.karyotype_column:
        return None
    return row[0].get(self.karyotype_column, None)
def has_participants(self, file_pointer, delimiter: str) -> bool:
    """Returns True if the file has a Participants column"""
    reader = self._get_dict_reader(file_pointer, delimiter=delimiter)
    # Only the header matters: inspect the keys of the first parsed row.
    first_line = next(reader)
    has_participants = self.participant_column in first_line
    # Rewind so subsequent parsing sees the whole file again.
    file_pointer.seek(0)
    return has_participants
async def validate_participant_map(
    self, participant_map: Dict[Any, Dict[str, List[Dict[str, Any]]]]
):
    """Run base validation, then cross-check the read files referenced by
    rows against the files discovered in the search paths."""
    await super().validate_participant_map(participant_map)
    if not self.reads_column:
        # With no reads column there are no files to cross-check.
        return
    # Flatten {participant: {sample: row-or-rows}} into one flat row list.
    ungrouped_rows: List[Dict[str, Any]] = []
    for sample_map in participant_map.values():
        for row in sample_map.values():
            if isinstance(row, list):
                ungrouped_rows.extend(row)
            elif isinstance(row, dict):
                ungrouped_rows.append(row)
            else:
                raise ValueError(f'Unexpected type {type(row)} {row}')
    errors = []
    errors.extend(await self.check_files_covered_by_rows(ungrouped_rows))
    if errors:
        # Collapse all coverage problems into a single ValueError.
        raise ValueError(', '.join(errors))
async def validate_sample_map(self, sample_map: Dict[str, List[Dict[str, Any]]]):
    """Run base validation, then cross-check the read files referenced by
    rows against the files discovered in the search paths."""
    await super().validate_sample_map(sample_map)
    if not self.reads_column:
        # With no reads column there are no files to cross-check.
        return
    # Flatten {sample: row-or-rows} into one flat row list.
    ungrouped_rows: List[Dict[str, Any]] = []
    for row in sample_map.values():
        if isinstance(row, list):
            ungrouped_rows.extend(row)
        elif isinstance(row, dict):
            ungrouped_rows.append(row)
        else:
            raise ValueError(f'Unexpected type {type(row)} {row}')
    errors = []
    errors.extend(await self.check_files_covered_by_rows(ungrouped_rows))
    if errors:
        # Collapse all coverage problems into a single ValueError.
        raise ValueError(', '.join(errors))
@staticmethod
def flatten_irregular_list(irregular_list):
    """
    Flatten an irregular list: [1, [2, 3], 4]
    >>> GenericMetadataParser.flatten_irregular_list([1, [2, 3], [4,5]])
    [1, 2, 3, 4, 5]
    """
    # Recursively expand nested lists; any non-list value becomes a singleton.
    return (
        [
            element
            for item in irregular_list
            for element in GenericMetadataParser.flatten_irregular_list(item)
        ]
        if isinstance(irregular_list, list)
        else [irregular_list]
    )
async def get_all_files_from_row(self, sample_id: str, row):
    """Collect every file referenced by a row, flattened to a single list
    (subparsers may override to include additional file kinds)."""
    read_filenames = await self.get_read_filenames(sample_id, row)
    return self.flatten_irregular_list(read_filenames)
async def check_files_covered_by_rows(
    self, rows: List[Dict[str, Any]]
) -> List[str]:
    """
    Check that the files in the search_paths are completely covered by the sample_map
    """
    # Gather the filenames referenced by each row concurrently.
    filename_promises = []
    for grp in rows:
        for r in grp if isinstance(grp, list) else [grp]:
            filename_promises.append(
                self.get_all_files_from_row(self.get_sample_id(r), r)
            )
    # sum(..., []) concatenates the per-row filename lists.
    files_from_rows: List[str] = sum(await asyncio.gather(*filename_promises), [])
    filenames_from_rows = set(f.strip() for f in files_from_rows if f and f.strip())
    # Only sequence-data files are compared; other search-path files are ignored.
    relevant_extensions = ('.cram', '.fastq.gz', '.bam')
    def filename_filter(f):
        return any(f.endswith(ext) for ext in relevant_extensions)
    file_from_search_paths = set(filter(filename_filter, self.filename_map.keys()))
    # Set differences in both directions: extras on disk, and missing on disk.
    files_in_search_path_not_in_map = file_from_search_paths - filenames_from_rows
    missing_files = filenames_from_rows - file_from_search_paths
    errors = []
    if missing_files:
        errors.append(
            'There are files specified in the map, but not found in '
            f'the search paths: {", ".join(missing_files)}'
        )
    if files_in_search_path_not_in_map:
        m = (
            'There are files in the search path that are NOT covered by the file map: '
            f'{", ".join(files_in_search_path_not_in_map)}'
        )
        # Extras are only warnings when explicitly allowed by configuration.
        if self.allow_extra_files_in_search_path:
            logger.warning(m)
        else:
            errors.append(m)
    return errors
@staticmethod
def merge_dicts(a: Dict, b: Dict):
    """
    Recursively merge two dictionaries:
    - collapse equal values
    - put differing values into a list (not guaranteeing order)
    """
    # None acts as the identity element for this merge.
    if b is None:
        return a
    if a is None:
        return b
    res = {}
    for key in set(a.keys()).union(b.keys()):
        a_val = a.get(key)
        b_val = b.get(key)
        if a_val is not None and b_val is not None:
            # combine values
            a_is_dict = isinstance(a_val, dict)
            b_is_dict = isinstance(b_val, dict)
            if a_is_dict and b_is_dict:
                # merge dict
                res[key] = GenericMetadataParser.merge_dicts(a_val, b_val)
            elif a_val == b_val:
                # Equal scalars collapse to one value.
                res[key] = a_val
            else:
                # Conflicting values are preserved side by side.
                res[key] = [a_val, b_val]
        else:
            # At most one side has a (non-None) value; keep it.
            res[key] = a_val or b_val
    return res
@staticmethod
def collapse_arbitrary_meta(key_map: Dict[str, str], row: GroupedRow):
    """
    This is a little bit tricky

    Collapses manifest columns into a (possibly nested) metadata dict:
    key_map maps a source column to a dotted destination path; values that
    agree across grouped rows are collapsed, differing values become lists.

    >>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, {'key1': True})
    {'new_key': True}
    >>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, [{'key1': True}, {'key1': True}])
    {'new_key': True}
    >>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, [{'key1': True}, {'key1': None}])
    {'new_key': True}
    >>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, [{'key1': True}])
    {'new_key': True}
    >>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new.key'}, [{'key1': True}])
    {'new': {'key': True}}
    >>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new.key'}, [{'key1': 1}, {'key1': 2}, {'key1': 3}])
    {'new': {'key': [1, 2, 3]}}

    # multiple keys sometimes is ordered, so check the sorted(dict.items())
    >>> import json; json.dumps(GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new.key', 'key2': 'new.another'}, [{'key1': 1}, {'key1': 2}, {'key2': False}]), sort_keys=True)
    '{"new": {"another": false, "key": [1, 2]}}'
    """
    if not key_map or not row:
        return {}
    def prepare_dict_from_keys(key_parts: List[str], val):
        """Recursive production of dictionary"""
        if len(key_parts) == 1:
            return {key_parts[0]: val}
        return {key_parts[0]: prepare_dict_from_keys(key_parts[1:], val)}
    dicts = []
    for row_key, dict_key in key_map.items():
        if isinstance(row, list):
            # Collect non-None values across all grouped rows.
            inner_values = [r[row_key] for r in row if r.get(row_key) is not None]
            if any(isinstance(inner, list) for inner in inner_values):
                # lists are unhashable
                value = inner_values
            else:
                # De-duplicate equal values across rows (order not guaranteed).
                value = list(set(inner_values))
            if len(value) == 0:
                continue
            if len(value) == 1:
                value = value[0]
        else:
            # Single-row case: take the value directly if present.
            if row_key not in row:
                continue
            value = row[row_key]
        dicts.append(prepare_dict_from_keys(dict_key.split('.'), value))
    # Merge the per-column dicts into one nested metadata dict.
    return reduce(GenericMetadataParser.merge_dicts, dicts)
@staticmethod
def process_filename_value(string: Union[str, List[str]]) -> List[str]:
    """
    Split on multiple delimiters, ;,

    >>> GenericMetadataParser.process_filename_value('Filename1-fastq.gz;Filename2.fastq.gz')
    ['Filename1-fastq.gz', 'Filename2.fastq.gz']

    >>> GenericMetadataParser.process_filename_value('Filename1-fastq.gz, Filename2.fastq.gz')
    ['Filename1-fastq.gz', 'Filename2.fastq.gz']

    >>> GenericMetadataParser.process_filename_value('Filename1 with spaces fastq.gz')
    ['Filename1 with spaces fastq.gz']

    >>> GenericMetadataParser.process_filename_value(['filename ;filename2, filename3', ' filename4'])
    ['filename', 'filename2', 'filename3', 'filename4']
    """
    # Falsy input (None / '' / []) -> no filenames.
    if not string:
        return []
    if isinstance(string, list):
        # Recurse per element, de-duplicate, and return in sorted order.
        return sorted(
            set(
                r
                for f in string
                for r in GenericMetadataParser.process_filename_value(f)
            )
        )
    # RE_FILENAME_SPLITTER splits on ',' and ';'.
    filenames = [f.strip() for f in RE_FILENAME_SPLITTER.split(string)]
    filenames = [f for f in filenames if f]
    # Internal whitespace is allowed but logged — often a sign of a bad manifest.
    whitespace_filenames = [f for f in filenames if ' ' in f]
    if whitespace_filenames:
        logger.warning(
            'Whitespace detected in filenames: '
            + ','.join(shlex.quote(str(s)) for s in whitespace_filenames)
        )
    return filenames
async def get_read_filenames(
    self, sample_id: Optional[str], row: SingleRow
) -> List[str]:
    """Return the split/stripped read filenames referenced by one row."""
    if self.reads_column and self.reads_column in row:
        # more post processing
        return self.process_filename_value(row[self.reads_column])
    return []
async def get_checksums_from_row(
    self, sample_id: Optional[str], row: SingleRow, read_filenames: List[str]
) -> Optional[List[Optional[str]]]:
    """
    Get checksums for some row, you must either return:
    - no elements, or
    - number of elements equal to read_filenames
    Each element should be a string or None.
    """
    if not self.checksum_column or self.checksum_column not in row:
        return []
    # NOTE(review): this reuses the filename splitter on the checksum cell,
    # which sorts list-input results — confirm the returned order stays
    # aligned with read_filenames as the contract above requires.
    return self.process_filename_value(row[self.checksum_column])
async def get_gvcf_filenames(self, sample_id: str, row: GroupedRow) -> List[str]:
    """Return every gvcf path referenced by the row (or grouped rows)."""
    if not self.gvcf_column:
        return []
    rows = row if isinstance(row, list) else [row]
    filenames: List[str] = []
    for single_row in rows:
        if self.gvcf_column in single_row:
            filenames.extend(
                self.process_filename_value(single_row[self.gvcf_column])
            )
    return filenames
async def get_grouped_sample_meta(self, rows: GroupedRow) -> List[SampleMetaGroup]:
    """Return list of grouped by sample metadata from the rows"""
    # NOTE(review): itertools.groupby only groups *consecutive* rows sharing a
    # key — this assumes `rows` is already sorted/clustered by sample ID.
    sample_metadata = []
    for sid, row_group in groupby(rows, self.get_sample_id):
        # meta is filled in by get_sample_meta below.
        sample_group = SampleMetaGroup(sample_id=sid, rows=row_group, meta=None)
        sample_metadata.append(await self.get_sample_meta(sample_group))
    return sample_metadata
async def get_sample_meta(self, sample_group: SampleMetaGroup) -> SampleMetaGroup:
    """Populate sample_group.meta from its rows via the sample meta map."""
    sample_group.meta = self.collapse_arbitrary_meta(
        self.sample_meta_map, sample_group.rows
    )
    return sample_group
async def get_participant_meta(
    self, participant_id: int, rows: GroupedRow
) -> ParticipantMetaGroup:
    """Build a ParticipantMetaGroup with meta collapsed from the rows."""
    return ParticipantMetaGroup(
        participant_id=participant_id,
        rows=rows,
        meta=self.collapse_arbitrary_meta(self.participant_meta_map, rows),
    )
async def get_grouped_sequence_meta(
    self,
    sample_id: str,
    rows: GroupedRow,
) -> List[SequenceMetaGroup]:
    """
    Takes a collection of SingleRows and groups them by sequence type
    For each sequence type, get_sequence_meta for that group and return the
    resulting list of metadata
    """
    # NOTE(review): groupby only groups *consecutive* rows with equal keys —
    # this assumes `rows` is already sorted/clustered by sequence type.
    sequence_meta = []
    for stype, row_group in groupby(rows, self.get_sequence_type):
        seq_group = SequenceMetaGroup(
            rows=list(row_group),
            sequence_type=stype,
        )
        sequence_meta.append(await self.get_sequence_meta(seq_group, sample_id))
    return sequence_meta
async def get_sequence_meta(
self,
seq_group: SequenceMetaGroup,
sample_id: Optional[str] = None,
) -> SequenceMetaGroup:
"""Get sequence-metadata from row"""
rows = seq_group.rows
collapsed_sequence_meta = self.collapse_arbitrary_meta(
self.sequence_meta_map, rows
)
read_filenames: List[str] = []
gvcf_filenames: List[str] = []
read_checksums: List[str] = []
reference_assemblies: set[str] = set()
for r in rows:
_rfilenames = await self.get_read_filenames(sample_id=sample_id, row=r)
read_filenames.extend(_rfilenames)
if self.checksum_column and self.checksum_column in r:
checksums = await self.get_checksums_from_row(sample_id, r, _rfilenames)
if not checksums:
checksums = [None] * len(_rfilenames)
read_checksums.extend(checksums)
if self.gvcf_column and self.gvcf_column in r:
gvcf_filenames.extend(self.process_filename_value(r[self.gvcf_column]))
if self.reference_assembly_location_column:
ref = r.get(self.reference_assembly_location_column)
if ref:
reference_assemblies.add(ref)
# strip in case collaborator put "file1, file2"
full_read_filenames: List[str] = []
full_gvcf_filenames: List[str] = []
if read_filenames:
full_read_filenames.extend(
self.file_path(f.strip()) for f in read_filenames if f.strip()
)
if gvcf_filenames:
full_gvcf_filenames.extend(
self.file_path(f.strip()) for f in gvcf_filenames if f.strip()
)
if not sample_id:
sample_id = await self.get_cpg_sample_id_from_row(rows[0])
read_file_types: Dict[str, Dict[str, List]] = await self.parse_files(
sample_id, full_read_filenames, read_checksums
)
variant_file_types: Dict[str, Dict[str, List]] = await self.parse_files(
sample_id, full_gvcf_filenames, None
)
reads: Dict[str, List] = read_file_types.get('reads')
variants: Dict[str, List] = variant_file_types.get('variants')
if reads:
keys = list(reads.keys())
if len(keys) > 1:
# 2021-12-14 mfranklin: In future we should return multiple
# sequence meta, and handle that in the generic parser
raise ValueError(
f'Multiple types of reads found ({", ".join(keys)}), currently not supported'
)
reads_type = keys[0]
collapsed_sequence_meta['reads_type'] = reads_type
collapsed_sequence_meta['reads'] = reads[reads_type]
if reads_type == 'cram':
if len(reference_assemblies) > 1:
# sorted for consistent testing
str_ref_assemblies = ', '.join(sorted(reference_assemblies))
raise ValueError(
f'Multiple reference assemblies were defined for {sample_id}: {str_ref_assemblies}'
)
if len(reference_assemblies) == 1:
ref = next(iter(reference_assemblies))
else:
ref = self.default_reference_assembly_location
if not ref:
raise ValueError(
f'Reads type for "{sample_id}" is CRAM, but a reference is not defined, please set the default reference assembly path'
)
ref_fp = self.file_path(ref)
secondary_files = (
await self.create_secondary_file_objects_by_potential_pattern(
ref_fp, ['.fai']
)
)
cram_reference = await self.create_file_object(
ref_fp, secondary_files=secondary_files
)
collapsed_sequence_meta['reference_assembly'] = cram_reference
if variants:
if 'gvcf' in variants:
collapsed_sequence_meta['gvcfs'] = variants.get('gvcf')
collapsed_sequence_meta['gvcf_types'] = 'gvcf'
if 'vcf' in variants:
collapsed_sequence_meta['vcfs'] = variants['vcf']
collapsed_sequence_meta['vcf_type'] = 'vcf'
if self.batch_number is not None:
collapsed_sequence_meta['batch'] = self.batch_number
seq_group.meta = collapsed_sequence_meta
return seq_group
async def get_qc_meta(
self, sample_id: str, row: GroupedRow
) -> Optional[Dict[str, Any]]:
"""Get collapsed qc meta"""
if not self.qc_meta_map:
return None
return self.collapse_arbitrary_meta(self.qc_meta_map, row)
async def from_manifest_path(
self,
manifest: str,
confirm=False,
delimiter=None,
dry_run=False,
):
"""Parse manifest from path, and return result of parsing manifest"""
file = self.file_path(manifest)
_delimiter = delimiter or GenericMetadataParser.guess_delimiter_from_filename(
file
)
file_contents = await self.file_contents(file)
return await self.parse_manifest(
StringIO(file_contents),
delimiter=_delimiter,
confirm=confirm,
dry_run=dry_run,
)
@click.command(help=__DOC)
@click.option(
    '--project',
    required=True,
    help='The sample-metadata project ($DATASET) to import manifest into',
)
@click.option('--sample-name-column', required=True)
@click.option(
    '--reads-column',
    help='Column where the reads information is held, comma-separated if multiple',
)
@click.option(
    '--gvcf-column',
    help='Column where the reads information is held, comma-separated if multiple',
)
@click.option(
    '--qc-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --qc-meta-field "name-in-manifest" "name-in-analysis.meta"',
)
@click.option(
    '--participant-meta-field',
    multiple=True,
    help='Single argument, key to pull out of row to put in participant.meta',
)
@click.option(
    '--participant-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --participant-meta-field-map "name-in-manifest" "name-in-participant.meta"',
)
@click.option(
    '--sample-meta-field',
    multiple=True,
    help='Single argument, key to pull out of row to put in sample.meta',
)
@click.option(
    '--sample-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --sample-meta-field-map "name-in-manifest" "name-in-sample.meta"',
)
@click.option(
    '--sequence-meta-field',
    multiple=True,
    help='Single argument, key to pull out of row to put in sample.meta',
)
@click.option(
    '--sequence-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --sequence-meta-field "name-in-manifest" "name-in-sequence.meta"',
)
@click.option('--default-sample-type', default='blood')
@click.option('--default-sequence-type', default='wgs')
@click.option(
    '--confirm', is_flag=True, help='Confirm with user input before updating server'
)
@click.option('--search-path', multiple=True, required=True)
@click.argument('manifests', nargs=-1)
@run_as_sync
async def main(
    manifests,
    search_path: List[str],
    project,
    sample_name_column: str,
    participant_meta_field: List[str],
    participant_meta_field_map: List[Tuple[str, str]],
    sample_meta_field: List[str],
    sample_meta_field_map: List[Tuple[str, str]],
    sequence_meta_field: List[str],
    sequence_meta_field_map: List[Tuple[str, str]],
    qc_meta_field_map: List[Tuple[str, str]] = None,
    reads_column: Optional[str] = None,
    gvcf_column: Optional[str] = None,
    default_sample_type='blood',
    default_sequence_type='wgs',
    confirm=False,
):
    """Run script from CLI arguments"""
    if not manifests:
        raise ValueError('Expected at least 1 manifest')
    # Any manifest that lives in GCS is also a search location for its
    # sibling read/gvcf files.
    extra_search_paths = [m for m in manifests if m.startswith('gs://')]
    if extra_search_paths:
        search_path = list(set(search_path).union(set(extra_search_paths)))
    # Build the "manifest column -> meta key" maps; the *-field-map options
    # give explicit renames, the *-field options map a key to itself.
    participant_meta_map: Dict[Any, Any] = {}
    sample_meta_map: Dict[Any, Any] = {}
    sequence_meta_map: Dict[Any, Any] = {}
    qc_meta_map = dict(qc_meta_field_map or {})
    if participant_meta_field_map:
        # BUGFIX: previously updated from participant_meta_map itself (a
        # no-op), silently dropping all --participant-meta-field-map args.
        participant_meta_map.update(dict(participant_meta_field_map))
    if participant_meta_field:
        participant_meta_map.update({k: k for k in participant_meta_field})
    if sample_meta_field_map:
        sample_meta_map.update(dict(sample_meta_field_map))
    if sample_meta_field:
        sample_meta_map.update({k: k for k in sample_meta_field})
    if sequence_meta_field_map:
        sequence_meta_map.update(dict(sequence_meta_field_map))
    if sequence_meta_field:
        sequence_meta_map.update({k: k for k in sequence_meta_field})
    parser = GenericMetadataParser(
        project=project,
        sample_name_column=sample_name_column,
        participant_meta_map=participant_meta_map,
        sample_meta_map=sample_meta_map,
        sequence_meta_map=sequence_meta_map,
        qc_meta_map=qc_meta_map,
        reads_column=reads_column,
        gvcf_column=gvcf_column,
        default_sample_type=default_sample_type,
        default_sequence_type=default_sequence_type,
        search_locations=search_path,
    )
    for manifest in manifests:
        logger.info(f'Importing {manifest}')
        await parser.from_manifest_path(manifest=manifest, confirm=confirm)
if __name__ == '__main__':
    # pylint: disable=no-value-for-parameter
    # click supplies the CLI parameters at invocation time
    main()
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class PagingLinks(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained properties on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Instances of this model may not be serialized as JSON null.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            '_self': (str,),  # noqa: E501
            'next': (str,),  # noqa: E501
            'token': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model has no discriminator field.
        return None

    # Python attribute name -> JSON key in the OpenAPI definition.
    # '_self' is renamed because 'self' is reserved in method signatures.
    attribute_map = {
        '_self': 'self',  # noqa: E501
        'next': 'next',  # noqa: E501
        'token': 'token',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, _self, *args, **kwargs):  # noqa: E501
        """PagingLinks - a model defined in OpenAPI

        Args:
            _self (str):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            next (str): [optional]  # noqa: E501
            token (str): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ dispatch to construct the instance directly.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self._self = _self
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes set directly on the instance (not model properties).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, _self, *args, **kwargs):  # noqa: E501
        """PagingLinks - a model defined in OpenAPI

        Args:
            _self (str):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            next (str): [optional]  # noqa: E501
            token (str): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self._self = _self
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SequenceStatus(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # The single 'value' attribute is restricted to this enum of statuses.
    allowed_values = {
        ('value',): {
            'RECEIVED': "received",
            'SENT-TO-SEQUENCING': "sent-to-sequencing",
            'COMPLETED-SEQUENCING': "completed-sequencing",
            'COMPLETED-QC': "completed-qc",
            'FAILED-QC': "failed-qc",
            'UPLOADED': "uploaded",
            'UNKNOWN': "unknown",
        },
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    additional_properties_type = None

    # Instances of this model may not be serialized as JSON null.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # This model has no discriminator field.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal attributes set directly on the instance (not model properties).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """SequenceStatus - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501

        Keyword Args:
            value (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Accept the enum value positionally or as the 'value' keyword, not both.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Assigning via the model's __setattr__ enforces the enum/type checks.
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """SequenceStatus - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501

        Keyword Args:
            value (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Bypass OpenApiModel.__new__ dispatch to construct the instance directly.
        self = super(OpenApiModel, cls).__new__(cls)

        # Accept the enum value positionally or as the 'value' keyword, not both.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Assigning via the model's __setattr__ enforces the enum/type checks.
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import SampleType lazily (avoids a circular import at module load)
    and expose it via this module's globals for the cached type methods."""
    from sample_metadata.model.sample_type import SampleType
    globals()['SampleType'] = SampleType
class NewSample(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'external_id': (str,), # noqa: E501
'type': (SampleType,), # noqa: E501
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
'participant_id': (int,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'external_id': 'external_id', # noqa: E501
'type': 'type', # noqa: E501
'meta': 'meta', # noqa: E501
'participant_id': 'participant_id', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, external_id, type, *args, **kwargs): # noqa: E501
"""NewSample - a model defined in OpenAPI
Args:
external_id (str):
type (SampleType):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {} # noqa: E501
participant_id (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.external_id = external_id
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, external_id, type, *args, **kwargs): # noqa: E501
"""NewSample - a model defined in OpenAPI
Args:
external_id (str):
type (SampleType):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {} # noqa: E501
participant_id (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.external_id = external_id
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample-metadata-ms-0.7.1.tar.gz/sample-metadata-ms-0.7.1/sample_metadata/model/new_sample.py | 0.593256 | 0.168823 | new_sample.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SampleType(ModelSimple):
    """Enum model: the type of a physical sample.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps an attribute's tuple path (here
            ``('value',)``) to the dict of allowed enum values.
        validations (dict): maps an attribute's tuple path to its
            validation rules (max_length, min_length, regex, ...).
        additional_properties_type (tuple): classes accepted as values for
            additional (undeclared) properties, or None when disallowed.
    """

    # The only wire values accepted for a sample type.
    allowed_values = {
        ('value',): {
            'BLOOD': "blood",
            'SALIVA': "saliva",
        },
    }

    # No validation rules beyond the enum itself.
    validations = {
    }

    # Undeclared properties are not accepted on this model.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """Return the attribute-name -> accepted-types mapping.

        Declared as a zero-argument method under the model_utils
        ``cached_property`` so evaluation is deferred until after the class
        (and any self-referencing property types) is fully loaded.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # Simple enum models carry no discriminator.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes that must always remain assignable
    # (presumably enforced by the OpenApiModel machinery — confirm there).
    required_properties = {
        '_check_type',
        '_configuration',
        '_data_store',
        '_path_to_item',
        '_spec_property_naming',
        '_visited_composed_classes',
    }

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """SampleType - a model defined in OpenAPI.

        The enum value is passed either as the single positional argument or
        as the ``value`` keyword argument, but not both; it must be one of
        ["blood", "saliva", ].

        Keyword Args:
            value (str): Enum describing types of physical samples.
            _check_type (bool): type-check values against openapi_types when
                True (the default); a TypeError is raised on mismatch.
            _path_to_item (tuple/list): path of keys/indices locating this
                model inside the received data when deserializing.
            _spec_property_naming (bool): True when incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _configuration (Configuration): used when deserializing a
                file_type parameter; omit to skip type conversion.
            _visited_composed_classes (tuple): composed classes already
                traveled through via a discriminator, so that the same
                discriminator is never applied twice.
        """
        # Popped first: every error path below needs it.
        path_to_item = kwargs.pop('_path_to_item', ())
        if 'value' in kwargs:
            enum_value = kwargs.pop('value')
        elif args:
            args = list(args)
            enum_value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Anything still left in args is an error for a generated model.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = enum_value
        # A simple enum model accepts no other keyword arguments.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build a SampleType from deserialized server data.

        Same contract as ``__init__`` (the enum value positionally or as the
        ``value`` keyword, plus the underscore-prefixed control keywords),
        but allocates the instance directly and returns it.
        """
        path_to_item = kwargs.pop('_path_to_item', ())
        # Allocate directly, bypassing OpenApiModel.__new__.
        self = super(OpenApiModel, cls).__new__(cls)
        if 'value' in kwargs:
            enum_value = kwargs.pop('value')
        elif args:
            args = list(args)
            enum_value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = enum_value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SequenceType(ModelSimple):
    """Enum model: the type of sequencing performed.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps an attribute's tuple path (here
            ``('value',)``) to the dict of allowed enum values.
        validations (dict): maps an attribute's tuple path to its
            validation rules (max_length, min_length, regex, ...).
        additional_properties_type (tuple): classes accepted as values for
            additional (undeclared) properties, or None when disallowed.
    """

    # The only wire values accepted for a sequencing type.
    allowed_values = {
        ('value',): {
            'GENOME': "genome",
            'EXOME': "exome",
            'SINGLE-CELL': "single-cell",
            'MTSEQ': "mtseq",
            'ONT': "ont",
        },
    }

    # No validation rules beyond the enum itself.
    validations = {
    }

    # Undeclared properties are not accepted on this model.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """Return the attribute-name -> accepted-types mapping.

        Declared as a zero-argument method under the model_utils
        ``cached_property`` so evaluation is deferred until after the class
        (and any self-referencing property types) is fully loaded.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # Simple enum models carry no discriminator.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes that must always remain assignable
    # (presumably enforced by the OpenApiModel machinery — confirm there).
    required_properties = {
        '_check_type',
        '_configuration',
        '_data_store',
        '_path_to_item',
        '_spec_property_naming',
        '_visited_composed_classes',
    }

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """SequenceType - a model defined in OpenAPI.

        The enum value is passed either as the single positional argument or
        as the ``value`` keyword argument, but not both; it must be one of
        ["genome", "exome", "single-cell", "mtseq", "ont", ].

        Keyword Args:
            value (str): Type of sequencing.
            _check_type (bool): type-check values against openapi_types when
                True (the default); a TypeError is raised on mismatch.
            _path_to_item (tuple/list): path of keys/indices locating this
                model inside the received data when deserializing.
            _spec_property_naming (bool): True when incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _configuration (Configuration): used when deserializing a
                file_type parameter; omit to skip type conversion.
            _visited_composed_classes (tuple): composed classes already
                traveled through via a discriminator, so that the same
                discriminator is never applied twice.
        """
        # Popped first: every error path below needs it.
        path_to_item = kwargs.pop('_path_to_item', ())
        if 'value' in kwargs:
            enum_value = kwargs.pop('value')
        elif args:
            args = list(args)
            enum_value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Anything still left in args is an error for a generated model.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = enum_value
        # A simple enum model accepts no other keyword arguments.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build a SequenceType from deserialized server data.

        Same contract as ``__init__`` (the enum value positionally or as the
        ``value`` keyword, plus the underscore-prefixed control keywords),
        but allocates the instance directly and returns it.
        """
        path_to_item = kwargs.pop('_path_to_item', ())
        # Allocate directly, bypassing OpenApiModel.__new__.
        self = super(OpenApiModel, cls).__new__(cls)
        if 'value' in kwargs:
            enum_value = kwargs.pop('value')
        elif args:
            args = list(args)
            enum_value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = enum_value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class NestedFamily(ModelNormal):
    """Family record as nested inside other payloads (id + external_id).

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps an attribute's tuple path to the dict
            of allowed enum values.
        attribute_map (dict): pythonic attribute name -> JSON key in the
            OpenAPI definition.
        discriminator_value_class_map (dict): discriminator value ->
            discriminator class name.
        validations (dict): maps an attribute's tuple path to its
            validation rules (max_length, min_length, regex, ...).
        additional_properties_type (tuple): classes accepted as values for
            additional (undeclared) properties.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No extra validation rules on any attribute.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """Types accepted for additional (undeclared) properties.

        Zero-argument method under the model_utils ``cached_property`` so
        evaluation is deferred until the class is fully loaded.
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)

    _nullable = False

    @cached_property
    def openapi_types():
        """Return the attribute-name -> accepted-types mapping (deferred so
        self-referencing property types resolve after the class loads)."""
        return {
            'id': (int,),
            'external_id': (str,),
        }

    @cached_property
    def discriminator():
        # This model is not polymorphic.
        return None

    # Pythonic attribute name -> serialized JSON key (identical here).
    attribute_map = {
        'id': 'id',
        'external_id': 'external_id',
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, external_id, *args, **kwargs):  # noqa: E501
        """Build a NestedFamily from deserialized server data.

        Args:
            id (int):
            external_id (str):

        Keyword Args:
            _check_type (bool): type-check values against openapi_types when
                True (the default); a TypeError is raised on mismatch.
            _path_to_item (tuple/list): path of keys/indices locating this
                model inside the received data when deserializing.
            _spec_property_naming (bool): True when incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _configuration (Configuration): used when deserializing a
                file_type parameter; omit to skip type conversion.
            _visited_composed_classes (tuple): composed classes already
                traveled through via a discriminator, so that the same
                discriminator is never applied twice.
        """
        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        path_to_item = kwargs.pop('_path_to_item', ())
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly, bypassing OpenApiModel.__new__.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.id = id
        self.external_id = external_id
        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Configured to silently drop unknown keys.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that must always remain assignable
    # (presumably enforced by the OpenApiModel machinery — confirm there).
    required_properties = {
        '_check_type',
        '_configuration',
        '_data_store',
        '_path_to_item',
        '_spec_property_naming',
        '_visited_composed_classes',
    }

    @convert_js_args_to_python_args
    def __init__(self, id, external_id, *args, **kwargs):  # noqa: E501
        """NestedFamily - a model defined in OpenAPI.

        Args:
            id (int):
            external_id (str):

        Keyword Args:
            _check_type (bool): type-check values against openapi_types when
                True (the default); a TypeError is raised on mismatch.
            _path_to_item (tuple/list): path of keys/indices locating this
                model inside the received data when deserializing.
            _spec_property_naming (bool): True when incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _configuration (Configuration): used when deserializing a
                file_type parameter; omit to skip type conversion.
            _visited_composed_classes (tuple): composed classes already
                traveled through via a discriminator, so that the same
                discriminator is never applied twice.
        """
        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        path_to_item = kwargs.pop('_path_to_item', ())
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.id = id
        self.external_id = external_id
        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Configured to silently drop unknown keys.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class FamilyUpdateModel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # Whether None is a valid value for this model — presumably consumed by
    # the model_utils validation machinery; confirm there.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Each declared attribute maps to the tuple of types it may hold.
        return {
            'id': (int,),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'description': (str,),  # noqa: E501
            'coded_phenotype': (str,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # This model is not polymorphic: no discriminator dispatch.
        return None
    # Pythonic attribute name -> serialized JSON key in the OpenAPI
    # definition (identical for every field here).
    attribute_map = {
        'id': 'id',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'description': 'description',  # noqa: E501
        'coded_phenotype': 'coded_phenotype',  # noqa: E501
    }
    # No read-only attributes on this model.
    read_only_vars = {
    }
    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, id, *args, **kwargs): # noqa: E501
"""FamilyUpdateModel - a model defined in OpenAPI
Args:
id (int):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
external_id (str): [optional] # noqa: E501
description (str): [optional] # noqa: E501
coded_phenotype (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.id = id
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, id, *args, **kwargs):  # noqa: E501
    """FamilyUpdateModel - a model defined in OpenAPI

    Args:
        id (int):

    Keyword Args:
        _check_type (bool): if True, values for parameters in openapi_types
                            will be type checked and a TypeError will be
                            raised if the wrong type is input.
                            Defaults to True
        _path_to_item (tuple/list): This is a list of keys or values to
                            drill down to the model in received_data
                            when deserializing a response
        _spec_property_naming (bool): True if the variable names in the input data
                            are serialized names, as specified in the OpenAPI document.
                            False if the variable names in the input data
                            are pythonic names, e.g. snake case (default)
        _configuration (Configuration): the instance to use when
                            deserializing a file_type parameter.
                            If passed, type conversion is attempted
                            If omitted no type conversion is done.
        _visited_composed_classes (tuple): This stores a tuple of
                            classes that we have traveled through so that
                            if we see that class again we will not use its
                            discriminator again.
                            When traveling through a discriminator, the
                            composed schema that is traveled through is
                            added to this set.
                            For example if Animal has a discriminator
                            petType and we pass in "Dog", and the class Dog
                            allOf includes Animal, we move through Animal
                            once using the discriminator, and pick Dog.
                            Then in Dog, we will make an instance of the
                            Animal class but this time we won't travel
                            through its discriminator because we passed in
                            _visited_composed_classes = (Animal,)
        external_id (str): [optional]  # noqa: E501
        description (str): [optional]  # noqa: E501
        coded_phenotype (str): [optional]  # noqa: E501
    """
    # Pop framework-internal keyword arguments before treating the rest as
    # model attributes.
    _check_type = kwargs.pop('_check_type', True)
    _spec_property_naming = kwargs.pop('_spec_property_naming', False)
    _path_to_item = kwargs.pop('_path_to_item', ())
    _configuration = kwargs.pop('_configuration', None)
    _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

    if args:
        # Only `id` may be passed positionally; anything extra is an error.
        raise ApiTypeError(
            "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                args,
                self.__class__.__name__,
            ),
            path_to_item=_path_to_item,
            valid_classes=(self.__class__,),
        )

    self._data_store = {}
    self._check_type = _check_type
    self._spec_property_naming = _spec_property_naming
    self._path_to_item = _path_to_item
    self._configuration = _configuration
    self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
    self.id = id

    for var_name, var_value in kwargs.items():
        if var_name not in self.attribute_map and \
                self._configuration is not None and \
                self._configuration.discard_unknown_keys and \
                self.additional_properties_type is None:
            # discard variable.
            continue
        setattr(self, var_name, var_value)
        if var_name in self.read_only_vars:
            raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                    f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ExtraParticipantImporterHandler(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
        validations (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """

    # Enum: how to handle extra participants during metadata import.
    allowed_values = {
        ('value',): {
            'FAIL': "fail",
            'IGNORE': "ignore",
            'ADD': "add",
        },
    }

    validations = {
    }

    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes always stored directly on the instance.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """ExtraParticipantImporterHandler - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501

        Keyword Args:
            value (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # `value` is required: accept it as keyword or first positional arg.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """ExtraParticipantImporterHandler - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501

        Keyword Args:
            value (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Bypass cls.__new__ overrides in OpenApiModel so deserialization can
        # construct the instance directly.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class AnalysisStatus(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
        validations (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """

    # Enum: status an analysis can be in.
    allowed_values = {
        ('value',): {
            'QUEUED': "queued",
            'IN-PROGRESS': "in-progress",
            'FAILED': "failed",
            'COMPLETED': "completed",
            'UNKNOWN': "unknown",
        },
    }

    validations = {
    }

    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes always stored directly on the instance.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """AnalysisStatus - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501

        Keyword Args:
            value (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # `value` is required: accept it as keyword or first positional arg.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """AnalysisStatus - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501

        Keyword Args:
            value (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Bypass cls.__new__ overrides in OpenApiModel so deserialization can
        # construct the instance directly.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import SampleType lazily and publish it into module globals.

    Deferring the import avoids a circular-import failure at module load time;
    the generated model methods call this before referencing SampleType.
    """
    from sample_metadata.model.sample_type import SampleType
    globals()['SampleType'] = SampleType
class SampleUpdateModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
        discriminator_value_class_map (dict): A dict to go from the discriminator
            variable value to the discriminator class name.
        validations (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SampleType,),  # noqa: E501
            'participant_id': (int,),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
        'participant_id': 'participant_id',  # noqa: E501
        'active': 'active',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """SampleUpdateModel - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {}  # noqa: E501
            type (SampleType): [optional]  # noqa: E501
            participant_id (int): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass cls.__new__ overrides in OpenApiModel so deserialization can
        # construct the instance directly.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes always stored directly on the instance.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """SampleUpdateModel - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {}  # noqa: E501
            type (SampleType): [optional]  # noqa: E501
            participant_id (int): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ExportType(ModelSimple):
    """Export format selector for table-export endpoints.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (for
            var_name this is ``(var_name,)``) to a dict of allowed enum
            values, keyed by a capitalized description of each value.
        validations (dict): maps the tuple path to an attribute to a dict
            of validations (max_length, min_length, max_items, min_items,
            exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, regex).
        additional_properties_type (tuple): classes accepted as additional
            properties values.
    """

    # The only export formats the API accepts.
    allowed_values = {
        ('value',): {
            'CSV': "csv",
            'TSV': "tsv",
            'JSON': "json",
        },
    }

    validations = {
    }

    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """ExportType - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Wraps up common properties and allows for parameterisation of some table exports. ., must be one of ["csv", "tsv", "json", ]  # noqa: E501

        Keyword Args:
            value (str): Wraps up common properties and allows for parameterisation of some table exports. ., must be one of ["csv", "tsv", "json", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # 'value' may be passed positionally or as a keyword, but not both.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Any leftover positional arguments are invalid.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # NOTE(review): assignment presumably validated against allowed_values
        # by the ModelSimple machinery — confirm in model_utils.
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """ExportType - a model defined in OpenAPI

        Alternate constructor used when deserializing server data.
        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Wraps up common properties and allows for parameterisation of some table exports. ., must be one of ["csv", "tsv", "json", ]  # noqa: E501

        Keyword Args:
            value (str): Wraps up common properties and allows for parameterisation of some table exports. ., must be one of ["csv", "tsv", "json", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Skip OpenApiModel.__new__ by dispatching past it in the MRO.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve model cross-references at call time to avoid circular imports."""
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(
        SequenceStatus=SequenceStatus,
        SequenceType=SequenceType,
    )
class NestedSequence(ModelNormal):
    """Sequence record nested under a sample.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (for
            var_name this is ``(var_name,)``) to a dict of allowed enum
            values, keyed by a capitalized description of each value.
        attribute_map (dict): attribute name -> JSON key in the definition.
        discriminator_value_class_map (dict): discriminator variable value ->
            discriminator class name.
        validations (dict): maps the tuple path to an attribute to a dict
            of validations (max_length, min_length, max_items, min_items,
            exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, regex).
        additional_properties_type (tuple): classes accepted as additional
            properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> attribute type.
        """
        lazy_import()
        return {
            'id': (int,),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'id': 'id',  # noqa: E501
        'type': 'type',  # noqa: E501
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, type, status, meta, *args, **kwargs):  # noqa: E501
        """NestedSequence - a model defined in OpenAPI

        Alternate constructor used when deserializing server data; unlike
        ``__init__`` it may set read-only attributes.

        Args:
            id (int):
            type (SequenceType):
            status (SequenceStatus):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):

        Keyword Args:
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Skip OpenApiModel.__new__ by dispatching past it in the MRO.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        self.type = type
        self.status = status
        self.meta = meta
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, id, type, status, meta, *args, **kwargs):  # noqa: E501
        """NestedSequence - a model defined in OpenAPI

        Args:
            id (int):
            type (SequenceType):
            status (SequenceStatus):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):

        Keyword Args:
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        self.type = type
        self.status = status
        self.meta = meta
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve model cross-references at call time to avoid circular imports."""
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(
        SequenceStatus=SequenceStatus,
        SequenceType=SequenceType,
    )
class SequenceUpsert(ModelNormal):
    """Payload for creating or updating a sequence; every field is optional.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (for
            var_name this is ``(var_name,)``) to a dict of allowed enum
            values, keyed by a capitalized description of each value.
        attribute_map (dict): attribute name -> JSON key in the definition.
        discriminator_value_class_map (dict): discriminator variable value ->
            discriminator class name.
        validations (dict): maps the tuple path to an attribute to a dict
            of validations (max_length, min_length, max_items, min_items,
            exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, regex).
        additional_properties_type (tuple): classes accepted as additional
            properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> attribute type.
        """
        lazy_import()
        return {
            'sample_id': (int,),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
            'id': (int,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'sample_id': 'sample_id',  # noqa: E501
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
        'id': 'id',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """SequenceUpsert - a model defined in OpenAPI

        Alternate constructor used when deserializing server data; unlike
        ``__init__`` it may set read-only attributes.

        Keyword Args:
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
            sample_id (int): [optional]  # noqa: E501
            status (SequenceStatus): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            type (SequenceType): [optional]  # noqa: E501
            id (int): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Skip OpenApiModel.__new__ by dispatching past it in the MRO.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """SequenceUpsert - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
            sample_id (int): [optional]  # noqa: E501
            status (SequenceStatus): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            type (SequenceType): [optional]  # noqa: E501
            id (int): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve model cross-references at call time to avoid circular imports."""
    from sample_metadata.model.analysis_status import AnalysisStatus
    from sample_metadata.model.analysis_type import AnalysisType
    globals().update(
        AnalysisStatus=AnalysisStatus,
        AnalysisType=AnalysisType,
    )
class AnalysisQueryModel(ModelNormal):
    """Filter criteria for querying analyses; only ``projects`` is required.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (for
            var_name this is ``(var_name,)``) to a dict of allowed enum
            values, keyed by a capitalized description of each value.
        attribute_map (dict): attribute name -> JSON key in the definition.
        discriminator_value_class_map (dict): discriminator variable value ->
            discriminator class name.
        validations (dict): maps the tuple path to an attribute to a dict
            of validations (max_length, min_length, max_items, min_items,
            exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, regex).
        additional_properties_type (tuple): classes accepted as additional
            properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> attribute type.
        """
        lazy_import()
        return {
            'projects': ([str],),  # noqa: E501
            'sample_ids': ([str],),  # noqa: E501
            'type': (AnalysisType,),  # noqa: E501
            'status': (AnalysisStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'output': (str,),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'projects': 'projects',  # noqa: E501
        'sample_ids': 'sample_ids',  # noqa: E501
        'type': 'type',  # noqa: E501
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'output': 'output',  # noqa: E501
        'active': 'active',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, projects, *args, **kwargs):  # noqa: E501
        """AnalysisQueryModel - a model defined in OpenAPI

        Alternate constructor used when deserializing server data; unlike
        ``__init__`` it may set read-only attributes.

        Args:
            projects ([str]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
            sample_ids ([str]): [optional]  # noqa: E501
            type (AnalysisType): [optional]  # noqa: E501
            status (AnalysisStatus): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            output (str): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Skip OpenApiModel.__new__ by dispatching past it in the MRO.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.projects = projects
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, projects, *args, **kwargs):  # noqa: E501
        """AnalysisQueryModel - a model defined in OpenAPI

        Args:
            projects ([str]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in
                openapi_types are type checked and a TypeError is raised on
                mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices drilling down to this
                model inside received_data when deserializing a response.
            _spec_property_naming (bool): True if variable names in the
                input data are serialized (spec) names; False (default) if
                they are pythonic snake_case names.
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): classes already traversed via
                a discriminator, so the same discriminator is not followed
                twice when walking composed (allOf/oneOf) schemas.
            sample_ids ([str]): [optional]  # noqa: E501
            type (AnalysisType): [optional]  # noqa: E501
            status (AnalysisStatus): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            output (str): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.projects = projects
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set via _from_openapi_data.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ParticipantUpdateModel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'external_id': (str,), # noqa: E501
'reported_sex': (int,), # noqa: E501
'reported_gender': (str,), # noqa: E501
'karyotype': (str,), # noqa: E501
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'external_id': 'external_id', # noqa: E501
'reported_sex': 'reported_sex', # noqa: E501
'reported_gender': 'reported_gender', # noqa: E501
'karyotype': 'karyotype', # noqa: E501
'meta': 'meta', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""ParticipantUpdateModel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
external_id (str): [optional] # noqa: E501
reported_sex (int): [optional] # noqa: E501
reported_gender (str): [optional] # noqa: E501
karyotype (str): [optional] # noqa: E501
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""ParticipantUpdateModel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
external_id (str): [optional] # noqa: E501
reported_sex (int): [optional] # noqa: E501
reported_gender (str): [optional] # noqa: E501
karyotype (str): [optional] # noqa: E501
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample-metadata-ms-0.7.1.tar.gz/sample-metadata-ms-0.7.1/sample_metadata/model/participant_update_model.py | 0.542379 | 0.177989 | participant_update_model.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
from sample_metadata.model.participant_upsert import ParticipantUpsert
globals()['ParticipantUpsert'] = ParticipantUpsert
class ParticipantUpsertBody(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'participants': ([ParticipantUpsert],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'participants': 'participants', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, participants, *args, **kwargs): # noqa: E501
"""ParticipantUpsertBody - a model defined in OpenAPI
Args:
participants ([ParticipantUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.participants = participants
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, participants, *args, **kwargs): # noqa: E501
"""ParticipantUpsertBody - a model defined in OpenAPI
Args:
participants ([ParticipantUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.participants = participants
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample-metadata-ms-0.7.1.tar.gz/sample-metadata-ms-0.7.1/sample_metadata/model/participant_upsert_body.py | 0.592431 | 0.184657 | participant_upsert_body.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
from sample_metadata.model.sample_batch_upsert import SampleBatchUpsert
globals()['SampleBatchUpsert'] = SampleBatchUpsert
class SampleBatchUpsertBody(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'samples': ([SampleBatchUpsert],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'samples': 'samples', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, samples, *args, **kwargs): # noqa: E501
"""SampleBatchUpsertBody - a model defined in OpenAPI
Args:
samples ([SampleBatchUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.samples = samples
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, samples, *args, **kwargs): # noqa: E501
"""SampleBatchUpsertBody - a model defined in OpenAPI
Args:
samples ([SampleBatchUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.samples = samples
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample-metadata-ms-0.7.1.tar.gz/sample-metadata-ms-0.7.1/sample_metadata/model/sample_batch_upsert_body.py | 0.631026 | 0.201892 | sample_batch_upsert_body.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
from sample_metadata.model.analysis_status import AnalysisStatus
from sample_metadata.model.analysis_type import AnalysisType
globals()['AnalysisStatus'] = AnalysisStatus
globals()['AnalysisType'] = AnalysisType
class AnalysisModel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'sample_ids': ([str],), # noqa: E501
'type': (AnalysisType,), # noqa: E501
'status': (AnalysisStatus,), # noqa: E501
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
'output': (str,), # noqa: E501
'active': (bool,), # noqa: E501
'author': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'sample_ids': 'sample_ids', # noqa: E501
'type': 'type', # noqa: E501
'status': 'status', # noqa: E501
'meta': 'meta', # noqa: E501
'output': 'output', # noqa: E501
'active': 'active', # noqa: E501
'author': 'author', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, sample_ids, type, status, *args, **kwargs): # noqa: E501
"""AnalysisModel - a model defined in OpenAPI
Args:
sample_ids ([str]):
type (AnalysisType):
status (AnalysisStatus):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
output (str): [optional] # noqa: E501
active (bool): [optional] if omitted the server will use the default value of True # noqa: E501
author (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.sample_ids = sample_ids
self.type = type
self.status = status
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, sample_ids, type, status, *args, **kwargs): # noqa: E501
"""AnalysisModel - a model defined in OpenAPI
Args:
sample_ids ([str]):
type (AnalysisType):
status (AnalysisStatus):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
output (str): [optional] # noqa: E501
active (bool): [optional] if omitted the server will use the default value of True # noqa: E501
author (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.sample_ids = sample_ids
self.type = type
self.status = status
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample-metadata-ms-0.7.1.tar.gz/sample-metadata-ms-0.7.1/sample_metadata/model/analysis_model.py | 0.562297 | 0.187114 | analysis_model.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    # Import AnalysisStatus lazily and publish it into this module's globals;
    # deferring the import avoids circular-import problems between generated
    # model modules (called from the cached_property type methods below).
    from sample_metadata.model.analysis_status import AnalysisStatus
    globals()['AnalysisStatus'] = AnalysisStatus
class AnalysisUpdateModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'status': (AnalysisStatus,),  # noqa: E501
            'output': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'status': 'status',  # noqa: E501
        'output': 'output',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'active': 'active',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, status, *args, **kwargs):  # noqa: E501
        """AnalysisUpdateModel - a model defined in OpenAPI

        Args:
            status (AnalysisStatus):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            output (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ so discriminator lookup is not re-run.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.status = status
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, status, *args, **kwargs):  # noqa: E501
        """AnalysisUpdateModel - a model defined in OpenAPI

        Args:
            status (AnalysisStatus):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            output (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.status = status
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            # Reject read-only attributes BEFORE mutating the instance so a
            # failed construction does not leave a partially-populated model.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
            setattr(self, var_name, var_value)
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class BodyGetSamples(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'sample_ids': ([str],),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'participant_ids': ([int],),  # noqa: E501
            'project_ids': ([str],),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'sample_ids': 'sample_ids',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'participant_ids': 'participant_ids',  # noqa: E501
        'project_ids': 'project_ids',  # noqa: E501
        'active': 'active',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """BodyGetSamples - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            sample_ids ([str]): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            participant_ids ([int]): [optional]  # noqa: E501
            project_ids ([str]): [optional]  # noqa: E501
            active (bool): [optional] if omitted the server will use the default value of True  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ so discriminator lookup is not re-run.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """BodyGetSamples - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            sample_ids ([str]): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            participant_ids ([int]): [optional]  # noqa: E501
            project_ids ([str]): [optional]  # noqa: E501
            active (bool): [optional] if omitted the server will use the default value of True  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            # Reject read-only attributes BEFORE mutating the instance so a
            # failed construction does not leave a partially-populated model.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
            setattr(self, var_name, var_value)
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class BodyGetParticipants(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'external_participant_ids': ([str],),  # noqa: E501
            'internal_participant_ids': ([int],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'external_participant_ids': 'external_participant_ids',  # noqa: E501
        'internal_participant_ids': 'internal_participant_ids',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """BodyGetParticipants - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            external_participant_ids ([str]): [optional]  # noqa: E501
            internal_participant_ids ([int]): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ so discriminator lookup is not re-run.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """BodyGetParticipants - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            external_participant_ids ([str]): [optional]  # noqa: E501
            internal_participant_ids ([int]): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            # Reject read-only attributes BEFORE mutating the instance so a
            # failed construction does not leave a partially-populated model.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
            setattr(self, var_name, var_value)
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ValidationError(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'loc': ([bool, date, datetime, dict, float, int, list, str, none_type],), # noqa: E501
'msg': (str,), # noqa: E501
'type': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'loc': 'loc', # noqa: E501
'msg': 'msg', # noqa: E501
'type': 'type', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, loc, msg, type, *args, **kwargs):  # noqa: E501
        """ValidationError - a model defined in OpenAPI.

        Deserialization constructor: unlike ``__init__`` it may also set
        read-only attributes received from the server.

        Args:
            loc ([bool, date, datetime, dict, float, int, list, str, none_type]):
            msg (str):
            type (str):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
        """
        # Pop framework-control arguments first; whatever remains in kwargs
        # is treated as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ so discriminator redirection cannot
        # happen a second time during deserialization.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.loc = loc
        self.msg = msg
        self.type = type
        for var_name, var_value in kwargs.items():
            # Unknown keys are dropped only when the configuration asks for
            # it AND the schema forbids additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, loc, msg, type, *args, **kwargs): # noqa: E501
"""ValidationError - a model defined in OpenAPI
Args:
loc ([bool, date, datetime, dict, float, int, list, str, none_type]):
msg (str):
type (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.loc = loc
self.msg = msg
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample-metadata-ms-0.7.1.tar.gz/sample-metadata-ms-0.7.1/sample_metadata/model/validation_error.py | 0.588534 | 0.188063 | validation_error.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class BodyGetSequencesByCriteria(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Request body for the get-sequences-by-criteria endpoint; every filter
    property is optional.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
        discriminator_value_class_map (dict): A dict to go from the discriminator
            variable value to the discriminator class name.
        validations (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """

    # No enum-constrained properties on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for undeclared properties.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # The schema itself is not nullable.
    _nullable = False

    @cached_property
    def openapi_types():
        """Map each declared attribute name to its accepted type(s).

        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded.

        Returns:
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'sample_ids': ([str],),  # noqa: E501
            'sequence_ids': ([int],),  # noqa: E501
            'seq_meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'sample_meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'project_ids': ([str],),  # noqa: E501
            'types': ([str],),  # noqa: E501
            'statuses': ([str],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema declares no discriminator.
        return None

    # Python attribute name -> JSON key in the OpenAPI definition.
    attribute_map = {
        'sample_ids': 'sample_ids',  # noqa: E501
        'sequence_ids': 'sequence_ids',  # noqa: E501
        'seq_meta': 'seq_meta',  # noqa: E501
        'sample_meta': 'sample_meta',  # noqa: E501
        'project_ids': 'project_ids',  # noqa: E501
        'types': 'types',  # noqa: E501
        'statuses': 'statuses',  # noqa: E501
    }

    # No read-only properties; __init__ may set everything.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """BodyGetSequencesByCriteria - a model defined in OpenAPI.

        Deserialization constructor: unlike ``__init__`` it may also set
        read-only attributes received from the server.

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
            sample_ids ([str]): [optional]  # noqa: E501
            sequence_ids ([int]): [optional]  # noqa: E501
            seq_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            sample_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            project_ids ([str]): [optional]  # noqa: E501
            types ([str]): [optional]  # noqa: E501
            statuses ([str]): [optional]  # noqa: E501
        """
        # Pop framework-control arguments; remaining kwargs are properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ so discriminator redirection cannot
        # happen a second time during deserialization.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            # Unknown keys are dropped only when the configuration asks for
            # it AND the schema forbids additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that must bypass the model's
    # __setattr__/__getattr__ property machinery.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """BodyGetSequencesByCriteria - a model defined in OpenAPI.

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
            sample_ids ([str]): [optional]  # noqa: E501
            sequence_ids ([int]): [optional]  # noqa: E501
            seq_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            sample_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            project_ids ([str]): [optional]  # noqa: E501
            types ([str]): [optional]  # noqa: E501
            statuses ([str]): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # User construction must not set read-only attributes.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve forward-referenced model classes on first use.

    Deferred (rather than top-of-file) to avoid circular imports while the
    model modules are still loading; the classes are published into this
    module's namespace so type strings resolve.
    """
    from sample_metadata.model.sample_type import SampleType
    from sample_metadata.model.sequence_upsert import SequenceUpsert
    globals().update(SampleType=SampleType, SequenceUpsert=SequenceUpsert)
class SampleBatchUpsert(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Upsert payload for a sample plus its nested sequences; only
    ``sequences`` is required.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
        discriminator_value_class_map (dict): A dict to go from the discriminator
            variable value to the discriminator class name.
        validations (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """

    # No enum-constrained properties on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Resolve forward-referenced model classes before building types.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # The schema itself is not nullable.
    _nullable = False

    @cached_property
    def openapi_types():
        """Map each declared attribute name to its accepted type(s).

        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded.

        Returns:
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'sequences': ([SequenceUpsert],),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'type': (SampleType,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'participant_id': (int,),  # noqa: E501
            'active': (bool,),  # noqa: E501
            'id': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema declares no discriminator.
        return None

    # Python attribute name -> JSON key in the OpenAPI definition.
    attribute_map = {
        'sequences': 'sequences',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'type': 'type',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'participant_id': 'participant_id',  # noqa: E501
        'active': 'active',  # noqa: E501
        'id': 'id',  # noqa: E501
    }

    # No read-only properties; __init__ may set everything.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, sequences, *args, **kwargs):  # noqa: E501
        """SampleBatchUpsert - a model defined in OpenAPI.

        Deserialization constructor: unlike ``__init__`` it may also set
        read-only attributes received from the server.

        Args:
            sequences ([SequenceUpsert]):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
            external_id (str): [optional]  # noqa: E501
            type (SampleType): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {}  # noqa: E501
            participant_id (int): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
            id (bool, date, datetime, dict, float, int, list, str, none_type): [optional]  # noqa: E501
        """
        # Pop framework-control arguments; remaining kwargs are properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass OpenApiModel.__new__ so discriminator redirection cannot
        # happen a second time during deserialization.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.sequences = sequences
        for var_name, var_value in kwargs.items():
            # Unknown keys are dropped only when the configuration asks for
            # it AND the schema forbids additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that must bypass the model's
    # __setattr__/__getattr__ property machinery.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, sequences, *args, **kwargs):  # noqa: E501
        """SampleBatchUpsert - a model defined in OpenAPI.

        Args:
            sequences ([SequenceUpsert]):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
            external_id (str): [optional]  # noqa: E501
            type (SampleType): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {}  # noqa: E501
            participant_id (int): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
            id (bool, date, datetime, dict, float, int, list, str, none_type): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.sequences = sequences
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # User construction must not set read-only attributes.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class AnalysisType(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    Enum wrapper (ModelSimple) around a single string ``value`` naming the
    analysis type.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            with a capitalized key describing the allowed value and an allowed
            value. These dicts store the allowed enum values.
        validations (dict): The key is the tuple path to the attribute
            and the for var_name this is (var_name,). The value is a dict
            that stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
            inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """

    # The closed set of analysis types accepted for `value`.
    allowed_values = {
        ('value',): {
            'QC': "qc",
            'JOINT-CALLING': "joint-calling",
            'GVCF': "gvcf",
            'CRAM': "cram",
            'CUSTOM': "custom",
            'ES-INDEX': "es-index",
            'SV': "sv",
        },
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    # A ModelSimple wraps a primitive; no additional properties are accepted.
    additional_properties_type = None

    # The schema itself is not nullable.
    _nullable = False

    @cached_property
    def openapi_types():
        """Map the single wrapped attribute to its accepted type(s).

        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded.

        Returns:
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # This schema declares no discriminator.
        return None

    # ModelSimple has no named properties to map.
    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes that must bypass the model's
    # __setattr__/__getattr__ property machinery.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """AnalysisType - a model defined in OpenAPI.

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", ]  # noqa: E501

        Keyword Args:
            value (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", ]  # noqa: E501
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # `value` may arrive positionally or as a keyword, never both;
        # it has no default, so its absence is an error.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # A ModelSimple takes no other properties: leftovers are an error.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """AnalysisType - a model defined in OpenAPI.

        Deserialization constructor.

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", ]  # noqa: E501

        Keyword Args:
            value (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", ]  # noqa: E501
            _check_type (bool): type-check values against ``openapi_types``
                and raise a TypeError on mismatch. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model in
                the received data (used in error messages).
            _spec_property_naming (bool): True if input keys use the
                serialized (spec) names; False for pythonic names (default).
            _configuration (Configuration): instance used when deserializing
                a file_type parameter; if omitted no type conversion is done.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, so none is applied twice.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Bypass OpenApiModel.__new__ so discriminator redirection cannot
        # happen a second time during deserialization.
        self = super(OpenApiModel, cls).__new__(cls)
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # A ModelSimple takes no other properties: leftovers are an error.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve model dependencies at call time to avoid circular imports.

    The imported classes are published into this module's globals so the
    cached ``openapi_types`` / ``additional_properties_type`` properties can
    reference them by name.
    """
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(SequenceStatus=SequenceStatus, SequenceType=SequenceType)
class NewSequence(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model declares no enum-restricted attributes.
    allowed_values = {
    }

    # This model declares no value validations (lengths, ranges, regex).
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Whether the model itself may be serialized as JSON null.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'status': (SequenceStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
            'sample_id': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator is defined for this schema.
        return None

    # Maps pythonic attribute names to the JSON keys used in the OpenAPI spec.
    attribute_map = {
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
        'sample_id': 'sample_id',  # noqa: E501
    }

    # No attributes are marked read-only in the spec.
    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, status, meta, type, sample_id, *args, **kwargs):  # noqa: E501
        """NewSequence - a model defined in OpenAPI

        Args:
            status (SequenceStatus):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            type (SequenceType):
            sample_id (str):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop the framework-internal keyword arguments before treating the
        # remaining kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly via the grandparent __new__ (skips OpenApiModel.__new__).
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.status = status
        self.meta = meta
        self.type = type
        self.sample_id = sample_id
        # Any remaining kwargs become additional properties; unknown keys are
        # discarded only when the configuration asks for it and the model
        # accepts no additional properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes set on every instance; excluded from property handling.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, status, meta, type, sample_id, *args, **kwargs):  # noqa: E501
        """NewSequence - a model defined in OpenAPI

        Args:
            status (SequenceStatus):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            type (SequenceType):
            sample_id (str):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop the framework-internal keyword arguments before treating the
        # remaining kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.status = status
        self.meta = meta
        self.type = type
        self.sample_id = sample_id
        # Any remaining kwargs become additional properties; assigning a
        # read-only attribute here raises, since only `_from_openapi_data`
        # (server-supplied data) may set read-only vars.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ProjectRow(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model declares no enum-restricted attributes.
    allowed_values = {
    }

    # This model declares no value validations (lengths, ranges, regex).
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Whether the model itself may be serialized as JSON null.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'id': (int,),  # noqa: E501
            'name': (str,),  # noqa: E501
            'gcp_id': (str,),  # noqa: E501
            'dataset': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'read_secret_name': (str,),  # noqa: E501
            'write_secret_name': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator is defined for this schema.
        return None

    # Maps pythonic attribute names to the JSON keys used in the OpenAPI spec.
    attribute_map = {
        'id': 'id',  # noqa: E501
        'name': 'name',  # noqa: E501
        'gcp_id': 'gcp_id',  # noqa: E501
        'dataset': 'dataset',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'read_secret_name': 'read_secret_name',  # noqa: E501
        'write_secret_name': 'write_secret_name',  # noqa: E501
    }

    # No attributes are marked read-only in the spec.
    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """ProjectRow - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (int): [optional]  # noqa: E501
            name (str): [optional]  # noqa: E501
            gcp_id (str): [optional]  # noqa: E501
            dataset (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            read_secret_name (str): [optional]  # noqa: E501
            write_secret_name (str): [optional]  # noqa: E501
        """
        # Pop the framework-internal keyword arguments before treating the
        # remaining kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly via the grandparent __new__ (skips OpenApiModel.__new__).
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # All properties are optional: every remaining kwarg is assigned as a
        # model property; unknown keys are discarded only when the
        # configuration asks for it and no additional properties are accepted.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes set on every instance; excluded from property handling.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """ProjectRow - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (int): [optional]  # noqa: E501
            name (str): [optional]  # noqa: E501
            gcp_id (str): [optional]  # noqa: E501
            dataset (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            read_secret_name (str): [optional]  # noqa: E501
            write_secret_name (str): [optional]  # noqa: E501
        """
        # Pop the framework-internal keyword arguments before treating the
        # remaining kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Any remaining kwargs become model/additional properties; assigning a
        # read-only attribute here raises, since only `_from_openapi_data`
        # (server-supplied data) may set read-only vars.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve model dependencies at call time to avoid circular imports.

    The imported classes are published into this module's globals so the
    cached ``openapi_types`` / ``additional_properties_type`` properties can
    reference them by name.
    """
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(SequenceStatus=SequenceStatus, SequenceType=SequenceType)
class SequenceUpdateModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model declares no enum-restricted attributes.
    allowed_values = {
    }

    # This model declares no value validations (lengths, ranges, regex).
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Whether the model itself may be serialized as JSON null.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'sample_id': (int,),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator is defined for this schema.
        return None

    # Maps pythonic attribute names to the JSON keys used in the OpenAPI spec.
    attribute_map = {
        'sample_id': 'sample_id',  # noqa: E501
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
    }

    # No attributes are marked read-only in the spec.
    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """SequenceUpdateModel - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            sample_id (int): [optional]  # noqa: E501
            status (SequenceStatus): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            type (SequenceType): [optional]  # noqa: E501
        """
        # Pop the framework-internal keyword arguments before treating the
        # remaining kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly via the grandparent __new__ (skips OpenApiModel.__new__).
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # All properties are optional: every remaining kwarg is assigned as a
        # model property; unknown keys are discarded only when the
        # configuration asks for it and no additional properties are accepted.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal attributes set on every instance; excluded from property handling.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """SequenceUpdateModel - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is traveled through is
                                added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            sample_id (int): [optional]  # noqa: E501
            status (SequenceStatus): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            type (SequenceType): [optional]  # noqa: E501
        """
        # Pop the framework-internal keyword arguments before treating the
        # remaining kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Any remaining kwargs become model/additional properties; assigning a
        # read-only attribute here raises, since only `_from_openapi_data`
        # (server-supplied data) may set read-only vars.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve the SampleBatchUpsert dependency at call time to avoid
    circular imports, publishing it into this module's globals."""
    from sample_metadata.model.sample_batch_upsert import SampleBatchUpsert
    globals().update(SampleBatchUpsert=SampleBatchUpsert)
class ParticipantUpsert(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'samples': ([SampleBatchUpsert],), # noqa: E501
'external_id': (str,), # noqa: E501
'reported_sex': (int,), # noqa: E501
'reported_gender': (str,), # noqa: E501
'karyotype': (str,), # noqa: E501
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
'id': (int,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'samples': 'samples', # noqa: E501
'external_id': 'external_id', # noqa: E501
'reported_sex': 'reported_sex', # noqa: E501
'reported_gender': 'reported_gender', # noqa: E501
'karyotype': 'karyotype', # noqa: E501
'meta': 'meta', # noqa: E501
'id': 'id', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, samples, *args, **kwargs): # noqa: E501
"""ParticipantUpsert - a model defined in OpenAPI
Args:
samples ([SampleBatchUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
external_id (str): [optional] # noqa: E501
reported_sex (int): [optional] # noqa: E501
reported_gender (str): [optional] # noqa: E501
karyotype (str): [optional] # noqa: E501
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
id (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.samples = samples
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
    # Internal bookkeeping attributes that every model instance must carry;
    # model_utils treats these specially and they are never serialized.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, samples, *args, **kwargs):  # noqa: E501
        """ParticipantUpsert - a model defined in OpenAPI

        Args:
            samples ([SampleBatchUpsert]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            external_id (str): [optional]  # noqa: E501
            reported_sex (int): [optional]  # noqa: E501
            reported_gender (str): [optional]  # noqa: E501
            karyotype (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            id (int): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Positional arguments beyond the declared ones are always invalid.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.samples = samples
        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # Read-only attributes may only be populated when
                # deserializing server data via _from_openapi_data.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Load dependent model classes on first use to avoid circular imports."""
    from sample_metadata.model.nested_participant import NestedParticipant
    from sample_metadata.model.paging_links import PagingLinks

    module_globals = globals()
    module_globals['NestedParticipant'] = NestedParticipant
    module_globals['PagingLinks'] = PagingLinks
class ProjectSummaryResponse(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # A null payload is not accepted for this model.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'participants': ([NestedParticipant],),  # noqa: E501
            'total_samples': (int,),  # noqa: E501
            'participant_keys': ([str],),  # noqa: E501
            'sample_keys': ([str],),  # noqa: E501
            'sequence_keys': ([str],),  # noqa: E501
            'links': (PagingLinks,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema declares no discriminator.
        return None

    # Python attribute name -> JSON key in the API payload.
    # Note that 'links' is serialized as '_links'.
    attribute_map = {
        'participants': 'participants',  # noqa: E501
        'total_samples': 'total_samples',  # noqa: E501
        'participant_keys': 'participant_keys',  # noqa: E501
        'sample_keys': 'sample_keys',  # noqa: E501
        'sequence_keys': 'sequence_keys',  # noqa: E501
        'links': '_links',  # noqa: E501
    }

    # No attributes are restricted to _from_openapi_data assignment.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, participants, total_samples, participant_keys, sample_keys, sequence_keys, *args, **kwargs):  # noqa: E501
        """ProjectSummaryResponse - a model defined in OpenAPI

        Args:
            participants ([NestedParticipant]):
            total_samples (int):
            participant_keys ([str]):
            sample_keys ([str]):
            sequence_keys ([str]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            links (PagingLinks): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Construct the instance directly, skipping OpenApiModel.__new__.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.participants = participants
        self.total_samples = total_samples
        self.participant_keys = participant_keys
        self.sample_keys = sample_keys
        self.sequence_keys = sequence_keys
        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that every model instance must carry;
    # model_utils treats these specially and they are never serialized.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, participants, total_samples, participant_keys, sample_keys, sequence_keys, *args, **kwargs):  # noqa: E501
        """ProjectSummaryResponse - a model defined in OpenAPI

        Args:
            participants ([NestedParticipant]):
            total_samples (int):
            participant_keys ([str]):
            sample_keys ([str]):
            sequence_keys ([str]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            links (PagingLinks): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.participants = participants
        self.total_samples = total_samples
        self.participant_keys = participant_keys
        self.sample_keys = sample_keys
        self.sequence_keys = sequence_keys
        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # Read-only attributes may only be populated when
                # deserializing server data via _from_openapi_data.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Load dependent model classes on first use to avoid circular imports."""
    from sample_metadata.model.nested_sequence import NestedSequence
    from sample_metadata.model.sample_type import SampleType

    module_globals = globals()
    module_globals['NestedSequence'] = NestedSequence
    module_globals['SampleType'] = SampleType
class NestedSample(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # A null payload is not accepted for this model.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'id': (str,),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'type': (SampleType,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'sequences': ([NestedSequence],),  # noqa: E501
            'created_date': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema declares no discriminator.
        return None

    # Python attribute name -> JSON key in the API payload (identical here).
    attribute_map = {
        'id': 'id',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'type': 'type',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'sequences': 'sequences',  # noqa: E501
        'created_date': 'created_date',  # noqa: E501
    }

    # No attributes are restricted to _from_openapi_data assignment.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, external_id, type, meta, sequences, *args, **kwargs):  # noqa: E501
        """NestedSample - a model defined in OpenAPI

        Args:
            id (str):
            external_id (str):
            type (SampleType):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            sequences ([NestedSequence]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            created_date (str): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Construct the instance directly, skipping OpenApiModel.__new__.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.id = id
        self.external_id = external_id
        self.type = type
        self.meta = meta
        self.sequences = sequences
        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that every model instance must carry;
    # model_utils treats these specially and they are never serialized.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, id, external_id, type, meta, sequences, *args, **kwargs):  # noqa: E501
        """NestedSample - a model defined in OpenAPI

        Args:
            id (str):
            external_id (str):
            type (SampleType):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            sequences ([NestedSequence]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            created_date (str): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self.id = id
        self.external_id = external_id
        self.type = type
        self.meta = meta
        self.sequences = sequences
        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # Read-only attributes may only be populated when
                # deserializing server data via _from_openapi_data.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Load the dependent model class on first use to avoid a circular import."""
    from sample_metadata.model.validation_error import ValidationError

    globals().update(ValidationError=ValidationError)
class HTTPValidationError(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # A null payload is not accepted for this model.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'detail': ([ValidationError],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This schema declares no discriminator.
        return None

    # Python attribute name -> JSON key in the API payload.
    attribute_map = {
        'detail': 'detail',  # noqa: E501
    }

    # No attributes are restricted to _from_openapi_data assignment.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """HTTPValidationError - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            detail ([ValidationError]): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Construct the instance directly, skipping OpenApiModel.__new__.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that every model instance must carry;
    # model_utils treats these specially and they are never serialized.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """HTTPValidationError - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            detail ([ValidationError]): [optional]  # noqa: E501
        """

        # Pop the framework-level keyword arguments before treating the
        # remainder of **kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            # Optionally discard unknown properties when the configuration
            # requests it and the model accepts no additional properties.
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # Read-only attributes may only be populated when
                # deserializing server data via _from_openapi_data.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
# Sample Metadata
[](https://codecov.io/gh/populationgenomics/sample-metadata)
The sample-metadata system is a database that stores **de-identified** metadata.
There are three components to the sample-metadata system:
- System-versioned MariaDB database,
- Python web API to manage permissions, and store frequently used queries,
- An installable python library that wraps the Python web API (using OpenAPI generator)
Every resource in sample-metadata belongs to a project. All resources are access-controlled
through membership of the google groups:
`$dataset-sample-metadata-main-{read,write}`. Note that members of google-groups
are cached in a secret as group-membership identity checks are slow.
## API
There are two ways to query metamist in Python:
1. Use the REST interface with the predefined requests
2. Use the GraphQL interface.
To use the GraphQL interface in Python with the `sample_metadata` library, you can do the following:
```python
from sample_metadata.graphql import query
_query = """
query YourQueryNameHere($sampleId: String!) {
sample(id: $sampleId) {
id
externalId
}
}
"""
print(query(_query, {"sampleId": "CPG18"}))
```
## Structure

### Sample IDs
In an effort to reduce our dependency on potentially mutable external sample IDs with inconsistent format,
the sample-metadata server generates an internal sample id for every sample. Internally they're an
incrementing integer, but they're transformed externally to have a prefix, and checksum - this allows durability
when transcribing sample IDs to reduce mistypes, and allow to quickly check whether a sample ID is valid.
> NB: The prefix and checksums are modified per environment (production, development, local) to avoid duplicates from these environments.
For example, let's consider the production environment which uses the prefix of `CPG` and a checksum offset of 0:
> A sample is given the internal ID `12345`, we calculate the Luhn checksum to be `5` (with no offset applied).
> We can then concatenate the results, for the final sample ID to be `CPG123455`.
### Reporting sex
To avoid ambiguity in reporting of gender, sex and karyotype - the sample metadata system
stores these values separately on the `participant` as:
- `reported_gender` (string, expected `male` | `female` | _other values_)
- `reported_sex` (follows pedigree convention: `unknown=0 | null`, `male=1`, `female=2`)
- `inferred_karyotype` (string, eg: `XX` | `XY` | _other karyotypes_)
If you import a pedigree, the sex value is written to the `reported_sex` attribute.
## Local development of SM
The recommended way to develop the sample-metadata system is to run a local copy of SM.
> There have been some reported issues of running a local SM environment on an M1 mac.
You can run MariaDB with a locally installed docker, or from within a docker container.
You can configure the MariaDB connection with environment variables.
### Creating the environment
Dependencies for the `sample-metadata` API package are listed in `setup.py`.
Additional dev requirements are listed in `requirements-dev.txt`, and packages for
the server-side code are listed in `requirements.txt`.
To create the full dev environment, run:
```shell
virtualenv venv
source venv/bin/activate
pip install -r requirements.txt
pip install -r requirements-dev.txt
pip install --editable .
```
### Default DB set-up
These are the default values for the SM database connection.
Please alter them if you use any different values when setting up the database.
```shell
export SM_DEV_DB_USER=root
export SM_DEV_DB_PASSWORD= # empty password
export SM_DEV_DB_HOST=127.0.0.1
export SM_DEV_DB_PORT=3306 # default mariadb port
```
Create the database in MariaDB (by default, we call it `sm_dev`):
If you use a different database name, also set the following:
```shell
export SM_DEV_DB_NAME=sm_database_name
```
> Sample-metadata stores all metadata in one database (_previously: one database per project_).
```shell
mysql -u root --execute 'CREATE DATABASE sm_dev'
```
Download the `mariadb-java-client` and create the schema using liquibase:
```shell
pushd db/
wget https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/3.0.3/mariadb-java-client-3.0.3.jar
liquibase \
--changeLogFile project.xml \
--url jdbc:mariadb://localhost/sm_dev \
--driver org.mariadb.jdbc.Driver \
--classpath mariadb-java-client-3.0.3.jar \
--username root \
update
popd
```
#### Using Maria DB docker image
Pull mariadb image
```bash
docker pull mariadb
```
Run a mariadb container that will serve your database. `-p 3307:3306` remaps the port to 3307 in case your local MySQL is already using 3306
```bash
docker stop mysql-p3307 # stop and remove if the container already exists
docker rm mysql-p3307
# run with an empty root password
docker run -p 3307:3306 --name mysql-p3307 -e MYSQL_ALLOW_EMPTY_PASSWORD=true -d mariadb
```
```bash
mysql --host=127.0.0.1 --port=3307 -u root -e 'CREATE DATABASE sm_dev;'
mysql --host=127.0.0.1 --port=3307 -u root -e 'show databases;'
```
Go into the `db/` subdirectory, download the `mariadb-java-client` and create the schema using liquibase:
```bash
pushd db/
wget https://repo1.maven.org/maven2/org/mariadb/jdbc/mariadb-java-client/3.0.3/mariadb-java-client-3.0.3.jar
liquibase \
--changeLogFile project.xml \
--url jdbc:mariadb://127.0.0.1:3307/sm_dev \
--driver org.mariadb.jdbc.Driver \
--classpath mariadb-java-client-3.0.3.jar \
--username root \
update
popd
```
Finally, make sure you configure the server (making use of the environment variables) to point it to your local Maria DB server
```bash
export SM_DEV_DB_PORT=3307
```
### Running the server
You'll want to set the following environment variables (permanently) in your
local development environment.
```shell
# ensures the SWAGGER page (localhost:8000/docs) points to your local environment
export SM_ENVIRONMENT=LOCAL
# skips permission checks in your local environment
export SM_ALLOWALLACCESS=true
# start the server
python3 -m api.server
# OR
# uvicorn --port 8000 --host 0.0.0.0 api.server:app
```
In a different terminal, execute the following
request to create a new project called 'dev'
```shell
curl -X 'PUT' \
'http://localhost:8000/api/v1/project/?name=dev&dataset=dev&gcp_id=dev&create_test_project=false' \
-H 'accept: application/json' \
-H "Authorization: Bearer $(gcloud auth print-identity-token)"
```
#### Quickstart: Generate and install the installable API
It's best to do this with an already running server:
```shell
python3 regenerate_api.py \
&& pip install .
```
#### Debugging the server in VSCode
VSCode allows you to debug python modules, we could debug the web API at `api/server.py` by considering the following `launch.json`:
```json
{
"version": "0.2.0",
"configurations": [
{
"name": "API server",
"type": "python",
"request": "launch",
"module": "api.server"
}
]
}
```
We could now place breakpoints on the sample route (ie: `api/routes/sample.py`), and debug requests as they come in.
#### Developing the UI
```shell
# Ensure you have started sm locally on your computer already, then in another tab open the UI.
# This will automatically proxy request to the server.
cd web
npm install
npm start
```
#### Unauthenticated access
You'll want to set the `SM_LOCALONLY_DEFAULTUSER` environment variable along with `SM_ALLOWALLACCESS` to allow access to a local sample-metadata server without providing a bearer token. This will allow you to test the front-end components that access data. This happens automatically on the production instance through the Google identity-aware-proxy.
```shell
export SM_ALLOWALLACCESS=1
export SM_LOCALONLY_DEFAULTUSER=$(whoami)
```
### OpenAPI and Swagger
The Web API uses `apispec` with OpenAPI3 annotations on each route to describe interactions with the server. We can generate a swagger UI and an installable
python module based on these annotations.
Some handy links:
- [OpenAPI specification](https://swagger.io/specification/)
- [Describing parameters](https://swagger.io/docs/specification/describing-parameters/)
- [Describing request body](https://swagger.io/docs/specification/describing-request-body/)
- [Media types](https://swagger.io/docs/specification/media-types/)
The web API exposes this schema in two ways:
- Swagger UI: `http://localhost:8000/docs`
- You can use this to construct requests to the server
- Make sure you fill in the Bearer token (at the top right )
- OpenAPI schema: `http://localhost:8000/schema.json`
- Returns a JSON with the full OpenAPI 3 compliant schema.
- You could put this into the [Swagger editor](https://editor.swagger.io/) to see the same "Swagger UI" that `/api/docs` exposes.
- We generate the sample_metadata installable Python API based on this schema.
#### Generating the installable API
The installable API is automatically generated through the `package.yml` GitHub action and uploaded to PyPI.
To generate the python api you'll need to install openapi generator v5.x.x
To install a specific version of the openapi-generator, do the following:
```bash
npm install @openapitools/openapi-generator-cli -g
openapi-generator-cli version-manager set 5.3.0
```
Then set your environment variable OPENAPI_COMMAND to the following.
You can also add an alias to your ~/.bash_profile or equivalent for running in the
terminal.
```bash
export OPENAPI_COMMAND="npx @openapitools/openapi-generator-cli"
alias openapi-generator="npx @openapitools/openapi-generator-cli"
```
You could generate the installable API and install it with pip by running:
```bash
# this will start the api.server, so make sure you have the dependencies installed,
python regenerate_api.py \
&& pip install .
```
Or you can build the docker file, and specify that
```bash
# SM_DOCKER is a known env variable to regenerate_api.py
export SM_DOCKER="cpg/sample-metadata-server:dev"
docker build --build-arg SM_ENVIRONMENT=local -t $SM_DOCKER -f deploy/api/Dockerfile .
python regenerate_api.py
```
## Deployment
To deploy the sample-metadata server, you'll want to complete the following steps:
- Ensure there is a database created for each project (with the database name being the project),
- Ensure there are secrets in `projects/sample_metadata/secrets/databases/versions/latest`, that's an array of objects with keys `dbname, host, port, username, password`.
- Ensure `google-cloud` was installed
```bash
export SM_ENVIRONMENT='PRODUCTION'
# OR, point to the dev instance with
export SM_ENVIRONMENT='DEVELOPMENT'
```
| /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/README.md | 0.440469 | 0.89684 | README.md | pypi |
class OpenApiException(Exception):
    """Base class for every exception raised by this OpenAPI client."""
class ApiTypeError(OpenApiException, TypeError):
    def __init__(self, msg, path_to_item=None, valid_classes=None,
                 key_type=None):
        """Raised when a value has the wrong type during (de)serialisation.

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (list): keys and indices leading to the
                current item; None if unset
            valid_classes (tuple): the primitive classes the current item
                should be an instance of; None if unset
            key_type (bool): True if the item is a key in a dict,
                False if it is a value in a dict or an item in a list,
                None if unset
        """
        self.path_to_item = path_to_item
        self.valid_classes = valid_classes
        self.key_type = key_type
        if path_to_item:
            msg = f"{msg} at {render_path(path_to_item)}"
        super().__init__(msg)
class ApiValueError(OpenApiException, ValueError):
    def __init__(self, msg, path_to_item=None):
        """Raised when a value is invalid for the schema.

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (list): the path to the exception in the
                received_data dict; None if unset
        """
        self.path_to_item = path_to_item
        if path_to_item:
            msg = f"{msg} at {render_path(path_to_item)}"
        super().__init__(msg)
class ApiAttributeError(OpenApiException, AttributeError):
    def __init__(self, msg, path_to_item=None):
        """Raised when an attribute reference or assignment fails.

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (None/list): the path to the exception in the
                received_data dict
        """
        self.path_to_item = path_to_item
        if path_to_item:
            msg = f"{msg} at {render_path(path_to_item)}"
        super().__init__(msg)
class ApiKeyError(OpenApiException, KeyError):
    def __init__(self, msg, path_to_item=None):
        """Raised when a required key is missing.

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (None/list): the path to the exception in the
                received_data dict
        """
        self.path_to_item = path_to_item
        if path_to_item:
            msg = f"{msg} at {render_path(path_to_item)}"
        super().__init__(msg)
class ApiException(OpenApiException):
    """Raised when an HTTP call to the API fails."""

    def __init__(self, status=None, reason=None, http_resp=None):
        """Record failure details; *http_resp*, when given, takes precedence."""
        if http_resp:
            self.status = http_resp.status
            self.reason = http_resp.reason
            self.body = http_resp.data
            self.headers = http_resp.getheaders()
        else:
            self.status = status
            self.reason = reason
            self.body = None
            self.headers = None

    def __str__(self):
        """Custom error messages for exception"""
        parts = [f"({self.status})\n", f"Reason: {self.reason}\n"]
        if self.headers:
            parts.append(f"HTTP response headers: {self.headers}\n")
        if self.body:
            parts.append(f"HTTP response body: {self.body}\n")
        return "".join(parts)
class NotFoundException(ApiException):
    """Raised for HTTP 404 responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
class UnauthorizedException(ApiException):
    """Raised for HTTP 401 responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
class ForbiddenException(ApiException):
    """Raised for HTTP 403 responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
class ServiceException(ApiException):
    """Raised for HTTP 5xx responses."""

    def __init__(self, status=None, reason=None, http_resp=None):
        super().__init__(status, reason, http_resp)
def render_path(path_to_item):
    """Render *path_to_item* as chained subscripts, e.g. ``['a'][0]['b']``."""
    rendered = []
    for part in path_to_item:
        if isinstance(part, int):
            rendered.append(f"[{part}]")
        else:
            rendered.append(f"['{part}']")
    return "".join(rendered)
import asyncio
import logging
import re
import shlex
from functools import reduce
from io import StringIO
from typing import Dict, List, Optional, Any, Union
import click
from sample_metadata.model.sample_type import SampleType
from sample_metadata.model.sequence_status import SequenceStatus
from sample_metadata.model.sequence_technology import SequenceTechnology
from sample_metadata.model.sequence_type import SequenceType
from sample_metadata.parser.generic_parser import (
GenericParser,
GroupedRow,
ParticipantMetaGroup,
SampleMetaGroup,
SequenceMetaGroup,
SingleRow,
run_as_sync,
group_by,
) # noqa
__DOC = """
Parse CSV / TSV manifest of arbitrary format.
This script allows you to specify HOW you want the manifest
to be mapped onto individual data.
This script loads the WHOLE file into memory
It groups rows by the sample ID, and collapses metadata from rows.
EG:
Sample ID sample-collection-date depth qc_quality Fastqs
<sample-id> 2021-09-16 30x 0.997 <sample-id>.filename-R1.fastq.gz,<sample-id>.filename-R2.fastq.gz
# OR
<sample-id2> 2021-09-16 30x 0.997 <sample-id2>.filename-R1.fastq.gz
<sample-id2> 2021-09-16 30x 0.997 <sample-id2>.filename-R2.fastq.gz
Given the files are in a bucket called 'gs://cpg-upload-bucket/collaborator',
and we want to achieve the following:
- Import this manifest into the "$dataset" project of SM
- Map the following to `sample.meta`:
- "sample-collection-date" -> "collection_date"
- Map the following to `sequence.meta`:
- "depth" -> "depth"
- "qc_quality" -> "qc.quality" (ie: {"qc": {"quality": 0.997}})
- Add a qc analysis object with the following mapped `analysis.meta`:
- "qc_quality" -> "quality"
python parse_generic_metadata.py \
--project $dataset \
--sample-name-column "Sample ID" \
--reads-column "Fastqs" \
--sample-meta-field-map "sample-collection-date" "collection_date" \
--sequence-meta-field "depth" \
--sequence-meta-field-map "qc_quality" "qc.quality" \
--qc-meta-field-map "qc_quality" "quality" \
--search-path "gs://cpg-upload-bucket/collaborator" \
<manifest-path>
"""
# Module-level logger; emits parse-progress messages to stderr at INFO.
logger = logging.getLogger(__file__)
logger.addHandler(logging.StreamHandler())
logger.setLevel(logging.INFO)

# A manifest cell may hold several filenames; split on ',' or ';'.
RE_FILENAME_SPLITTER = re.compile('[,;]')
class GenericMetadataParser(GenericParser):
"""Parser for GenericMetadataParser"""
    def __init__(
        self,
        search_locations: List[str],
        participant_meta_map: Dict[str, str],
        sample_meta_map: Dict[str, str],
        sequence_meta_map: Dict[str, str],
        qc_meta_map: Dict[str, str],
        project: str,
        sample_name_column: str,
        participant_column: Optional[str] = None,
        sequence_id_column: Optional[str] = None,
        reported_sex_column: Optional[str] = None,
        reported_gender_column: Optional[str] = None,
        karyotype_column: Optional[str] = None,
        reads_column: Optional[str] = None,
        checksum_column: Optional[str] = None,
        seq_type_column: Optional[str] = None,
        seq_technology_column: Optional[str] = None,
        gvcf_column: Optional[str] = None,
        meta_column: Optional[str] = None,
        seq_meta_column: Optional[str] = None,
        batch_number: Optional[str] = None,
        reference_assembly_location_column: Optional[str] = None,
        default_reference_assembly_location: Optional[str] = None,
        default_sequence_type='genome',
        default_sequence_status='uploaded',
        default_sample_type='blood',
        default_sequence_technology='short-read',
        allow_extra_files_in_search_path=False,
        **kwargs,
    ):
        """Configure how manifest columns map onto participant / sample /
        sequence / QC metadata.

        The ``*_meta_map`` arguments map a manifest column name to a
        (possibly dotted) destination key in the corresponding ``meta``
        dict; the ``*_column`` arguments name the manifest columns that
        hold well-known fields. ``default_*`` values apply when a row has
        no explicit value. Remaining kwargs are passed to GenericParser.

        Raises:
            ValueError: if ``sample_name_column`` is falsy.
        """
        super().__init__(
            path_prefix=None,
            search_paths=search_locations,
            project=project,
            default_sequence_type=default_sequence_type,
            default_sequence_status=default_sequence_status,
            default_sample_type=default_sample_type,
            default_sequence_technology=default_sequence_technology,
            **kwargs,
        )
        if not sample_name_column:
            raise ValueError('A sample name column MUST be provided')
        # Fixed column name under which an internal CPG ID may already appear.
        self.cpg_id_column = 'Internal CPG Sample ID'
        self.sample_name_column = sample_name_column
        self.participant_column = participant_column
        self.sequence_id_column = sequence_id_column
        self.reported_sex_column = reported_sex_column
        self.reported_gender_column = reported_gender_column
        self.karyotype_column = karyotype_column
        self.seq_type_column = seq_type_column
        self.seq_technology_column = seq_technology_column
        self.reference_assembly_location_column = reference_assembly_location_column
        self.default_reference_assembly_location = default_reference_assembly_location
        # Meta maps default to empty dicts so collapse_arbitrary_meta is safe.
        self.participant_meta_map = participant_meta_map or {}
        self.sample_meta_map = sample_meta_map or {}
        self.sequence_meta_map = sequence_meta_map or {}
        self.qc_meta_map = qc_meta_map or {}
        self.reads_column = reads_column
        self.checksum_column = checksum_column
        self.gvcf_column = gvcf_column
        self.meta_column = meta_column
        self.seq_meta_column = seq_meta_column
        self.allow_extra_files_in_search_path = allow_extra_files_in_search_path
        self.batch_number = batch_number
def get_sample_id(self, row: SingleRow) -> Optional[str]:
"""Get external sample ID from row"""
return row.get(self.sample_name_column, None)
async def get_cpg_sample_id_from_row(self, row: SingleRow) -> Optional[str]:
"""Get internal cpg id from a row using get_sample_id and an api call"""
return row.get(self.cpg_id_column, None)
    def get_sample_type(self, row: GroupedRow) -> SampleType:
        """Get sample type from row"""
        # Rows carry no per-sample type column; always use the parser default.
        return SampleType(self.default_sample_type)
def get_sequence_types(self, row: GroupedRow) -> List[SequenceType]:
"""
Get sequence types from grouped row
if SingleRow: return sequence type
if GroupedRow: return sequence types for all rows
"""
if isinstance(row, dict):
return [self.get_sequence_type(row)]
return [
SequenceType(r.get(self.seq_type_column, self.default_sequence_type))
for r in row
]
def get_sequence_technologies(self, row: GroupedRow) -> list[SequenceTechnology]:
"""Get list of sequence technologies for rows"""
if isinstance(row, dict):
return [self.get_sequence_technology(row)]
return [self.get_sequence_technology(r) for r in row]
def get_sequence_technology(self, row: SingleRow) -> SequenceTechnology:
"""Get sequence technology for single row"""
value = row.get(self.seq_technology_column) or self.default_sequence_technology
value = value.lower()
if value == 'ont':
value = 'long-read'
return SequenceTechnology(value)
def get_sequence_type(self, row: SingleRow) -> SequenceType:
"""Get sequence type from row"""
value = row.get(self.seq_type_column, None) or self.default_sequence_type
value = value.lower()
if value == 'wgs':
value = 'genome'
elif value == 'wes':
value = 'exome'
elif 'mt' in value:
value = 'mtseq'
return SequenceType(value)
    def get_sequence_status(self, row: GroupedRow) -> SequenceStatus:
        """Get sequence status from row"""
        # Rows carry no status column; always use the parser-wide default.
        return SequenceStatus(self.default_sequence_status)
    def get_sequence_id(self, row: GroupedRow) -> Optional[dict[str, str]]:
        """Get external sequence ID from row. Needs to be implemented per parser.

        NOTE: To be re-thought after sequence group changes are applied"""
        # Intentionally a stub: subclasses override when a manifest carries
        # external sequence identifiers.
        return None
def get_participant_id(self, row: SingleRow) -> Optional[str]:
"""Get external participant ID from row"""
if not self.participant_column or self.participant_column not in row:
raise ValueError('Participant column does not exist')
return row[self.participant_column]
def get_reported_sex(self, row: GroupedRow) -> Optional[int]:
"""Get reported sex from grouped row"""
if not self.reported_sex_column:
return None
reported_sex = row[0].get(self.reported_sex_column, None)
if reported_sex is None:
return None
if reported_sex == '':
return None
if reported_sex.lower() == 'female':
return 2
if reported_sex.lower() == 'male':
return 1
raise ValueError(
f'{reported_sex} could not be identified as an input for reported_sex'
)
def get_reported_gender(self, row: GroupedRow) -> Optional[str]:
"""Get reported gender from grouped row"""
return row[0].get(self.reported_gender_column, None)
def get_karyotype(self, row: GroupedRow) -> Optional[str]:
"""Get karyotype from grouped row"""
return row[0].get(self.karyotype_column, None)
def has_participants(self, file_pointer, delimiter: str) -> bool:
"""Returns True if the file has a Participants column"""
try:
reader = self._get_dict_reader(file_pointer, delimiter=delimiter)
first_line = next(reader)
has_participants = self.participant_column in first_line
file_pointer.seek(0)
return has_participants
except StopIteration as e:
raise ValueError(
f'The manifest file appears empty. Check that header metadata is enclosed by double quotes. {e}'
) from e
async def validate_participant_map(
self, participant_map: Dict[Any, Dict[str, List[Dict[str, Any]]]]
):
await super().validate_participant_map(participant_map)
if not self.reads_column:
return
ungrouped_rows: List[Dict[str, Any]] = []
for sample_map in participant_map.values():
for row in sample_map.values():
if isinstance(row, list):
ungrouped_rows.extend(row)
elif isinstance(row, dict):
ungrouped_rows.append(row)
else:
raise ValueError(f'Unexpected type {type(row)} {row}')
errors = []
errors.extend(await self.check_files_covered_by_rows(ungrouped_rows))
if errors:
raise ValueError(', '.join(errors))
async def validate_sample_map(self, sample_map: Dict[str, List[Dict[str, Any]]]):
await super().validate_sample_map(sample_map)
if not self.reads_column:
return
ungrouped_rows: List[Dict[str, Any]] = []
for row in sample_map.values():
if isinstance(row, list):
ungrouped_rows.extend(row)
elif isinstance(row, dict):
ungrouped_rows.append(row)
else:
raise ValueError(f'Unexpected type {type(row)} {row}')
errors = []
errors.extend(await self.check_files_covered_by_rows(ungrouped_rows))
if errors:
raise ValueError(', '.join(errors))
@staticmethod
def flatten_irregular_list(irregular_list):
"""
Flatten an irregular list: [1, [2, 3], 4]
>>> GenericMetadataParser.flatten_irregular_list([1, [2, 3], [4,5]])
[1, 2, 3, 4, 5]
"""
return (
[
element
for item in irregular_list
for element in GenericMetadataParser.flatten_irregular_list(item)
]
if isinstance(irregular_list, list)
else [irregular_list]
)
async def get_all_files_from_row(self, sample_id: str, row):
"""Get all files from row, to allow subparsers to include other files"""
fns = await self.get_read_filenames(sample_id, row)
return self.flatten_irregular_list(fns)
async def check_files_covered_by_rows(
self, rows: List[Dict[str, Any]]
) -> List[str]:
"""
Check that the files in the search_paths are completely covered by the sample_map
"""
filename_promises = []
for grp in rows:
for r in grp if isinstance(grp, list) else [grp]:
filename_promises.append(
self.get_all_files_from_row(self.get_sample_id(r), r)
)
files_from_rows: List[str] = sum(await asyncio.gather(*filename_promises), [])
filenames_from_rows = set(f.strip() for f in files_from_rows if f and f.strip())
relevant_extensions = ('.cram', '.fastq.gz', '.bam')
def filename_filter(f):
return any(f.endswith(ext) for ext in relevant_extensions)
file_from_search_paths = set(filter(filename_filter, self.filename_map.keys()))
files_in_search_path_not_in_map = file_from_search_paths - filenames_from_rows
missing_files = filenames_from_rows - file_from_search_paths
errors = []
if missing_files:
errors.append(
'There are files specified in the map, but not found in '
f'the search paths: {", ".join(missing_files)}'
)
if files_in_search_path_not_in_map:
m = (
'There are files in the search path that are NOT covered by the file map: '
f'{", ".join(files_in_search_path_not_in_map)}'
)
if self.allow_extra_files_in_search_path:
logger.warning(m)
else:
errors.append(m)
return errors
@staticmethod
def merge_dicts(a: Dict, b: Dict):
"""
Recursively merge two dictionaries:
- collapse equal values
- put differing values into a list (not guaranteeing order)
"""
if b is None:
return a
if a is None:
return b
res = {}
for key in set(a.keys()).union(b.keys()):
a_val = a.get(key)
b_val = b.get(key)
if a_val is not None and b_val is not None:
# combine values
a_is_dict = isinstance(a_val, dict)
b_is_dict = isinstance(b_val, dict)
if a_is_dict and b_is_dict:
# merge dict
res[key] = GenericMetadataParser.merge_dicts(a_val, b_val)
elif a_val == b_val:
res[key] = a_val
else:
res[key] = [a_val, b_val]
else:
res[key] = a_val or b_val
return res
@staticmethod
def collapse_arbitrary_meta(key_map: Dict[str, str], row: GroupedRow):
"""
This is a little bit tricky
>>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, {'key1': True})
{'new_key': True}
>>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, [{'key1': True}, {'key1': True}])
{'new_key': True}
>>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, [{'key1': True}, {'key1': None}])
{'new_key': True}
>>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new_key'}, [{'key1': True}])
{'new_key': True}
>>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new.key'}, [{'key1': True}])
{'new': {'key': True}}
>>> GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new.key'}, [{'key1': 1}, {'key1': 2}, {'key1': 3}])
{'new': {'key': [1, 2, 3]}}
# multiple keys sometimes is ordered, so check the sorted(dict.items())
>>> import json; json.dumps(GenericMetadataParser.collapse_arbitrary_meta({'key1': 'new.key', 'key2': 'new.another'}, [{'key1': 1}, {'key1': 2}, {'key2': False}]), sort_keys=True)
'{"new": {"another": false, "key": [1, 2]}}'
"""
if not key_map or not row:
return {}
def prepare_dict_from_keys(key_parts: List[str], val):
"""Recursive production of dictionary"""
if len(key_parts) == 1:
return {key_parts[0]: val}
return {key_parts[0]: prepare_dict_from_keys(key_parts[1:], val)}
dicts = []
for row_key, dict_key in key_map.items():
if isinstance(row, list):
inner_values = [r[row_key] for r in row if r.get(row_key) is not None]
if any(isinstance(inner, list) for inner in inner_values):
# lists are unhashable
value = inner_values
else:
value = list(set(inner_values))
if len(value) == 0:
continue
if len(value) == 1:
value = value[0]
else:
if row_key not in row:
continue
value = row[row_key]
dicts.append(prepare_dict_from_keys(dict_key.split('.'), value))
return reduce(GenericMetadataParser.merge_dicts, dicts)
@staticmethod
def process_filename_value(string: Union[str, List[str]]) -> List[str]:
"""
Split on multiple delimiters, ;,
>>> GenericMetadataParser.process_filename_value('Filename1-fastq.gz;Filename2.fastq.gz')
['Filename1-fastq.gz', 'Filename2.fastq.gz']
>>> GenericMetadataParser.process_filename_value('Filename1-fastq.gz, Filename2.fastq.gz')
['Filename1-fastq.gz', 'Filename2.fastq.gz']
>>> GenericMetadataParser.process_filename_value('Filename1 with spaces fastq.gz')
['Filename1 with spaces fastq.gz']
>>> GenericMetadataParser.process_filename_value(['filename ;filename2, filename3', ' filename4'])
['filename', 'filename2', 'filename3', 'filename4']
"""
if not string:
return []
if isinstance(string, list):
return sorted(
set(
r
for f in string
for r in GenericMetadataParser.process_filename_value(f)
)
)
filenames = [f.strip() for f in RE_FILENAME_SPLITTER.split(string)]
filenames = [f for f in filenames if f]
whitespace_filenames = [f for f in filenames if ' ' in f]
if whitespace_filenames:
logger.warning(
'Whitespace detected in filenames: '
+ ','.join(shlex.quote(str(s)) for s in whitespace_filenames)
)
return filenames
async def get_read_filenames(
self, sample_id: Optional[str], row: SingleRow
) -> List[str]:
"""Get paths to reads from a row"""
if not self.reads_column or self.reads_column not in row:
return []
# more post processing
return self.process_filename_value(row[self.reads_column])
async def get_checksums_from_row(
self, sample_id: Optional[str], row: SingleRow, read_filenames: List[str]
) -> Optional[List[Optional[str]]]:
"""
Get checksums for some row, you must either return:
- no elements, or
- number of elements equal to read_filenames
Each element should be a string or None.
"""
if not self.checksum_column or self.checksum_column not in row:
return []
return self.process_filename_value(row[self.checksum_column])
async def get_gvcf_filenames(self, sample_id: str, row: GroupedRow) -> List[str]:
"""Get paths to gvcfs from a row"""
if not self.gvcf_column:
return []
gvcf_filenames: List[str] = []
for r in row if isinstance(row, list) else [row]:
if self.gvcf_column in r:
gvcf_filenames.extend(self.process_filename_value(r[self.gvcf_column]))
return gvcf_filenames
async def get_grouped_sample_meta(self, rows: GroupedRow) -> List[SampleMetaGroup]:
"""Return list of grouped by sample metadata from the rows"""
sample_metadata = []
for sid, row_group in group_by(rows, self.get_sample_id).items():
sample_group = SampleMetaGroup(sample_id=sid, rows=row_group, meta=None)
sample_metadata.append(await self.get_sample_meta(sample_group))
return sample_metadata
async def get_sample_meta(self, sample_group: SampleMetaGroup) -> SampleMetaGroup:
"""Get sample-metadata from row"""
rows = sample_group.rows
meta = self.collapse_arbitrary_meta(self.sample_meta_map, rows)
sample_group.meta = meta
return sample_group
async def get_participant_meta(
self, participant_id: int, rows: GroupedRow
) -> ParticipantMetaGroup:
"""Get participant-metadata from rows then set it in the ParticipantMetaGroup"""
meta = self.collapse_arbitrary_meta(self.participant_meta_map, rows)
return ParticipantMetaGroup(participant_id=participant_id, rows=rows, meta=meta)
async def get_grouped_sequence_meta(
self,
sample_id: str,
rows: GroupedRow,
) -> List[SequenceMetaGroup]:
"""
Takes a collection of SingleRows and groups them by sequence type
For each sequence type, get_sequence_meta for that group and return the
resulting list of metadata
"""
sequence_meta = []
def _seq_grouper(s):
return str(self.get_sequence_type(s)), str(self.get_sequence_technology(s))
for (stype, stech), row_group in group_by(rows, _seq_grouper).items():
seq_group = SequenceMetaGroup(
rows=list(row_group),
sequence_type=SequenceType(stype),
sequence_technology=SequenceTechnology(stech),
)
sequence_meta.append(await self.get_sequence_meta(seq_group, sample_id))
return sequence_meta
async def get_sequence_meta(
    self,
    seq_group: SequenceMetaGroup,
    sample_id: Optional[str] = None,
) -> SequenceMetaGroup:
    """Get sequence-metadata from row

    Collapses the configured sequence_meta_map over the group's rows, then
    collects read / gvcf filenames, checksums and reference assemblies from
    each row, parses the files, and attaches the combined metadata to
    ``seq_group.meta`` before returning the same group object.
    """
    rows = seq_group.rows
    collapsed_sequence_meta = self.collapse_arbitrary_meta(
        self.sequence_meta_map, rows
    )
    # Accumulators across all rows in the group.
    read_filenames: List[str] = []
    gvcf_filenames: List[str] = []
    read_checksums: List[str] = []
    reference_assemblies: set[str] = set()
    for r in rows:
        _rfilenames = await self.get_read_filenames(sample_id=sample_id, row=r)
        read_filenames.extend(_rfilenames)
        if self.checksum_column and self.checksum_column in r:
            checksums = await self.get_checksums_from_row(sample_id, r, _rfilenames)
            if not checksums:
                # Keep positional alignment with this row's filenames.
                checksums = [None] * len(_rfilenames)
            read_checksums.extend(checksums)
        # NOTE(review): checksums are only appended for rows that have the
        # checksum column, while read_filenames is always extended — if some
        # rows lack the column, read_checksums may fall out of alignment with
        # read_filenames. Confirm upstream guarantees all-or-none.
        if self.gvcf_column and self.gvcf_column in r:
            gvcf_filenames.extend(self.process_filename_value(r[self.gvcf_column]))
        if self.reference_assembly_location_column:
            ref = r.get(self.reference_assembly_location_column)
            if ref:
                reference_assemblies.add(ref)
    # strip in case collaborator put "file1, file2"
    full_read_filenames: List[str] = []
    full_gvcf_filenames: List[str] = []
    if read_filenames:
        full_read_filenames.extend(
            self.file_path(f.strip()) for f in read_filenames if f.strip()
        )
    if gvcf_filenames:
        full_gvcf_filenames.extend(
            self.file_path(f.strip()) for f in gvcf_filenames if f.strip()
        )
    if not sample_id:
        # Fall back to resolving the internal sample id from the first row.
        sample_id = await self.get_cpg_sample_id_from_row(rows[0])
    read_file_types: Dict[str, Dict[str, List]] = await self.parse_files(
        sample_id, full_read_filenames, read_checksums
    )
    variant_file_types: Dict[str, Dict[str, List]] = await self.parse_files(
        sample_id, full_gvcf_filenames, None
    )
    # .get may return None when nothing was parsed; both are guarded by
    # truthiness checks below.
    reads: Dict[str, List] = read_file_types.get('reads')
    variants: Dict[str, List] = variant_file_types.get('variants')
    if reads:
        keys = list(reads.keys())
        if len(keys) > 1:
            # 2021-12-14 mfranklin: In future we should return multiple
            # sequence meta, and handle that in the generic parser
            raise ValueError(
                f'Multiple types of reads found ({", ".join(keys)}), currently not supported'
            )
        reads_type = keys[0]
        collapsed_sequence_meta['reads_type'] = reads_type
        collapsed_sequence_meta['reads'] = reads[reads_type]
        if reads_type == 'cram':
            # CRAMs need a reference assembly: exactly one per sample, from
            # the manifest column or the configured default.
            if len(reference_assemblies) > 1:
                # sorted for consistent testing
                str_ref_assemblies = ', '.join(sorted(reference_assemblies))
                raise ValueError(
                    f'Multiple reference assemblies were defined for {sample_id}: {str_ref_assemblies}'
                )
            if len(reference_assemblies) == 1:
                ref = next(iter(reference_assemblies))
            else:
                ref = self.default_reference_assembly_location
            if not ref:
                raise ValueError(
                    f'Reads type for {sample_id!r} is CRAM, but a reference is '
                    f'not defined, please set the default reference assembly path'
                )
            ref_fp = self.file_path(ref)
            secondary_files = (
                await self.create_secondary_file_objects_by_potential_pattern(
                    ref_fp, ['.fai']
                )
            )
            cram_reference = await self.create_file_object(
                ref_fp, secondary_files=secondary_files
            )
            collapsed_sequence_meta['reference_assembly'] = cram_reference
    if variants:
        if 'gvcf' in variants:
            collapsed_sequence_meta['gvcfs'] = variants.get('gvcf')
            collapsed_sequence_meta['gvcf_types'] = 'gvcf'
        if 'vcf' in variants:
            collapsed_sequence_meta['vcfs'] = variants['vcf']
            collapsed_sequence_meta['vcf_type'] = 'vcf'
    if self.batch_number is not None:
        collapsed_sequence_meta['batch'] = self.batch_number
    seq_group.meta = collapsed_sequence_meta
    return seq_group
async def get_qc_meta(
    self, sample_id: str, row: GroupedRow
) -> Optional[Dict[str, Any]]:
    """Get collapsed qc meta"""
    if self.qc_meta_map:
        return self.collapse_arbitrary_meta(self.qc_meta_map, row)
    # No qc mapping configured: nothing to collapse.
    return None
async def from_manifest_path(
    self,
    manifest: str,
    confirm=False,
    delimiter=None,
    dry_run=False,
):
    """Parse manifest from path, and return result of parsing manifest"""
    resolved_path = self.file_path(manifest)
    # Guess the delimiter from the filename unless the caller supplied one.
    if not delimiter:
        delimiter = GenericMetadataParser.guess_delimiter_from_filename(
            resolved_path
        )
    contents = await self.file_contents(resolved_path)
    return await self.parse_manifest(
        StringIO(contents),
        delimiter=delimiter,
        confirm=confirm,
        dry_run=dry_run,
    )
def _combine_meta_maps(field_map, fields) -> Dict[Any, Any]:
    """Merge an explicit (source, dest) field mapping with identity-mapped fields.

    Explicit mappings are applied first, then single fields (mapped to
    themselves), so a bare field listed in both keeps its identity mapping.
    """
    meta_map: Dict[Any, Any] = {}
    if field_map:
        meta_map.update(dict(field_map))
    if fields:
        meta_map.update({k: k for k in fields})
    return meta_map


@click.command(help=__DOC)
@click.option(
    '--project',
    required=True,
    help='The sample-metadata project ($DATASET) to import manifest into',
)
@click.option('--sample-name-column', required=True)
@click.option(
    '--participant-column',
    help='Column where the external participant id is held',
)
@click.option(
    '--reads-column',
    help='Column where the reads information is held, comma-separated if multiple',
)
@click.option(
    '--gvcf-column',
    # fixed copy-paste: this option holds gvcf info, not reads
    help='Column where the gvcf information is held, comma-separated if multiple',
)
@click.option(
    '--reported-sex-column',
    help='Column where the reported sex is held',
)
@click.option(
    '--reported-gender-column',
    help='Column where the reported gender is held',
)
@click.option(
    '--karyotype-column',
    help='Column where the karyotype is held',
)
@click.option(
    '--qc-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --qc-meta-field-map "name-in-manifest" "name-in-analysis.meta"',
)
@click.option(
    '--participant-meta-field',
    multiple=True,
    help='Single argument, key to pull out of row to put in participant.meta',
)
@click.option(
    '--participant-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --participant-meta-field-map "name-in-manifest" "name-in-participant.meta"',
)
@click.option(
    '--sample-meta-field',
    multiple=True,
    help='Single argument, key to pull out of row to put in sample.meta',
)
@click.option(
    '--sample-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --sample-meta-field-map "name-in-manifest" "name-in-sample.meta"',
)
@click.option(
    '--sequence-meta-field',
    multiple=True,
    help='Single argument, key to pull out of row to put in sequence.meta',
)
@click.option(
    '--sequence-meta-field-map',
    nargs=2,
    multiple=True,
    help='Two arguments per listing, eg: --sequence-meta-field-map "name-in-manifest" "name-in-sequence.meta"',
)
@click.option('--default-sample-type', default='blood')
@click.option('--default-sequence-type', default='wgs')
@click.option(
    '--confirm', is_flag=True, help='Confirm with user input before updating server'
)
@click.option('--search-path', multiple=True, required=True)
@click.argument('manifests', nargs=-1)
@run_as_sync
async def main(
    manifests,
    search_path: list[str],
    project,
    sample_name_column: str,
    participant_meta_field: list[str],
    participant_meta_field_map: list[tuple[str, str]],
    sample_meta_field: list[str],
    sample_meta_field_map: list[tuple[str, str]],
    sequence_meta_field: list[str],
    sequence_meta_field_map: list[tuple[str, str]],
    qc_meta_field_map: Optional[list[tuple[str, str]]] = None,
    reads_column: Optional[str] = None,
    gvcf_column: Optional[str] = None,
    participant_column: Optional[str] = None,
    reported_sex_column: Optional[str] = None,
    reported_gender_column: Optional[str] = None,
    karyotype_column: Optional[str] = None,
    default_sample_type='blood',
    default_sequence_type='wgs',
    confirm=False,
):
    """Run script from CLI arguments"""
    if not manifests:
        raise ValueError('Expected at least 1 manifest')
    # Manifests given as gs:// URLs are also used as file-search locations.
    extra_search_paths = [m for m in manifests if m.startswith('gs://')]
    if extra_search_paths:
        search_path = list(set(search_path).union(set(extra_search_paths)))
    # BUGFIX: the participant branch previously updated participant_meta_map
    # with itself, silently discarding --participant-meta-field-map values.
    participant_meta_map: Dict[Any, Any] = _combine_meta_maps(
        participant_meta_field_map, participant_meta_field
    )
    sample_meta_map: Dict[Any, Any] = _combine_meta_maps(
        sample_meta_field_map, sample_meta_field
    )
    sequence_meta_map: Dict[Any, Any] = _combine_meta_maps(
        sequence_meta_field_map, sequence_meta_field
    )
    qc_meta_map = dict(qc_meta_field_map or {})
    parser = GenericMetadataParser(
        project=project,
        sample_name_column=sample_name_column,
        participant_column=participant_column,
        participant_meta_map=participant_meta_map,
        sample_meta_map=sample_meta_map,
        sequence_meta_map=sequence_meta_map,
        qc_meta_map=qc_meta_map,
        reads_column=reads_column,
        gvcf_column=gvcf_column,
        reported_sex_column=reported_sex_column,
        reported_gender_column=reported_gender_column,
        karyotype_column=karyotype_column,
        default_sample_type=default_sample_type,
        default_sequence_type=default_sequence_type,
        search_locations=search_path,
    )
    for manifest in manifests:
        logger.info(f'Importing {manifest}')
        await parser.from_manifest_path(manifest=manifest, confirm=confirm)
if __name__ == '__main__':
    # click supplies the CLI arguments at invocation time, so pylint's
    # missing-argument check does not apply here.
    # pylint: disable=no-value-for-parameter
    main()
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class PagingLinks(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        # NOTE: generated cached_property descriptors are declared without
        # `self` — the custom cached_property from model_utils invokes them
        # with no arguments.
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            '_self': (str,),  # noqa: E501
            'next': (str,),  # noqa: E501
            'token': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    # '_self' is the Python-side name; it serialises to the JSON key 'self'
    # (avoids clashing with the conventional `self` parameter name).
    attribute_map = {
        '_self': 'self',  # noqa: E501
        'next': 'next',  # noqa: E501
        'token': 'token',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, _self, *args, **kwargs):  # noqa: E501
        """PagingLinks - a model defined in OpenAPI
        Args:
            _self (str):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            next (str): [optional]  # noqa: E501
            token (str): [optional]  # noqa: E501
        """
        # Pop framework-reserved kwargs before treating the rest as attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ dispatch when deserializing.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self._self = _self
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, _self, *args, **kwargs):  # noqa: E501
        """PagingLinks - a model defined in OpenAPI
        Args:
            _self (str):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            next (str): [optional]  # noqa: E501
            token (str): [optional]  # noqa: E501
        """
        # Pop framework-reserved kwargs before treating the rest as attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        self._self = _self
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, direct construction rejects
            # read-only attributes.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SequenceStatus(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # The single 'value' attribute is an enum of sequencing statuses.
    allowed_values = {
        ('value',): {
            'RECEIVED': "received",
            'SENT-TO-SEQUENCING': "sent-to-sequencing",
            'COMPLETED-SEQUENCING': "completed-sequencing",
            'COMPLETED-QC': "completed-qc",
            'FAILED-QC': "failed-qc",
            'UPLOADED': "uploaded",
            'UNKNOWN': "unknown",
        },
    }

    validations = {
    }

    # ModelSimple wraps a single primitive value; no additional properties.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        # NOTE: generated cached_property descriptors are declared without
        # `self` — the custom cached_property from model_utils invokes them
        # with no arguments.
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """SequenceStatus - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501
        Keyword Args:
            value (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # 'value' may arrive as a keyword or as the first positional argument.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Any remaining kwargs are unknown to this single-value model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """SequenceStatus - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501
        Keyword Args:
            value (str): Status of sequencing., must be one of ["received", "sent-to-sequencing", "completed-sequencing", "completed-qc", "failed-qc", "uploaded", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Bypass OpenApiModel.__new__ dispatch when deserializing.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Deferred import of model dependencies (OpenAPI Generator pattern).

    Called from the model's cached properties rather than at module import
    time — presumably to avoid circular imports between generated model
    modules (TODO confirm).
    """
    from sample_metadata.model.sample_type import SampleType
    globals()['SampleType'] = SampleType
class NewSample(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'external_id': (str,), # noqa: E501
'type': (SampleType,), # noqa: E501
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
'participant_id': (int,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'external_id': 'external_id', # noqa: E501
'type': 'type', # noqa: E501
'meta': 'meta', # noqa: E501
'participant_id': 'participant_id', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, external_id, type, *args, **kwargs): # noqa: E501
"""NewSample - a model defined in OpenAPI
Args:
external_id (str):
type (SampleType):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {} # noqa: E501
participant_id (int): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.external_id = external_id
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, external_id, type, *args, **kwargs):  # noqa: E501
    """NewSample - a model defined in OpenAPI.

    Args:
        external_id (str): external identifier for the sample.
        type (SampleType): the kind of physical sample.

    Keyword Args:
        meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            arbitrary metadata; if omitted the server will use the
            default value of {}. [optional]
        participant_id (int): [optional]
        _check_type (bool): type-check inputs against openapi_types
            when True. Defaults to True.
        _path_to_item (tuple/list): keys/indices locating this model
            inside the received data; used in error messages.
        _spec_property_naming (bool): True when input keys use the
            serialized (spec) names instead of pythonic snake case.
        _configuration (Configuration): configuration to use when
            deserializing; enables type conversion when provided.
        _visited_composed_classes (tuple): composed-schema classes
            already traversed, so a discriminator is not followed twice.
    """
    check_type = kwargs.pop('_check_type', True)
    spec_property_naming = kwargs.pop('_spec_property_naming', False)
    path_to_item = kwargs.pop('_path_to_item', ())
    configuration = kwargs.pop('_configuration', None)
    visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

    if args:
        raise ApiTypeError(
            "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                args,
                self.__class__.__name__,
            ),
            path_to_item=path_to_item,
            valid_classes=(self.__class__,),
        )

    # _data_store must exist before the attribute assignments below,
    # which are routed through the model's __setattr__.
    self._data_store = {}
    self._check_type = check_type
    self._spec_property_naming = spec_property_naming
    self._path_to_item = path_to_item
    self._configuration = configuration
    self._visited_composed_classes = visited_composed_classes + (self.__class__,)
    self.external_id = external_id
    self.type = type

    for name, val in kwargs.items():
        should_discard = (
            name not in self.attribute_map
            and self._configuration is not None
            and self._configuration.discard_unknown_keys
            and self.additional_properties_type is None
        )
        if should_discard:
            # Drop properties the spec does not declare.
            continue
        setattr(self, name, val)
        if name in self.read_only_vars:
            raise ApiAttributeError(f"`{name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                    f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SampleType(ModelSimple):
    """Enum model for the type of a physical sample.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    A ModelSimple wraps a single ``value`` attribute; for this model the
    value is a string restricted to the enum members in ``allowed_values``.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (here
            ``('value',)``) to a dict of {LABEL: value} pairs holding the
            legal enum values.
        validations (dict): maps the tuple path to an attribute to its
            validation rules (max_length, min_length, regex, ...).
        additional_properties_type (tuple): classes accepted as values of
            additional (undeclared) properties; None means none allowed.
    """

    # 'value' must be one of these enum strings.
    allowed_values = {
        ('value',): {
            'BLOOD': "blood",
            'SALIVA': "saliva",
        },
    }

    # No length/range/regex validations apply to this model.
    validations = {
    }

    # Undeclared properties are rejected.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """Map attribute name to its accepted types.

        Defined as a (cached) method rather than a class attribute
        because a model may have properties typed as the model itself,
        so this must run after the class is loaded.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # Simple enum models carry no polymorphic discriminator.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Instance attributes handled directly by __setattr__ rather than
    # being routed into the model's _data_store.
    required_properties = {
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    }

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build a SampleType from one enum value.

        The value is given either positionally (``args[0]``) or as the
        keyword ``value`` -- not both -- and must be one of
        ["blood", "saliva"] (enum describing types of physical samples).

        Keyword Args:
            value (str): the enum value.
            _check_type (bool): type-check inputs against openapi_types
                when True. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received data; used in error messages.
            _spec_property_naming (bool): True when input keys use the
                serialized (spec) names instead of pythonic snake case.
            _configuration (Configuration): configuration to use when
                deserializing; enables type conversion when provided.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed, so a discriminator is not followed
                twice.
        """
        # Popped first so any error below can report its location.
        path_to_item = kwargs.pop('_path_to_item', ())

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            # Consume the first positional; keep the rest for the check below.
            value, args = args[0], list(args[1:])
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        # _data_store must exist before self.value triggers __setattr__.
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = value

        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Factory used when deserializing server data into a SampleType.

        Accepts the same arguments as ``__init__``: the value positionally
        or as the ``value`` keyword (one of ["blood", "saliva"]), plus the
        underscore-prefixed framework keywords. Returns the new instance.
        """
        path_to_item = kwargs.pop('_path_to_item', ())
        # Allocate without running __init__; attributes are set below.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            value, args = args[0], list(args[1:])
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = value

        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SequenceType(ModelSimple):
    """Enum model for the type of sequencing performed.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    A ModelSimple wraps a single ``value`` attribute; for this model the
    value is a string restricted to the enum members in ``allowed_values``.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (here
            ``('value',)``) to a dict of {LABEL: value} pairs holding the
            legal enum values.
        validations (dict): maps the tuple path to an attribute to its
            validation rules (max_length, min_length, regex, ...).
        additional_properties_type (tuple): classes accepted as values of
            additional (undeclared) properties; None means none allowed.
    """

    # 'value' must be one of these enum strings.
    allowed_values = {
        ('value',): {
            'GENOME': "genome",
            'EXOME': "exome",
            'TRANSCRIPTOME': "transcriptome",
            'CHIP': "chip",
            'MTSEQ': "mtseq",
        },
    }

    # No length/range/regex validations apply to this model.
    validations = {
    }

    # Undeclared properties are rejected.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """Map attribute name to its accepted types.

        Defined as a (cached) method rather than a class attribute
        because a model may have properties typed as the model itself,
        so this must run after the class is loaded.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # Simple enum models carry no polymorphic discriminator.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Instance attributes handled directly by __setattr__ rather than
    # being routed into the model's _data_store.
    required_properties = {
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    }

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build a SequenceType from one enum value.

        The value is given either positionally (``args[0]``) or as the
        keyword ``value`` -- not both -- and must be one of
        ["genome", "exome", "transcriptome", "chip", "mtseq"]
        (type of sequencing).

        Keyword Args:
            value (str): the enum value.
            _check_type (bool): type-check inputs against openapi_types
                when True. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received data; used in error messages.
            _spec_property_naming (bool): True when input keys use the
                serialized (spec) names instead of pythonic snake case.
            _configuration (Configuration): configuration to use when
                deserializing; enables type conversion when provided.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed, so a discriminator is not followed
                twice.
        """
        # Popped first so any error below can report its location.
        path_to_item = kwargs.pop('_path_to_item', ())

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            # Consume the first positional; keep the rest for the check below.
            value, args = args[0], list(args[1:])
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        # _data_store must exist before self.value triggers __setattr__.
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = value

        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Factory used when deserializing server data into a SequenceType.

        Accepts the same arguments as ``__init__``: the value positionally
        or as the ``value`` keyword (one of ["genome", "exome",
        "transcriptome", "chip", "mtseq"]), plus the underscore-prefixed
        framework keywords. Returns the new instance.
        """
        path_to_item = kwargs.pop('_path_to_item', ())
        # Allocate without running __init__; attributes are set below.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            value, args = args[0], list(args[1:])
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = value

        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class MetaSearchEntityPrefix(ModelSimple):
    """Enum model linking search prefixes to tables.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.

    A ModelSimple wraps a single ``value`` attribute; for this model the
    value is a string restricted to the enum members in ``allowed_values``.

    Attributes:
        allowed_values (dict): maps the tuple path to an attribute (here
            ``('value',)``) to a dict of {LABEL: value} pairs holding the
            legal enum values.
        validations (dict): maps the tuple path to an attribute to its
            validation rules (max_length, min_length, regex, ...).
        additional_properties_type (tuple): classes accepted as values of
            additional (undeclared) properties; None means none allowed.
    """

    # 'value' must be one of these enum strings.
    allowed_values = {
        ('value',): {
            'P': "p",
            'S': "s",
            'SQ': "sq",
            'F': "f",
        },
    }

    # No length/range/regex validations apply to this model.
    validations = {
    }

    # Undeclared properties are rejected.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """Map attribute name to its accepted types.

        Defined as a (cached) method rather than a class attribute
        because a model may have properties typed as the model itself,
        so this must run after the class is loaded.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # Simple enum models carry no polymorphic discriminator.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Instance attributes handled directly by __setattr__ rather than
    # being routed into the model's _data_store.
    required_properties = {
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    }

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Build a MetaSearchEntityPrefix from one enum value.

        The value is given either positionally (``args[0]``) or as the
        keyword ``value`` -- not both -- and must be one of
        ["p", "s", "sq", "f"] (links prefixes to tables).

        Keyword Args:
            value (str): the enum value.
            _check_type (bool): type-check inputs against openapi_types
                when True. Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received data; used in error messages.
            _spec_property_naming (bool): True when input keys use the
                serialized (spec) names instead of pythonic snake case.
            _configuration (Configuration): configuration to use when
                deserializing; enables type conversion when provided.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed, so a discriminator is not followed
                twice.
        """
        # Popped first so any error below can report its location.
        path_to_item = kwargs.pop('_path_to_item', ())

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            # Consume the first positional; keep the rest for the check below.
            value, args = args[0], list(args[1:])
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        # _data_store must exist before self.value triggers __setattr__.
        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = value

        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Factory used when deserializing server data into a
        MetaSearchEntityPrefix.

        Accepts the same arguments as ``__init__``: the value positionally
        or as the ``value`` keyword (one of ["p", "s", "sq", "f"]), plus
        the underscore-prefixed framework keywords. Returns the new
        instance.
        """
        path_to_item = kwargs.pop('_path_to_item', ())
        # Allocate without running __init__; attributes are set below.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            value, args = args[0], list(args[1:])
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_property_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_property_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited_composed_classes + (self.__class__,)
        self.value = value

        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class NestedFamily(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
# Enum constraints per attribute path; empty -> no enum-restricted fields.
allowed_values = {
}
# Validation rules (max_length, regex, ...) per attribute path; none here.
validations = {
}

@cached_property
def additional_properties_type():
    """
    This must be a method because a model may have properties that are
    of type self, this must run after the class is loaded
    """
    # Any JSON-compatible type is accepted for undeclared properties.
    return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

# The model itself may not be null in payloads.
_nullable = False

@cached_property
def openapi_types():
    """
    This must be a method because a model may have properties that are
    of type self, this must run after the class is loaded
    Returns
        openapi_types (dict): The key is attribute name
        and the value is attribute type.
    """
    return {
        'id': (int,),  # noqa: E501
        'external_id': (str,),  # noqa: E501
    }

@cached_property
def discriminator():
    # No polymorphic discriminator on this model.
    return None

# Pythonic attribute name -> JSON key in the OpenAPI definition.
attribute_map = {
    'id': 'id',  # noqa: E501
    'external_id': 'external_id',  # noqa: E501
}
# No attributes are server-assigned/read-only (empty container; membership
# tests only, so the dict literal is equivalent to a set here).
read_only_vars = {
}
# Not a composed (allOf/oneOf/anyOf) schema.
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, id, external_id, *args, **kwargs):  # noqa: E501
    """Factory used when deserializing server data into a NestedFamily.

    Args:
        id (int): family identifier.
        external_id (str): external family identifier.

    Keyword Args:
        _check_type (bool): type-check inputs against openapi_types
            when True. Defaults to True.
        _path_to_item (tuple/list): keys/indices locating this model
            inside the received data; used in error messages.
        _spec_property_naming (bool): True when input keys use the
            serialized (spec) names instead of pythonic snake case.
        _configuration (Configuration): configuration to use when
            deserializing; enables type conversion when provided.
        _visited_composed_classes (tuple): composed-schema classes
            already traversed, so a discriminator is not followed twice.

    Returns:
        NestedFamily: the newly allocated and populated instance.
    """
    check_type = kwargs.pop('_check_type', True)
    spec_property_naming = kwargs.pop('_spec_property_naming', False)
    path_to_item = kwargs.pop('_path_to_item', ())
    configuration = kwargs.pop('_configuration', None)
    visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
    # Allocate without running __init__; attributes are set below.
    self = super(OpenApiModel, cls).__new__(cls)

    if args:
        raise ApiTypeError(
            "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                args,
                self.__class__.__name__,
            ),
            path_to_item=path_to_item,
            valid_classes=(self.__class__,),
        )

    # _data_store must exist before the attribute assignments below,
    # which are routed through the model's __setattr__.
    self._data_store = {}
    self._check_type = check_type
    self._spec_property_naming = spec_property_naming
    self._path_to_item = path_to_item
    self._configuration = configuration
    self._visited_composed_classes = visited_composed_classes + (self.__class__,)
    self.id = id
    self.external_id = external_id

    for name, val in kwargs.items():
        should_discard = (
            name not in self.attribute_map
            and self._configuration is not None
            and self._configuration.discard_unknown_keys
            and self.additional_properties_type is None
        )
        if should_discard:
            # Drop properties the spec does not declare.
            continue
        setattr(self, name, val)
    return self
# Instance attributes handled directly by __setattr__ rather than being
# routed into the model's _data_store.
required_properties = {
    '_data_store',
    '_check_type',
    '_spec_property_naming',
    '_path_to_item',
    '_configuration',
    '_visited_composed_classes',
}
@convert_js_args_to_python_args
def __init__(self, id, external_id, *args, **kwargs): # noqa: E501
"""NestedFamily - a model defined in OpenAPI
Args:
id (int):
external_id (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.id = id
self.external_id = external_id
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/sample_metadata/model/nested_family.py | 0.599485 | 0.184804 | nested_family.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class FamilyUpdateModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted attributes on this model.
    allowed_values = {
    }

    # No length/range/regex validations on this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'id': (int,),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'description': (str,),  # noqa: E501
            'coded_phenotype': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'id': 'id',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'description': 'description',  # noqa: E501
        'coded_phenotype': 'coded_phenotype',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, *args, **kwargs):  # noqa: E501
        """FamilyUpdateModel - a model defined in OpenAPI

        Args:
            id (int):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_id (str): [optional] # noqa: E501
            description (str): [optional] # noqa: E501
            coded_phenotype (str): [optional] # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Bypass OpenApiModel.__new__ so server-supplied (possibly read-only)
        # data can be set directly on a fresh instance.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Internal bookkeeping attributes that every instance must carry.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, id, *args, **kwargs):  # noqa: E501
        """FamilyUpdateModel - a model defined in OpenAPI

        Args:
            id (int):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_id (str): [optional] # noqa: E501
            description (str): [optional] # noqa: E501
            coded_phenotype (str): [optional] # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ExtraParticipantImporterHandler(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Enum values the wrapped `value` attribute may take.
    allowed_values = {
        ('value',): {
            'FAIL': "fail",
            'IGNORE': "ignore",
            'ADD': "add",
        },
    }

    validations = {
    }

    # Simple (scalar) model: no additional properties are accepted.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes that every instance must carry.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """ExtraParticipantImporterHandler - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501

        Keyword Args:
            value (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # `value` may arrive as the single positional argument or as a
        # keyword, but it has no default and must be supplied one way.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """ExtraParticipantImporterHandler - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501

        Keyword Args:
            value (str): How to handle extra participants during metadata import., must be one of ["fail", "ignore", "add", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Bypass OpenApiModel.__new__ so server-supplied data can be set
        # directly on a fresh instance.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class AnalysisStatus(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Enum values the wrapped `value` attribute may take.
    allowed_values = {
        ('value',): {
            'QUEUED': "queued",
            'IN-PROGRESS': "in-progress",
            'FAILED': "failed",
            'COMPLETED': "completed",
            'UNKNOWN': "unknown",
        },
    }

    validations = {
    }

    # Simple (scalar) model: no additional properties are accepted.
    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes that every instance must carry.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """AnalysisStatus - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501

        Keyword Args:
            value (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # `value` may arrive as the single positional argument or as a
        # keyword, but it has no default and must be supplied one way.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """AnalysisStatus - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not in both.

        Args:
            args[0] (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501

        Keyword Args:
            value (str): Status that an analysis can be run., must be one of ["queued", "in-progress", "failed", "completed", "unknown", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())

        # Bypass OpenApiModel.__new__ so server-supplied data can be set
        # directly on a fresh instance.
        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Defer importing SampleType until after this module loads.

    Publishing the class into this module's globals lets the cached
    ``openapi_types``/``additional_properties_type`` methods resolve
    ``SampleType`` by name without a circular import at load time.
    """
    from sample_metadata.model import sample_type
    globals()['SampleType'] = sample_type.SampleType
class SampleUpdateModel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
'type': (SampleType,), # noqa: E501
'participant_id': (int,), # noqa: E501
'external_id': (str,), # noqa: E501
'active': (bool,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'meta': 'meta', # noqa: E501
'type': 'type', # noqa: E501
'participant_id': 'participant_id', # noqa: E501
'external_id': 'external_id', # noqa: E501
'active': 'active', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""SampleUpdateModel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {} # noqa: E501
type (SampleType): [optional] # noqa: E501
participant_id (int): [optional] # noqa: E501
external_id (str): [optional] # noqa: E501
active (bool): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""SampleUpdateModel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {} # noqa: E501
type (SampleType): [optional] # noqa: E501
participant_id (int): [optional] # noqa: E501
external_id (str): [optional] # noqa: E501
active (bool): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/sample_metadata/model/sample_update_model.py | 0.568296 | 0.191082 | sample_update_model.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ExportType(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Enum-like model wrapping a single ``value`` that must be one of
    ``"csv"``, ``"tsv"`` or ``"json"``.

    Attributes:
        allowed_values (dict): attribute path -> allowed enum values.
        validations (dict): attribute path -> validation rules
            (max_length, regex, ...).
        additional_properties_type (tuple): types accepted for extra
            properties.
    """

    # The only attribute path is ('value',); these are its legal values.
    allowed_values = {
        ('value',): {
            'CSV': "csv",
            'TSV': "tsv",
            'JSON': "json",
        },
    }

    # No extra validation rules apply to this model.
    validations = {
    }

    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """Attribute name -> accepted type(s).

        A method rather than a plain dict so that properties typed as the
        class itself resolve after the class is fully loaded.
        """
        return {
            'value': (str,),
        }

    @cached_property
    def discriminator():
        # No discriminator is defined for this model.
        return None

    attribute_map = {}

    read_only_vars = set()

    _composed_schemas = None

    # Internal bookkeeping attributes that every instance carries.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """ExportType - a model defined in OpenAPI

        The wrapped value can be passed either positionally or as the
        ``value`` keyword, but not both.

        Args:
            args[0] (str): Wraps up common properties and allows for parameterisation of some table exports.., must be one of ["csv", "tsv", "json", ]  # noqa: E501

        Keyword Args:
            value (str): Wraps up common properties and allows for parameterisation of some table exports.., must be one of ["csv", "tsv", "json", ]  # noqa: E501
            _check_type (bool): type-check values against ``openapi_types``.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received payload.
            _spec_property_naming (bool): True when incoming variable names
                use the serialized (spec) naming. Defaults to False.
            _configuration (Configuration): configuration used when
                deserializing a file_type parameter.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed (discriminator bookkeeping).
        """
        # Popped early: the error paths below need it before other kwargs
        # are parsed.
        path_to_item = kwargs.pop('_path_to_item', ())

        if 'value' in kwargs:
            val = kwargs.pop('value')
        elif args:
            args = list(args)
            val = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited = kwargs.pop('_visited_composed_classes', ())

        # After consuming the value, any leftover positional args are invalid.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited + (self.__class__,)
        self.value = val

        # Any remaining keyword args are unknown to this simple model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build an ExportType from deserialized API data.

        Same contract as ``__init__`` (value positionally or as the
        ``value`` keyword, not both), but allocates the instance via
        ``OpenApiModel.__new__`` and returns it.

        Args:
            args[0] (str): Wraps up common properties and allows for parameterisation of some table exports.., must be one of ["csv", "tsv", "json", ]  # noqa: E501

        Keyword Args:
            value (str): Wraps up common properties and allows for parameterisation of some table exports.., must be one of ["csv", "tsv", "json", ]  # noqa: E501
            _check_type (bool): type-check values against ``openapi_types``.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received payload.
            _spec_property_naming (bool): True when incoming variable names
                use the serialized (spec) naming. Defaults to False.
            _configuration (Configuration): configuration used when
                deserializing a file_type parameter.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed (discriminator bookkeeping).
        """
        # Popped early: the error paths below need it before other kwargs
        # are parsed.
        path_to_item = kwargs.pop('_path_to_item', ())

        self = super(OpenApiModel, cls).__new__(cls)

        if 'value' in kwargs:
            val = kwargs.pop('value')
        elif args:
            args = list(args)
            val = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        check_type = kwargs.pop('_check_type', True)
        spec_naming = kwargs.pop('_spec_property_naming', False)
        configuration = kwargs.pop('_configuration', None)
        visited = kwargs.pop('_visited_composed_classes', ())

        # After consuming the value, any leftover positional args are invalid.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited + (self.__class__,)
        self.value = val

        # Any remaining keyword args are unknown to this simple model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    # Deferred import, published via globals() so the cached_property type
    # lookups in SearchResponseModel can resolve SearchResponse at call time
    # (presumably to avoid import cycles between generated models - the
    # import is intentionally not done at module load).
    from sample_metadata.model.search_response import SearchResponse
    globals()['SearchResponse'] = SearchResponse
class SearchResponseModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Model with a single required ``responses`` property.

    Attributes:
        allowed_values (dict): attribute path -> allowed enum values.
        attribute_map (dict): pythonic attribute name -> JSON key.
        discriminator_value_class_map (dict): discriminator value -> class
            name.
        validations (dict): attribute path -> validation rules
            (max_length, regex, ...).
        additional_properties_type (tuple): types accepted for extra
            properties.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not declared in the spec.

        A method rather than a plain attribute so that properties typed as
        the class itself resolve after the class is fully loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """Attribute name -> accepted type(s).

        A method rather than a plain dict so that properties typed as the
        class itself resolve after the class is fully loaded.
        """
        lazy_import()
        return {
            'responses': ([SearchResponse],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator is defined for this model.
        return None

    # Pythonic attribute name -> JSON key in the OpenAPI document.
    attribute_map = {
        'responses': 'responses',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    # Internal bookkeeping attributes that every instance carries.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, responses, *args, **kwargs):  # noqa: E501
        """Build a SearchResponseModel from deserialized API data.

        Args:
            responses ([SearchResponse]):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received payload.
            _spec_property_naming (bool): True when incoming variable names
                use the serialized (spec) naming. Defaults to False.
            _configuration (Configuration): configuration used when
                deserializing a file_type parameter.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed (discriminator bookkeeping).
        """
        check_type = kwargs.pop('_check_type', True)
        spec_naming = kwargs.pop('_spec_property_naming', False)
        path_to_item = kwargs.pop('_path_to_item', ())
        configuration = kwargs.pop('_configuration', None)
        visited = kwargs.pop('_visited_composed_classes', ())

        self = super(OpenApiModel, cls).__new__(cls)

        # Model properties beyond the required ones are keyword-only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited + (self.__class__,)
        self.responses = responses

        for name, value in kwargs.items():
            if (name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Property unknown to the spec and configured to be dropped.
                continue
            setattr(self, name, value)
        return self

    @convert_js_args_to_python_args
    def __init__(self, responses, *args, **kwargs):  # noqa: E501
        """SearchResponseModel - a model defined in OpenAPI

        Args:
            responses ([SearchResponse]):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received payload.
            _spec_property_naming (bool): True when incoming variable names
                use the serialized (spec) naming. Defaults to False.
            _configuration (Configuration): configuration used when
                deserializing a file_type parameter.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed (discriminator bookkeeping).
        """
        check_type = kwargs.pop('_check_type', True)
        spec_naming = kwargs.pop('_spec_property_naming', False)
        path_to_item = kwargs.pop('_path_to_item', ())
        configuration = kwargs.pop('_configuration', None)
        visited = kwargs.pop('_visited_composed_classes', ())

        # Model properties beyond the required ones are keyword-only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited + (self.__class__,)
        self.responses = responses

        for name, value in kwargs.items():
            if (name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Property unknown to the spec and configured to be dropped.
                continue
            setattr(self, name, value)
            if name in self.read_only_vars:
                raise ApiAttributeError(f"`{name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class WebProject(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Model with four required properties: ``id``, ``name``, ``dataset``
    and ``meta``.

    Attributes:
        allowed_values (dict): attribute path -> allowed enum values.
        attribute_map (dict): pythonic attribute name -> JSON key.
        discriminator_value_class_map (dict): discriminator value -> class
            name.
        validations (dict): attribute path -> validation rules
            (max_length, regex, ...).
        additional_properties_type (tuple): types accepted for extra
            properties.
    """

    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """Types accepted for properties not declared in the spec.

        A method rather than a plain attribute so that properties typed as
        the class itself resolve after the class is fully loaded.
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """Attribute name -> accepted type(s).

        A method rather than a plain dict so that properties typed as the
        class itself resolve after the class is fully loaded.
        """
        return {
            'id': (int,),  # noqa: E501
            'name': (str,),  # noqa: E501
            'dataset': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No discriminator is defined for this model.
        return None

    # Pythonic attribute name -> JSON key in the OpenAPI document.
    attribute_map = {
        'id': 'id',  # noqa: E501
        'name': 'name',  # noqa: E501
        'dataset': 'dataset',  # noqa: E501
        'meta': 'meta',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    # Internal bookkeeping attributes that every instance carries.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, name, dataset, meta, *args, **kwargs):  # noqa: E501
        """Build a WebProject from deserialized API data.

        Args:
            id (int):
            name (str):
            dataset (str):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received payload.
            _spec_property_naming (bool): True when incoming variable names
                use the serialized (spec) naming. Defaults to False.
            _configuration (Configuration): configuration used when
                deserializing a file_type parameter.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed (discriminator bookkeeping).
        """
        check_type = kwargs.pop('_check_type', True)
        spec_naming = kwargs.pop('_spec_property_naming', False)
        path_to_item = kwargs.pop('_path_to_item', ())
        configuration = kwargs.pop('_configuration', None)
        visited = kwargs.pop('_visited_composed_classes', ())

        self = super(OpenApiModel, cls).__new__(cls)

        # Model properties beyond the required ones are keyword-only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited + (self.__class__,)
        self.id = id
        self.name = name
        self.dataset = dataset
        self.meta = meta

        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Property unknown to the spec and configured to be dropped.
                continue
            setattr(self, var_name, var_value)
        return self

    @convert_js_args_to_python_args
    def __init__(self, id, name, dataset, meta, *args, **kwargs):  # noqa: E501
        """WebProject - a model defined in OpenAPI

        Args:
            id (int):
            name (str):
            dataset (str):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``.
                Defaults to True.
            _path_to_item (tuple/list): keys/indices locating this model
                inside the received payload.
            _spec_property_naming (bool): True when incoming variable names
                use the serialized (spec) naming. Defaults to False.
            _configuration (Configuration): configuration used when
                deserializing a file_type parameter.
            _visited_composed_classes (tuple): composed-schema classes
                already traversed (discriminator bookkeeping).
        """
        check_type = kwargs.pop('_check_type', True)
        spec_naming = kwargs.pop('_spec_property_naming', False)
        path_to_item = kwargs.pop('_path_to_item', ())
        configuration = kwargs.pop('_configuration', None)
        visited = kwargs.pop('_visited_composed_classes', ())

        # Model properties beyond the required ones are keyword-only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = check_type
        self._spec_property_naming = spec_naming
        self._path_to_item = path_to_item
        self._configuration = configuration
        self._visited_composed_classes = visited + (self.__class__,)
        self.id = id
        self.name = name
        self.dataset = dataset
        self.meta = meta

        for var_name, var_value in kwargs.items():
            if (var_name not in self.attribute_map
                    and self._configuration is not None
                    and self._configuration.discard_unknown_keys
                    and self.additional_properties_type is None):
                # Property unknown to the spec and configured to be dropped.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    # Deferred imports, published via globals() so the cached_property type
    # lookups in NestedSequence can resolve these names at call time
    # (presumably to avoid import cycles between generated models - the
    # imports are intentionally not done at module load).
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_technology import SequenceTechnology
    from sample_metadata.model.sequence_type import SequenceType
    globals()['SequenceStatus'] = SequenceStatus
    globals()['SequenceTechnology'] = SequenceTechnology
    globals()['SequenceType'] = SequenceType
class NestedSequence(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # This model declares no enum-restricted values and no value validations.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for properties not listed
        # in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # lazy_import() populates SequenceType/SequenceStatus/SequenceTechnology
        # in this module's globals before they are referenced below.
        lazy_import()
        return {
            'id': (int,),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'technology': (SequenceTechnology,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # This model is not part of a discriminated (polymorphic) schema.
        return None
    # Python attribute name -> JSON key; identical here (snake_case on both sides).
    attribute_map = {
        'id': 'id',  # noqa: E501
        'type': 'type',  # noqa: E501
        'status': 'status',  # noqa: E501
        'technology': 'technology',  # noqa: E501
        'meta': 'meta',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, type, status, technology, meta, *args, **kwargs):  # noqa: E501
        """NestedSequence - a model defined in OpenAPI
        Args:
            id (int):
            type (SequenceType):
            status (SequenceStatus):
            technology (SequenceTechnology):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop framework-control kwargs first; whatever remains is model data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ so server-supplied (possibly read-only) data can be set.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        self.type = type
        self.status = status
        self.technology = technology
        self.meta = meta
        # Remaining kwargs are either declared properties or additional properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal bookkeeping attributes that bypass property handling in setattr.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, id, type, status, technology, meta, *args, **kwargs):  # noqa: E501
        """NestedSequence - a model defined in OpenAPI
        Args:
            id (int):
            type (SequenceType):
            status (SequenceStatus):
            technology (SequenceTechnology):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        self.type = type
        self.status = status
        self.technology = technology
        self.meta = meta
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, client-side construction rejects
            # read-only attributes (read_only_vars is empty for this model).
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import the sequence enum models on first use.

    The imports are deferred to module-function call time (rather than done
    at module import) and the resulting classes are published into this
    module's globals, where ``openapi_types`` and
    ``additional_properties_type`` look them up by name.
    """
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_technology import SequenceTechnology
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(
        SequenceStatus=SequenceStatus,
        SequenceTechnology=SequenceTechnology,
        SequenceType=SequenceType,
    )
class SequenceUpsert(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # This model declares no enum-restricted values and no value validations.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for properties not listed
        # in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # lazy_import() populates the Sequence* enum classes in module globals
        # before they are referenced below.
        lazy_import()
        return {
            'external_ids': ({str: (str,)},),  # noqa: E501
            'sample_id': (int,),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'technology': (SequenceTechnology,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
            'id': (int,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # This model is not part of a discriminated (polymorphic) schema.
        return None
    # Python attribute name -> JSON key; identical here (snake_case on both sides).
    attribute_map = {
        'external_ids': 'external_ids',  # noqa: E501
        'sample_id': 'sample_id',  # noqa: E501
        'status': 'status',  # noqa: E501
        'technology': 'technology',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
        'id': 'id',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """SequenceUpsert - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_ids ({str: (str,)}): [optional] # noqa: E501
            sample_id (int): [optional] # noqa: E501
            status (SequenceStatus): [optional] # noqa: E501
            technology (SequenceTechnology): [optional] # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
            type (SequenceType): [optional] # noqa: E501
            id (int): [optional] # noqa: E501
        """
        # Pop framework-control kwargs first; whatever remains is model data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ so server-supplied (possibly read-only) data can be set.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # All properties of this model are optional, so everything arrives
        # through kwargs.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal bookkeeping attributes that bypass property handling in setattr.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """SequenceUpsert - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_ids ({str: (str,)}): [optional] # noqa: E501
            sample_id (int): [optional] # noqa: E501
            status (SequenceStatus): [optional] # noqa: E501
            technology (SequenceTechnology): [optional] # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
            type (SequenceType): [optional] # noqa: E501
            id (int): [optional] # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, client-side construction rejects
            # read-only attributes (read_only_vars is empty for this model).
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import the analysis enum models on first use.

    Importing at call time instead of module-load time defers the
    dependency; the classes are installed into this module's globals so
    ``openapi_types`` and ``additional_properties_type`` can resolve
    them by name.
    """
    from sample_metadata.model.analysis_status import AnalysisStatus
    from sample_metadata.model.analysis_type import AnalysisType
    globals().update(
        AnalysisStatus=AnalysisStatus,
        AnalysisType=AnalysisType,
    )
class AnalysisQueryModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # This model declares no enum-restricted values and no value validations.
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for properties not listed
        # in attribute_map.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # lazy_import() populates AnalysisStatus/AnalysisType in module globals
        # before they are referenced below.
        lazy_import()
        return {
            'projects': ([str],),  # noqa: E501
            'sample_ids': ([str],),  # noqa: E501
            'type': (AnalysisType,),  # noqa: E501
            'status': (AnalysisStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'output': (str,),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # This model is not part of a discriminated (polymorphic) schema.
        return None
    # Python attribute name -> JSON key; identical here (snake_case on both sides).
    attribute_map = {
        'projects': 'projects',  # noqa: E501
        'sample_ids': 'sample_ids',  # noqa: E501
        'type': 'type',  # noqa: E501
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'output': 'output',  # noqa: E501
        'active': 'active',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, projects, *args, **kwargs):  # noqa: E501
        """AnalysisQueryModel - a model defined in OpenAPI
        Args:
            projects ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            sample_ids ([str]): [optional] # noqa: E501
            type (AnalysisType): [optional] # noqa: E501
            status (AnalysisStatus): [optional] # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
            output (str): [optional] # noqa: E501
            active (bool): [optional] # noqa: E501
        """
        # Pop framework-control kwargs first; whatever remains is model data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ so server-supplied (possibly read-only) data can be set.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # 'projects' is the only required property of this model.
        self.projects = projects
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Internal bookkeeping attributes that bypass property handling in setattr.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, projects, *args, **kwargs):  # noqa: E501
        """AnalysisQueryModel - a model defined in OpenAPI
        Args:
            projects ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            sample_ids ([str]): [optional] # noqa: E501
            type (AnalysisType): [optional] # noqa: E501
            status (AnalysisStatus): [optional] # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
            output (str): [optional] # noqa: E501
            active (bool): [optional] # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.projects = projects
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Unlike _from_openapi_data, client-side construction rejects
            # read-only attributes (read_only_vars is empty for this model).
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ParticipantUpdateModel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for properties not declared
        # in attribute_map; this model needs no lazy_import() because it
        # references no other model classes.
        return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # All property types are builtins, so no lazy_import() is needed here.
        return {
            'external_id': (str,),  # noqa: E501
            'reported_sex': (int,),  # noqa: E501
            'reported_gender': (str,),  # noqa: E501
            'karyotype': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # This model is not part of a discriminated (polymorphic) schema.
        return None
attribute_map = {
'external_id': 'external_id', # noqa: E501
'reported_sex': 'reported_sex', # noqa: E501
'reported_gender': 'reported_gender', # noqa: E501
'karyotype': 'karyotype', # noqa: E501
'meta': 'meta', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """ParticipantUpdateModel - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_id (str): [optional] # noqa: E501
            reported_sex (int): [optional] # noqa: E501
            reported_gender (str): [optional] # noqa: E501
            karyotype (str): [optional] # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
        """
        # Pop framework-control kwargs first; whatever remains is model data.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ so server-supplied (possibly read-only) data can be set.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # All properties of this model are optional, so everything arrives
        # through kwargs.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""ParticipantUpdateModel - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
external_id (str): [optional] # noqa: E501
reported_sex (int): [optional] # noqa: E501
reported_gender (str): [optional] # noqa: E501
karyotype (str): [optional] # noqa: E501
meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/sample_metadata/model/participant_update_model.py | 0.542379 | 0.177989 | participant_update_model.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ErrorResponse(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'error': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'error': 'error', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, error, *args, **kwargs): # noqa: E501
"""ErrorResponse - a model defined in OpenAPI
Args:
error (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.error = error
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, error, *args, **kwargs): # noqa: E501
"""ErrorResponse - a model defined in OpenAPI
Args:
error (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.error = error
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/sample_metadata/model/error_response.py | 0.611962 | 0.194139 | error_response.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
from sample_metadata.model.participant_upsert import ParticipantUpsert
globals()['ParticipantUpsert'] = ParticipantUpsert
class ParticipantUpsertBody(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'participants': ([ParticipantUpsert],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'participants': 'participants', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, participants, *args, **kwargs): # noqa: E501
"""ParticipantUpsertBody - a model defined in OpenAPI
Args:
participants ([ParticipantUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.participants = participants
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, participants, *args, **kwargs): # noqa: E501
"""ParticipantUpsertBody - a model defined in OpenAPI
Args:
participants ([ParticipantUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.participants = participants
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/sample_metadata/model/participant_upsert_body.py | 0.592431 | 0.184657 | participant_upsert_body.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
from sample_metadata.model.sample_batch_upsert import SampleBatchUpsert
globals()['SampleBatchUpsert'] = SampleBatchUpsert
class SampleBatchUpsertBody(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'samples': ([SampleBatchUpsert],), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'samples': 'samples', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, samples, *args, **kwargs): # noqa: E501
"""SampleBatchUpsertBody - a model defined in OpenAPI
Args:
samples ([SampleBatchUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.samples = samples
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, samples, *args, **kwargs): # noqa: E501
"""SampleBatchUpsertBody - a model defined in OpenAPI
Args:
samples ([SampleBatchUpsert]):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.samples = samples
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.") | /sample_metadata-5.7.2.tar.gz/sample_metadata-5.7.2/sample_metadata/model/sample_batch_upsert_body.py | 0.631026 | 0.201892 | sample_batch_upsert_body.py | pypi |
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
from sample_metadata.model.analysis_status import AnalysisStatus
from sample_metadata.model.analysis_type import AnalysisType
globals()['AnalysisStatus'] = AnalysisStatus
globals()['AnalysisType'] = AnalysisType
class AnalysisModel(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'sample_ids': ([str],), # noqa: E501
'type': (AnalysisType,), # noqa: E501
'status': (AnalysisStatus,), # noqa: E501
'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},), # noqa: E501
'output': (str,), # noqa: E501
'active': (bool,), # noqa: E501
'author': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'sample_ids': 'sample_ids', # noqa: E501
'type': 'type', # noqa: E501
'status': 'status', # noqa: E501
'meta': 'meta', # noqa: E501
'output': 'output', # noqa: E501
'active': 'active', # noqa: E501
'author': 'author', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, sample_ids, type, status, *args, **kwargs):  # noqa: E501
        """AnalysisModel - a model defined in OpenAPI

        Args:
            sample_ids ([str]):
            type (AnalysisType):
            status (AnalysisStatus):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            output (str): [optional]  # noqa: E501
            active (bool): [optional] if omitted the server will use the default value of True  # noqa: E501
            author (str): [optional]  # noqa: E501
        """
        # Framework-level options travel in kwargs alongside model
        # properties; pop them off first so the loop below sees only
        # model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # super(OpenApiModel, cls).__new__ skips OpenApiModel.__new__ in the
        # MRO: this classmethod is itself the deserialization entry point.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Required properties. NOTE: `type` mirrors the API schema name and
        # shadows the builtin — generated code.
        self.sample_ids = sample_ids
        self.type = type
        self.status = status

        # Optional properties: unknown keys are discarded only when the
        # configuration asks for it AND the model declines additional
        # properties; otherwise they are set like any other attribute.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
    @convert_js_args_to_python_args
    def __init__(self, sample_ids, type, status, *args, **kwargs):  # noqa: E501
        """AnalysisModel - a model defined in OpenAPI

        Args:
            sample_ids ([str]):
            type (AnalysisType):
            status (AnalysisStatus):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            output (str): [optional]  # noqa: E501
            active (bool): [optional] if omitted the server will use the default value of True  # noqa: E501
            author (str): [optional]  # noqa: E501
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Required properties. NOTE: `type` mirrors the API schema name and
        # shadows the builtin — generated code.
        self.sample_ids = sample_ids
        self.type = type
        self.status = status

        # Optional properties: unknown keys are discarded only when the
        # configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # NOTE(review): this check runs after setattr, so a read-only
                # attribute is assigned before the error is raised — upstream
                # generator behavior; do not hand-patch generated files.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import model dependencies at call time and publish them to this
    module's globals, so type references resolve after class load
    (presumably to break import cycles between generated models —
    see additional_properties_type)."""
    from sample_metadata.model.analysis_status import AnalysisStatus
    globals()['AnalysisStatus'] = AnalysisStatus
class AnalysisUpdateModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained attributes on this model, so no allowed values.
    allowed_values = {
    }

    # No length/range/regex validations are defined for this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # lazy_import() resolves forward references to sibling generated
        # models after this class object exists (avoids import cycles).
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Whether the model value itself may be null in payloads.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Imports AnalysisStatus into globals() so the type tuple below
        # resolves at call time rather than at module import.
        lazy_import()
        return {
            'status': (AnalysisStatus,),  # noqa: E501
            'output': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No oneOf/anyOf discriminator is defined for this model.
        return None

    # Maps pythonic attribute names to the JSON keys used on the wire
    # (identical here, but kept for generator consistency).
    attribute_map = {
        'status': 'status',  # noqa: E501
        'output': 'output',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'active': 'active',  # noqa: E501
    }

    # Attributes that may only be set when deserializing server data.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, status, *args, **kwargs):  # noqa: E501
        """AnalysisUpdateModel - a model defined in OpenAPI

        Args:
            status (AnalysisStatus):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            output (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # super(OpenApiModel, cls).__new__ skips OpenApiModel.__new__ in the
        # MRO: this classmethod is itself the deserialization entry point.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Required property.
        self.status = status

        # Optional properties: unknown keys are discarded only when the
        # configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Framework-internal attribute names that every instance carries;
    # presumably treated specially by ModelNormal's attribute machinery in
    # model_utils (they are not model properties) — TODO confirm there.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, status, *args, **kwargs):  # noqa: E501
        """AnalysisUpdateModel - a model defined in OpenAPI

        Args:
            status (AnalysisStatus):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            output (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            active (bool): [optional]  # noqa: E501
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Required property.
        self.status = status

        # Optional properties: unknown keys are discarded only when the
        # configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # NOTE(review): this check runs after setattr, so a read-only
                # attribute is assigned before the error is raised — upstream
                # generator behavior; do not hand-patch generated files.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class BodyGetSamples(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained attributes on this model, so no allowed values.
    allowed_values = {
    }

    # No length/range/regex validations are defined for this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # No lazy_import here: this model references no sibling models.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Whether the model value itself may be null in payloads.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'sample_ids': ([str],),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'participant_ids': ([int],),  # noqa: E501
            'project_ids': ([str],),  # noqa: E501
            'active': (bool,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No oneOf/anyOf discriminator is defined for this model.
        return None

    # Maps pythonic attribute names to the JSON keys used on the wire
    # (identical here, but kept for generator consistency).
    attribute_map = {
        'sample_ids': 'sample_ids',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'participant_ids': 'participant_ids',  # noqa: E501
        'project_ids': 'project_ids',  # noqa: E501
        'active': 'active',  # noqa: E501
    }

    # Attributes that may only be set when deserializing server data.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """BodyGetSamples - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            sample_ids ([str]): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            participant_ids ([int]): [optional]  # noqa: E501
            project_ids ([str]): [optional]  # noqa: E501
            active (bool): [optional] if omitted the server will use the default value of True  # noqa: E501
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # super(OpenApiModel, cls).__new__ skips OpenApiModel.__new__ in the
        # MRO: this classmethod is itself the deserialization entry point.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # All properties are optional: unknown keys are discarded only when
        # the configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Framework-internal attribute names that every instance carries;
    # presumably treated specially by ModelNormal's attribute machinery in
    # model_utils (they are not model properties) — TODO confirm there.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """BodyGetSamples - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            sample_ids ([str]): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            participant_ids ([int]): [optional]  # noqa: E501
            project_ids ([str]): [optional]  # noqa: E501
            active (bool): [optional] if omitted the server will use the default value of True  # noqa: E501
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # All properties are optional: unknown keys are discarded only when
        # the configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # NOTE(review): this check runs after setattr, so a read-only
                # attribute is assigned before the error is raised — upstream
                # generator behavior; do not hand-patch generated files.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ParticipantSearchResponseData(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-constrained attributes on this model, so no allowed values.
    allowed_values = {
    }

    # No length/range/regex validations are defined for this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # No lazy_import here: this model references no sibling models.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Whether the model value itself may be null in payloads.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'project': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
            'id': (int,),  # noqa: E501
            'family_external_ids': ([str],),  # noqa: E501
            'participant_external_ids': ([str],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # No oneOf/anyOf discriminator is defined for this model.
        return None

    # Maps pythonic attribute names to the JSON keys used on the wire
    # (identical here, but kept for generator consistency).
    attribute_map = {
        'project': 'project',  # noqa: E501
        'id': 'id',  # noqa: E501
        'family_external_ids': 'family_external_ids',  # noqa: E501
        'participant_external_ids': 'participant_external_ids',  # noqa: E501
    }

    # Attributes that may only be set when deserializing server data.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, project, id, family_external_ids, participant_external_ids, *args, **kwargs):  # noqa: E501
        """ParticipantSearchResponseData - a model defined in OpenAPI

        Args:
            project (bool, date, datetime, dict, float, int, list, str, none_type):
            id (int):
            family_external_ids ([str]):
            participant_external_ids ([str]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # super(OpenApiModel, cls).__new__ skips OpenApiModel.__new__ in the
        # MRO: this classmethod is itself the deserialization entry point.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Required properties. NOTE: `id` mirrors the API schema name and
        # shadows the builtin — generated code.
        self.project = project
        self.id = id
        self.family_external_ids = family_external_ids
        self.participant_external_ids = participant_external_ids

        # Extra kwargs: unknown keys are discarded only when the
        # configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Framework-internal attribute names that every instance carries;
    # presumably treated specially by ModelNormal's attribute machinery in
    # model_utils (they are not model properties) — TODO confirm there.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, project, id, family_external_ids, participant_external_ids, *args, **kwargs):  # noqa: E501
        """ParticipantSearchResponseData - a model defined in OpenAPI

        Args:
            project (bool, date, datetime, dict, float, int, list, str, none_type):
            id (int):
            family_external_ids ([str]):
            participant_external_ids ([str]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop framework-level options off kwargs before treating the
        # remainder as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            # Generated models never accept extra positional arguments.
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        # Stash framework state on the instance.
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        # Required properties. NOTE: `id` mirrors the API schema name and
        # shadows the builtin — generated code.
        self.project = project
        self.id = id
        self.family_external_ids = family_external_ids
        self.participant_external_ids = participant_external_ids

        # Extra kwargs: unknown keys are discarded only when the
        # configuration asks for it AND the model declines additional
        # properties.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                # NOTE(review): this check runs after setattr, so a read-only
                # attribute is assigned before the error is raised — upstream
                # generator behavior; do not hand-patch generated files.
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class BodyGetParticipants(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-typed attributes on this model, so no allowed values.
    allowed_values = {
    }
    # No length/bound/regex constraints declared for any attribute.
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for unknown keys.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # Instances of this model are never serialized as JSON null.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Both attributes are optional lists of participant identifiers.
        return {
            'external_participant_ids': ([str],),  # noqa: E501
            'internal_participant_ids': ([int],),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No polymorphic discriminator on this schema.
        return None
    # Python attribute name -> JSON key on the wire (identical here).
    attribute_map = {
        'external_participant_ids': 'external_participant_ids',  # noqa: E501
        'internal_participant_ids': 'internal_participant_ids',  # noqa: E501
    }
    # No server-assigned (read-only) attributes on this model.
    read_only_vars = {
    }
    # Not a composed (allOf/anyOf/oneOf) schema.
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """BodyGetParticipants - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            external_participant_ids ([str]): [optional]  # noqa: E501
            internal_participant_ids ([int]): [optional]  # noqa: E501
        """
        # Split the framework-control keywords off from model attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ so deserialization can also populate read-only
        # attributes (this classmethod performs its own setup below).
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            # Drop unknown keys only when the configuration asks for it and
            # the schema does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Instance attributes set directly on self rather than in _data_store.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """BodyGetParticipants - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            external_participant_ids ([str]): [optional]  # noqa: E501
            internal_participant_ids ([int]): [optional]  # noqa: E501
        """
        # Split the framework-control keywords off from model attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            # Drop unknown keys only when the configuration asks for it and
            # the schema does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be populated via
            # _from_openapi_data; note the attribute has already been set by
            # the time this check raises (generated-code behavior).
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SampleSearchResponseData(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-typed attributes on this model, so no allowed values.
    allowed_values = {
    }
    # No length/bound/regex constraints declared for any attribute.
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Any JSON-compatible value is accepted for unknown keys.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # Instances of this model are never serialized as JSON null.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # 'project' is an untyped (free-form) value in the spec; the id is
        # optional while the other four attributes are required.
        return {
            'project': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
            'family_external_ids': ([str],),  # noqa: E501
            'participant_external_ids': ([str],),  # noqa: E501
            'sample_external_ids': ([str],),  # noqa: E501
            'id': (str,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No polymorphic discriminator on this schema.
        return None
    # Python attribute name -> JSON key on the wire (identical here).
    attribute_map = {
        'project': 'project',  # noqa: E501
        'family_external_ids': 'family_external_ids',  # noqa: E501
        'participant_external_ids': 'participant_external_ids',  # noqa: E501
        'sample_external_ids': 'sample_external_ids',  # noqa: E501
        'id': 'id',  # noqa: E501
    }
    # No server-assigned (read-only) attributes on this model.
    read_only_vars = {
    }
    # Not a composed (allOf/anyOf/oneOf) schema.
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, project, family_external_ids, participant_external_ids, sample_external_ids, *args, **kwargs):  # noqa: E501
        """SampleSearchResponseData - a model defined in OpenAPI
        Args:
            project (bool, date, datetime, dict, float, int, list, str, none_type):
            family_external_ids ([str]):
            participant_external_ids ([str]):
            sample_external_ids ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            id (str): [optional]  # noqa: E501
        """
        # Split the framework-control keywords off from model attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass __init__ so deserialization can also populate read-only
        # attributes (this classmethod performs its own setup below).
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required attributes from the positional parameters.
        self.project = project
        self.family_external_ids = family_external_ids
        self.participant_external_ids = participant_external_ids
        self.sample_external_ids = sample_external_ids
        for var_name, var_value in kwargs.items():
            # Drop unknown keys only when the configuration asks for it and
            # the schema does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Instance attributes set directly on self rather than in _data_store.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, project, family_external_ids, participant_external_ids, sample_external_ids, *args, **kwargs):  # noqa: E501
        """SampleSearchResponseData - a model defined in OpenAPI
        Args:
            project (bool, date, datetime, dict, float, int, list, str, none_type):
            family_external_ids ([str]):
            participant_external_ids ([str]):
            sample_external_ids ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
            id (str): [optional]  # noqa: E501
        """
        # Split the framework-control keywords off from model attributes.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required attributes from the positional parameters.
        self.project = project
        self.family_external_ids = family_external_ids
        self.participant_external_ids = participant_external_ids
        self.sample_external_ids = sample_external_ids
        for var_name, var_value in kwargs.items():
            # Drop unknown keys only when the configuration asks for it and
            # the schema does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be populated via
            # _from_openapi_data; note the attribute has already been set by
            # the time this check raises (generated-code behavior).
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SequenceTechnology(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # Closed set of valid enum values for the wrapped 'value' attribute.
    # Enforcement presumably happens in the ModelSimple machinery when
    # 'value' is assigned below -- confirm in model_utils.
    allowed_values = {
        ('value',): {
            'SHORT-READ': "short-read",
            'LONG-READ': "long-read",
            'SINGLE-CELL-RNA-SEQ': "single-cell-rna-seq",
            'BULK-RNA-SEQ': "bulk-rna-seq",
        },
    }
    # No length/bound/regex constraints beyond the enum itself.
    validations = {
    }
    # A simple (primitive-wrapper) model accepts no additional properties.
    additional_properties_type = None
    # Instances of this model are never serialized as JSON null.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # The whole model is a single string value.
        return {
            'value': (str,),
        }
    @cached_property
    def discriminator():
        # No polymorphic discriminator on this schema.
        return None
    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Instance attributes set directly on self rather than in _data_store.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """SequenceTechnology - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Sequencing technologies., must be one of ["short-read", "long-read", "single-cell-rna-seq", "bulk-rna-seq", ]  # noqa: E501
        Keyword Args:
            value (str): Sequencing technologies., must be one of ["short-read", "long-read", "single-cell-rna-seq", "bulk-rna-seq", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Accept the wrapped value either as the 'value' keyword or as the
        # single positional argument; anything left over is rejected below.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        # Split the framework-control keywords off before validating extras.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Any remaining keyword arguments are invalid for a simple model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """SequenceTechnology - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Sequencing technologies., must be one of ["short-read", "long-read", "single-cell-rna-seq", "bulk-rna-seq", ]  # noqa: E501
        Keyword Args:
            value (str): Sequencing technologies., must be one of ["short-read", "long-read", "single-cell-rna-seq", "bulk-rna-seq", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                will be type checked and a TypeError will be
                raised if the wrong type is input.
                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                drill down to the model in received_data
                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                are serialized names, as specified in the OpenAPI document.
                False if the variable names in the input data
                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.
                If passed, type conversion is attempted
                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                classes that we have traveled through so that
                if we see that class again we will not use its
                discriminator again.
                When traveling through a discriminator, the
                composed schema that is
                is traveled through is added to this set.
                For example if Animal has a discriminator
                petType and we pass in "Dog", and the class Dog
                allOf includes Animal, we move through Animal
                once using the discriminator, and pick Dog.
                Then in Dog, we will make an instance of the
                Animal class but this time we won't travel
                through its discriminator because we passed in
                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Bypass __init__; this classmethod performs equivalent setup itself.
        self = super(OpenApiModel, cls).__new__(cls)
        # Accept the wrapped value either as the 'value' keyword or as the
        # single positional argument; anything left over is rejected below.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        # Split the framework-control keywords off before validating extras.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Any remaining keyword arguments are invalid for a simple model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class ValidationError(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'loc': ([bool, date, datetime, dict, float, int, list, str, none_type],), # noqa: E501
'msg': (str,), # noqa: E501
'type': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'loc': 'loc', # noqa: E501
'msg': 'msg', # noqa: E501
'type': 'type', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, loc, msg, type, *args, **kwargs): # noqa: E501
"""ValidationError - a model defined in OpenAPI
Args:
loc ([bool, date, datetime, dict, float, int, list, str, none_type]):
msg (str):
type (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.loc = loc
self.msg = msg
self.type = type
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
    @convert_js_args_to_python_args
    def __init__(self, loc, msg, type, *args, **kwargs):  # noqa: E501
        """ValidationError - a model defined in OpenAPI

        Args:
            loc ([bool, date, datetime, dict, float, int, list, str, none_type]):
            msg (str):
            type (str):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            Any remaining keyword arguments are treated as model properties.
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # All model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required properties.
        self.loc = loc
        self.msg = msg
        self.type = type
        # Remaining kwargs are optional model properties; an unknown key is
        # dropped only when the configuration asks for it AND the model
        # declares no additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set when deserializing server
            # data via _from_openapi_data, never by callers.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class BodyGetSequencesByCriteria(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.

    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Request body of optional filter criteria (presumably for a
    "get sequences by criteria" endpoint -- confirm against the API spec).

    Attributes:
        allowed_values (dict): allowed enum values keyed by attribute path
            (empty: no enum-restricted properties on this model).
        attribute_map (dict): maps pythonic attribute name -> JSON key.
        validations (dict): per-attribute validation rules such as
            max_length/min_items/regex (empty here).
        additional_properties_type (tuple): classes accepted as values of
            properties not declared in ``openapi_types``.
    """

    # No enum-restricted properties.
    allowed_values = {
    }

    # No length/range/regex validations.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Instances of this model may not be None.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> accepted type(s).
        """
        return {
            'sample_ids': ([str],),  # noqa: E501
            'sequence_ids': ([int],),  # noqa: E501
            'external_sequence_ids': ([str],),  # noqa: E501
            'seq_meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'sample_meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'projects': ([str],),  # noqa: E501
            'types': ([str],),  # noqa: E501
            'statuses': ([str],),  # noqa: E501
            'technologies': ([str],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model defines no discriminator.
        return None

    # Pythonic attribute name -> JSON key (identical for every property here).
    attribute_map = {
        'sample_ids': 'sample_ids',  # noqa: E501
        'sequence_ids': 'sequence_ids',  # noqa: E501
        'external_sequence_ids': 'external_sequence_ids',  # noqa: E501
        'seq_meta': 'seq_meta',  # noqa: E501
        'sample_meta': 'sample_meta',  # noqa: E501
        'projects': 'projects',  # noqa: E501
        'types': 'types',  # noqa: E501
        'statuses': 'statuses',  # noqa: E501
        'technologies': 'technologies',  # noqa: E501
    }

    # No read-only properties.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """BodyGetSequencesByCriteria - a model defined in OpenAPI

        Constructs an instance from deserialized server data.

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            sample_ids ([str]): [optional]
            sequence_ids ([int]): [optional]
            external_sequence_ids ([str]): [optional]
            seq_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]
            sample_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]
            projects ([str]): [optional]
            types ([str]): [optional]
            statuses ([str]): [optional]
            technologies ([str]): [optional]
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass cls.__init__; OpenApiModel.__new__ constructs the instance
        # directly -- NOTE(review): see model_utils for __new__ semantics.
        self = super(OpenApiModel, cls).__new__(cls)
        # All model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Remaining kwargs are model properties; an unknown key is dropped
        # only when the configuration asks for it AND the model declares no
        # additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Attributes stored directly on the instance rather than in the model's
    # property store -- presumably consulted by the OpenApiModel base
    # machinery in model_utils; NOTE(review): confirm there.
    required_properties = set([
          '_data_store',
          '_check_type',
          '_spec_property_naming',
          '_path_to_item',
          '_configuration',
          '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """BodyGetSequencesByCriteria - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            sample_ids ([str]): [optional]
            sequence_ids ([int]): [optional]
            external_sequence_ids ([str]): [optional]
            seq_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]
            sample_meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]
            projects ([str]): [optional]
            types ([str]): [optional]
            statuses ([str]): [optional]
            technologies ([str]): [optional]
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # All model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Remaining kwargs are model properties; an unknown key is dropped
        # only when the configuration asks for it AND the model declares no
        # additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set when deserializing server
            # data via _from_openapi_data, never by callers.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Deferred import of the model classes this module references.

    The imported classes are registered into the module globals so that
    later lookups by name (e.g. inside ``openapi_types``) resolve.
    """
    from sample_metadata.model.sample_type import SampleType
    from sample_metadata.model.sequence_upsert import SequenceUpsert
    globals().update(
        SampleType=SampleType,
        SequenceUpsert=SequenceUpsert,
    )
class SampleBatchUpsert(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.

    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Upsert payload for a sample together with its sequences (presumably for
    a batch-upsert endpoint -- confirm against the API spec).

    Attributes:
        allowed_values (dict): allowed enum values keyed by attribute path
            (empty: no enum-restricted properties on this model).
        attribute_map (dict): maps pythonic attribute name -> JSON key.
        validations (dict): per-attribute validation rules such as
            max_length/min_items/regex (empty here).
        additional_properties_type (tuple): classes accepted as values of
            properties not declared in ``openapi_types``.
    """

    # No enum-restricted properties.
    allowed_values = {
    }

    # No length/range/regex validations.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Instances of this model may not be None.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> accepted type(s).
        """
        lazy_import()
        return {
            'sequences': ([SequenceUpsert],),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'type': (SampleType,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'participant_id': (int,),  # noqa: E501
            'active': (bool,),  # noqa: E501
            'id': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model defines no discriminator.
        return None

    # Pythonic attribute name -> JSON key (identical for every property here).
    attribute_map = {
        'sequences': 'sequences',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'type': 'type',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'participant_id': 'participant_id',  # noqa: E501
        'active': 'active',  # noqa: E501
        'id': 'id',  # noqa: E501
    }

    # No read-only properties.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, sequences, *args, **kwargs):  # noqa: E501
        """SampleBatchUpsert - a model defined in OpenAPI

        Constructs an instance from deserialized server data.

        Args:
            sequences ([SequenceUpsert]):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            external_id (str): [optional]
            type (SampleType): [optional]
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {}
            participant_id (int): [optional]
            active (bool): [optional]
            id (bool, date, datetime, dict, float, int, list, str, none_type): [optional]
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass cls.__init__; OpenApiModel.__new__ constructs the instance
        # directly -- NOTE(review): see model_utils for __new__ semantics.
        self = super(OpenApiModel, cls).__new__(cls)
        # All further model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required property.
        self.sequences = sequences
        # Remaining kwargs are model properties; an unknown key is dropped
        # only when the configuration asks for it AND the model declares no
        # additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Attributes stored directly on the instance rather than in the model's
    # property store -- presumably consulted by the OpenApiModel base
    # machinery in model_utils; NOTE(review): confirm there.
    required_properties = set([
          '_data_store',
          '_check_type',
          '_spec_property_naming',
          '_path_to_item',
          '_configuration',
          '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, sequences, *args, **kwargs):  # noqa: E501
        """SampleBatchUpsert - a model defined in OpenAPI

        Args:
            sequences ([SequenceUpsert]):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            external_id (str): [optional]
            type (SampleType): [optional]
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional] if omitted the server will use the default value of {}
            participant_id (int): [optional]
            active (bool): [optional]
            id (bool, date, datetime, dict, float, int, list, str, none_type): [optional]
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # All further model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required property.
        self.sequences = sequences
        # Remaining kwargs are model properties; an unknown key is dropped
        # only when the configuration asks for it AND the model declares no
        # additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set when deserializing server
            # data via _from_openapi_data, never by callers.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Deferred import of the model classes this module references.

    The imported classes are registered into the module globals so that
    later lookups by name (e.g. inside ``openapi_types``) resolve.
    """
    from sample_metadata.model.error_response import ErrorResponse
    from sample_metadata.model.search_response_type import SearchResponseType
    globals().update(
        ErrorResponse=ErrorResponse,
        SearchResponseType=SearchResponseType,
    )
class SearchResponse(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.

    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    A single search result entry: a result ``type``, display ``title``,
    arbitrary ``data`` payload, and an optional ``error``.

    Attributes:
        allowed_values (dict): allowed enum values keyed by attribute path
            (empty: no enum-restricted properties on this model).
        attribute_map (dict): maps pythonic attribute name -> JSON key.
        validations (dict): per-attribute validation rules such as
            max_length/min_items/regex (empty here).
        additional_properties_type (tuple): classes accepted as values of
            properties not declared in ``openapi_types``.
    """

    # No enum-restricted properties.
    allowed_values = {
    }

    # No length/range/regex validations.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    # Instances of this model may not be None.
    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.

        Returns:
            openapi_types (dict): attribute name -> accepted type(s).
        """
        lazy_import()
        return {
            'type': (SearchResponseType,),  # noqa: E501
            'title': (str,),  # noqa: E501
            'data': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
            'error': (ErrorResponse,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model defines no discriminator.
        return None

    # Pythonic attribute name -> JSON key (identical for every property here).
    attribute_map = {
        'type': 'type',  # noqa: E501
        'title': 'title',  # noqa: E501
        'data': 'data',  # noqa: E501
        'error': 'error',  # noqa: E501
    }

    # No read-only properties.
    read_only_vars = {
    }

    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, type, title, data, *args, **kwargs):  # noqa: E501
        """SearchResponse - a model defined in OpenAPI

        Constructs an instance from deserialized server data.

        Args:
            type (SearchResponseType):
            title (str):
            data (bool, date, datetime, dict, float, int, list, str, none_type):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            error (ErrorResponse): [optional]
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Bypass cls.__init__; OpenApiModel.__new__ constructs the instance
        # directly -- NOTE(review): see model_utils for __new__ semantics.
        self = super(OpenApiModel, cls).__new__(cls)
        # All further model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required properties.
        self.type = type
        self.title = title
        self.data = data
        # Remaining kwargs are model properties; an unknown key is dropped
        # only when the configuration asks for it AND the model declares no
        # additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Attributes stored directly on the instance rather than in the model's
    # property store -- presumably consulted by the OpenApiModel base
    # machinery in model_utils; NOTE(review): confirm there.
    required_properties = set([
          '_data_store',
          '_check_type',
          '_spec_property_naming',
          '_path_to_item',
          '_configuration',
          '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, type, title, data, *args, **kwargs):  # noqa: E501
        """SearchResponse - a model defined in OpenAPI

        Args:
            type (SearchResponseType):
            title (str):
            data (bool, date, datetime, dict, float, int, list, str, none_type):

        Keyword Args:
            _check_type (bool): type-check values against ``openapi_types``
                and raise on mismatch; defaults to True.
            _spec_property_naming (bool): True if incoming names are the
                serialized (spec) names; False (default) for pythonic names.
            _path_to_item (tuple/list): path to this model within the
                received data, used in error reporting.
            _configuration (Configuration): configuration used when
                deserializing; enables type conversion when supplied.
            _visited_composed_classes (tuple): composed-schema classes already
                traversed via a discriminator, to avoid revisiting one.
            error (ErrorResponse): [optional]
        """
        # Separate the framework-level keyword arguments from model
        # properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # All further model properties must be passed by keyword.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class as visited so its discriminator is not reused.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Required properties.
        self.type = type
        self.title = title
        self.data = data
        # Remaining kwargs are model properties; an unknown key is dropped
        # only when the configuration asks for it AND the model declares no
        # additional_properties_type.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be set when deserializing server
            # data via _from_openapi_data, never by callers.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class Project(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # NOTE(review): defined without `self` on purpose — `cached_property`
        # here is the generator's descriptor from model_utils, which appears to
        # call the function with no arguments; confirm before changing.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'id': (int,),  # noqa: E501
            'name': (str,),  # noqa: E501
            'dataset': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'read_group_name': (str,),  # noqa: E501
            'write_group_name': (str,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No discriminator field is defined for this model.
        return None
    attribute_map = {
        'id': 'id',  # noqa: E501
        'name': 'name',  # noqa: E501
        'dataset': 'dataset',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'read_group_name': 'read_group_name',  # noqa: E501
        'write_group_name': 'write_group_name',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """Project - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (int): [optional]  # noqa: E501
            name (str): [optional]  # noqa: E501
            dataset (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            read_group_name (str): [optional]  # noqa: E501
            write_group_name (str): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Start the __new__ lookup above OpenApiModel in the MRO, so
        # OpenApiModel.__new__ itself is skipped for this construction path.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Remaining kwargs become model properties; unknown keys are dropped
        # when the configuration requests it and no additional properties are
        # accepted by this model.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """Project - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            id (int): [optional]  # noqa: E501
            name (str): [optional]  # noqa: E501
            dataset (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            read_group_name (str): [optional]  # noqa: E501
            write_group_name (str): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        # Remaining kwargs become model properties; unknown keys are dropped
        # when the configuration requests it and no additional properties are
        # accepted by this model.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be populated via _from_openapi_data.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class AnalysisType(ModelSimple):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    allowed_values = {
        ('value',): {
            'QC': "qc",
            'JOINT-CALLING': "joint-calling",
            'GVCF': "gvcf",
            'CRAM': "cram",
            'CUSTOM': "custom",
            'ES-INDEX': "es-index",
            'SV': "sv",
            'WEB': "web",
            'ANALYSIS-RUNNER': "analysis-runner",
        },
    }
    validations = {
    }
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'value': (str,),
        }
    @cached_property
    def discriminator():
        # No discriminator field is defined for this model.
        return None
    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """AnalysisType - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", "web", "analysis-runner", ]  # noqa: E501
        Keyword Args:
            value (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", "web", "analysis-runner", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # `value` may arrive either positionally or as a keyword, never both.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Anything still left in kwargs is unexpected for this single-value model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """AnalysisType - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", "web", "analysis-runner", ]  # noqa: E501
        Keyword Args:
            value (str): Types of analysis., must be one of ["qc", "joint-calling", "gvcf", "cram", "custom", "es-index", "sv", "web", "analysis-runner", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Start the __new__ lookup above OpenApiModel in the MRO, so
        # OpenApiModel.__new__ itself is skipped for this construction path.
        self = super(OpenApiModel, cls).__new__(cls)
        # `value` may arrive either positionally or as a keyword, never both.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Anything still left in kwargs is unexpected for this single-value model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Resolve the model classes NewSequence refers to, deferring the imports
    until first use so that circular imports between model modules are avoided.
    The resolved classes are published into this module's global namespace.
    """
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_technology import SequenceTechnology
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(
        SequenceStatus=SequenceStatus,
        SequenceTechnology=SequenceTechnology,
        SequenceType=SequenceType,
    )
class NewSequence(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    allowed_values = {
    }
    validations = {
    }
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # lazy_import() makes SequenceStatus/SequenceType/SequenceTechnology
        # available in module globals before they are referenced below.
        lazy_import()
        return {
            'external_ids': ({str: (str,)},),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
            'technology': (SequenceTechnology,),  # noqa: E501
            'sample_id': (str,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No discriminator field is defined for this model.
        return None
    attribute_map = {
        'external_ids': 'external_ids',  # noqa: E501
        'status': 'status',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
        'technology': 'technology',  # noqa: E501
        'sample_id': 'sample_id',  # noqa: E501
    }
    read_only_vars = {
    }
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, external_ids, status, meta, type, technology, sample_id, *args, **kwargs):  # noqa: E501
        """NewSequence - a model defined in OpenAPI
        Args:
            external_ids ({str: (str,)}):
            status (SequenceStatus):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            type (SequenceType):
            technology (SequenceTechnology):
            sample_id (str):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Start the __new__ lookup above OpenApiModel in the MRO, so
        # OpenApiModel.__new__ itself is skipped for this construction path.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.external_ids = external_ids
        self.status = status
        self.meta = meta
        self.type = type
        self.technology = technology
        self.sample_id = sample_id
        # Remaining kwargs become model properties; unknown keys are dropped
        # when the configuration requests it and no additional properties are
        # accepted by this model.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, external_ids, status, meta, type, technology, sample_id, *args, **kwargs):  # noqa: E501
        """NewSequence - a model defined in OpenAPI
        Args:
            external_ids ({str: (str,)}):
            status (SequenceStatus):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            type (SequenceType):
            technology (SequenceTechnology):
            sample_id (str):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.external_ids = external_ids
        self.status = status
        self.meta = meta
        self.type = type
        self.technology = technology
        self.sample_id = sample_id
        # Remaining kwargs become model properties; unknown keys are dropped
        # when the configuration requests it and no additional properties are
        # accepted by this model.
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Read-only attributes may only be populated via _from_openapi_data.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class SearchResponseType(ModelSimple):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('value',): {
'FAMILY': "family",
'PARTICIPANT': "participant",
'SAMPLE': "sample",
'ERROR': "error",
},
}
validations = {
}
additional_properties_type = None
_nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # NOTE(review): defined without `self` on purpose — `cached_property`
        # here is the generator's descriptor from model_utils, which appears to
        # call the function with no arguments; confirm before changing.
        return {
            'value': (str,),
        }
    @cached_property
    def discriminator():
        # No discriminator field is defined for this model.
        return None
attribute_map = {}
read_only_vars = set()
_composed_schemas = None
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """SearchResponseType - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Define types of search results., must be one of ["family", "participant", "sample", "error", ]  # noqa: E501
        Keyword Args:
            value (str): Define types of search results., must be one of ["family", "participant", "sample", "error", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # `value` may arrive either positionally or as a keyword, never both.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Anything still left in kwargs is unexpected for this single-value model.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """SearchResponseType - a model defined in OpenAPI
        Note that value can be passed either in args or in kwargs, but not in both.
        Args:
            args[0] (str): Define types of search results., must be one of ["family", "participant", "sample", "error", ]  # noqa: E501
        Keyword Args:
            value (str): Define types of search results., must be one of ["family", "participant", "sample", "error", ]  # noqa: E501
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Allocate the instance directly; __init__ is deliberately not run on
        # this deserialization path (this classmethod populates state itself).
        self = super(OpenApiModel, cls).__new__(cls)
        # The single value of this simple model may come in as kwargs['value']
        # or as the first positional argument — exactly one of the two.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        # Pop framework-control kwargs before validating what remains.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Anything still left in args after consuming the value is an error.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        # Record this class so discriminator dispatch is not re-entered for it.
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # A simple (single-value) model accepts no other named arguments.
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
class FamilySearchResponseData(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-restricted attributes are defined for this model.
    allowed_values = {
    }
    # No length/range/regex validations are defined for this model.
    validations = {
    }
    # NOTE: declared without `self`; the project's custom cached_property
    # (imported from model_utils) evaluates this lazily at class level.
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # NOTE(review): presumably whether `None` is accepted in place of this
    # model — confirm exact semantics in model_utils.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            'project': (bool, date, datetime, dict, float, int, list, str, none_type,),  # noqa: E501
            'id': (int,),  # noqa: E501
            'family_external_ids': ([str],),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No discriminator: this schema does not take part in polymorphic
        # (discriminator-based) dispatch.
        return None
    # Python attribute name -> JSON key in the OpenAPI definition
    # (identical here, kept for generality by the generator).
    attribute_map = {
        'project': 'project',  # noqa: E501
        'id': 'id',  # noqa: E501
        'family_external_ids': 'family_external_ids',  # noqa: E501
    }
    # No server-side read-only properties on this model.
    read_only_vars = {
    }
    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, project, id, family_external_ids, *args, **kwargs):  # noqa: E501
        """FamilySearchResponseData - a model defined in OpenAPI
        Args:
            project (bool, date, datetime, dict, float, int, list, str, none_type):
            id (int):
            family_external_ids ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop framework-control kwargs before the rest are treated as
        # model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly; __init__ is intentionally bypassed on the
        # deserialization path (it rejects read-only attributes).
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.project = project
        self.id = id
        self.family_external_ids = family_external_ids
        for var_name, var_value in kwargs.items():
            # Discard unknown keys only when configured to do so AND the model
            # does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Instance attributes the framework always sets (used by model_utils).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, project, id, family_external_ids, *args, **kwargs):  # noqa: E501
        """FamilySearchResponseData - a model defined in OpenAPI
        Args:
            project (bool, date, datetime, dict, float, int, list, str, none_type):
            id (int):
            family_external_ids ([str]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
        """
        # Pop framework-control kwargs before the rest are treated as
        # model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.project = project
        self.id = id
        self.family_external_ids = family_external_ids
        for var_name, var_value in kwargs.items():
            # Discard unknown keys only when configured to do so AND the model
            # does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Setting a read-only attribute through __init__ is an error;
            # the _from_openapi_data path must be used instead.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import the sequence model dependencies at call time.

    Deferring these imports until first use (instead of at module load)
    and publishing the classes into this module's namespace lets the
    type declarations below refer to them by name.
    """
    from sample_metadata.model.sequence_status import SequenceStatus
    from sample_metadata.model.sequence_technology import SequenceTechnology
    from sample_metadata.model.sequence_type import SequenceType
    globals().update(
        SequenceStatus=SequenceStatus,
        SequenceTechnology=SequenceTechnology,
        SequenceType=SequenceType,
    )
class SequenceUpdateModel(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-restricted attributes are defined for this model.
    allowed_values = {
    }
    # No length/range/regex validations are defined for this model.
    validations = {
    }
    # NOTE: declared without `self`; the project's custom cached_property
    # (imported from model_utils) evaluates this lazily at class level.
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Resolve the deferred model imports before the types are referenced.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # NOTE(review): presumably whether `None` is accepted in place of this
    # model — confirm exact semantics in model_utils.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Resolve the deferred model imports before the types are referenced.
        lazy_import()
        return {
            'external_ids': ({str: (str,)},),  # noqa: E501
            'sample_id': (int,),  # noqa: E501
            'status': (SequenceStatus,),  # noqa: E501
            'technology': (SequenceTechnology,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'type': (SequenceType,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No discriminator: this schema does not take part in polymorphic
        # (discriminator-based) dispatch.
        return None
    # Python attribute name -> JSON key in the OpenAPI definition
    # (identical here, kept for generality by the generator).
    attribute_map = {
        'external_ids': 'external_ids',  # noqa: E501
        'sample_id': 'sample_id',  # noqa: E501
        'status': 'status',  # noqa: E501
        'technology': 'technology',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'type': 'type',  # noqa: E501
    }
    # No server-side read-only properties on this model.
    read_only_vars = {
    }
    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """SequenceUpdateModel - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_ids ({str: (str,)}): [optional]  # noqa: E501
            sample_id (int): [optional]  # noqa: E501
            status (SequenceStatus): [optional]  # noqa: E501
            technology (SequenceTechnology): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            type (SequenceType): [optional]  # noqa: E501
        """
        # Pop framework-control kwargs; everything left is a model property.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly; __init__ is intentionally bypassed on the
        # deserialization path (it rejects read-only attributes).
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            # Discard unknown keys only when configured to do so AND the model
            # does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Instance attributes the framework always sets (used by model_utils).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """SequenceUpdateModel - a model defined in OpenAPI
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_ids ({str: (str,)}): [optional]  # noqa: E501
            sample_id (int): [optional]  # noqa: E501
            status (SequenceStatus): [optional]  # noqa: E501
            technology (SequenceTechnology): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            type (SequenceType): [optional]  # noqa: E501
        """
        # Pop framework-control kwargs; everything left is a model property.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            # Discard unknown keys only when configured to do so AND the model
            # does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Setting a read-only attribute through __init__ is an error;
            # the _from_openapi_data path must be used instead.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import SampleBatchUpsert at call time.

    Deferring the import until first use (instead of at module load) and
    publishing the class into this module's namespace lets the type
    declarations below refer to it by name.
    """
    from sample_metadata.model.sample_batch_upsert import SampleBatchUpsert
    globals().update(SampleBatchUpsert=SampleBatchUpsert)
class ParticipantUpsert(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """
    # No enum-restricted attributes are defined for this model.
    allowed_values = {
    }
    # No length/range/regex validations are defined for this model.
    validations = {
    }
    # NOTE: declared without `self`; the project's custom cached_property
    # (imported from model_utils) evaluates this lazily at class level.
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        # Resolve the deferred model import before the types are referenced.
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
    # NOTE(review): presumably whether `None` is accepted in place of this
    # model — confirm exact semantics in model_utils.
    _nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        # Resolve the deferred model import before the types are referenced.
        lazy_import()
        return {
            'samples': ([SampleBatchUpsert],),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'reported_sex': (int,),  # noqa: E501
            'reported_gender': (str,),  # noqa: E501
            'karyotype': (str,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'id': (int,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        # No discriminator: this schema does not take part in polymorphic
        # (discriminator-based) dispatch.
        return None
    # Python attribute name -> JSON key in the OpenAPI definition
    # (identical here, kept for generality by the generator).
    attribute_map = {
        'samples': 'samples',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'reported_sex': 'reported_sex',  # noqa: E501
        'reported_gender': 'reported_gender',  # noqa: E501
        'karyotype': 'karyotype',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'id': 'id',  # noqa: E501
    }
    # No server-side read-only properties on this model.
    read_only_vars = {
    }
    # Not a composed (allOf/oneOf/anyOf) schema.
    _composed_schemas = {}
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, samples, *args, **kwargs):  # noqa: E501
        """ParticipantUpsert - a model defined in OpenAPI
        Args:
            samples ([SampleBatchUpsert]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_id (str): [optional]  # noqa: E501
            reported_sex (int): [optional]  # noqa: E501
            reported_gender (str): [optional]  # noqa: E501
            karyotype (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            id (int): [optional]  # noqa: E501
        """
        # Pop framework-control kwargs; everything left is a model property.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate directly; __init__ is intentionally bypassed on the
        # deserialization path (it rejects read-only attributes).
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.samples = samples
        for var_name, var_value in kwargs.items():
            # Discard unknown keys only when configured to do so AND the model
            # does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self
    # Instance attributes the framework always sets (used by model_utils).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, samples, *args, **kwargs):  # noqa: E501
        """ParticipantUpsert - a model defined in OpenAPI
        Args:
            samples ([SampleBatchUpsert]):
        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            external_id (str): [optional]  # noqa: E501
            reported_sex (int): [optional]  # noqa: E501
            reported_gender (str): [optional]  # noqa: E501
            karyotype (str): [optional]  # noqa: E501
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}): [optional]  # noqa: E501
            id (int): [optional]  # noqa: E501
        """
        # Pop framework-control kwargs; everything left is a model property.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.samples = samples
        for var_name, var_value in kwargs.items():
            # Discard unknown keys only when configured to do so AND the model
            # does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # Setting a read-only attribute through __init__ is an error;
            # the _from_openapi_data path must be used instead.
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Import the nested-sample model dependencies at call time.

    Deferring these imports until first use (instead of at module load)
    and publishing the classes into this module's namespace lets the
    type declarations below refer to them by name.
    """
    from sample_metadata.model.nested_sequence import NestedSequence
    from sample_metadata.model.sample_type import SampleType
    globals().update(NestedSequence=NestedSequence, SampleType=SampleType)
class NestedSample(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model defines no enum-restricted properties and no extra validations.
    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'id': (str,),  # noqa: E501
            'external_id': (str,),  # noqa: E501
            'type': (SampleType,),  # noqa: E501
            'meta': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),  # noqa: E501
            'sequences': ([NestedSequence],),  # noqa: E501
            'created_date': (str,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    # JSON field names are identical to the python attribute names for this model.
    attribute_map = {
        'id': 'id',  # noqa: E501
        'external_id': 'external_id',  # noqa: E501
        'type': 'type',  # noqa: E501
        'meta': 'meta',  # noqa: E501
        'sequences': 'sequences',  # noqa: E501
        'created_date': 'created_date',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, id, external_id, type, meta, sequences, *args, **kwargs):  # noqa: E501
        """NestedSample - a model defined in OpenAPI

        Args:
            id (str):
            external_id (str):
            type (SampleType):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            sequences ([NestedSequence]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            created_date (str): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate without running __init__ so server-provided (potentially
        # read-only) attributes can be populated directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        self.external_id = external_id
        self.type = type
        self.meta = meta
        self.sequences = sequences
        for var_name, var_value in kwargs.items():
            # Silently drop unknown keys only when the configuration requests it
            # and the model does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Instance attributes managed by the model machinery itself (never treated
    # as OpenAPI data properties).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, id, external_id, type, meta, sequences, *args, **kwargs):  # noqa: E501
        """NestedSample - a model defined in OpenAPI

        Args:
            id (str):
            external_id (str):
            type (SampleType):
            meta ({str: (bool, date, datetime, dict, float, int, list, str, none_type)}):
            sequences ([NestedSequence]):

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            created_date (str): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.id = id
        self.external_id = external_id
        self.type = type
        self.meta = meta
        self.sequences = sequences
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # NOTE: upstream generator behavior — the attribute is set first,
            # then the read-only check raises inside the kwargs loop.
            # (read_only_vars is empty for this model, so this never fires.)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import re # noqa: F401
import sys # noqa: F401
from sample_metadata.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from sample_metadata.exceptions import ApiAttributeError
def lazy_import():
    """Deferred model import, run at first use to avoid circular imports."""
    from sample_metadata.model.validation_error import ValidationError
    # Publish the class at module scope so openapi_types() can resolve it.
    globals().update(ValidationError=ValidationError)
class HTTPValidationError(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # This model defines no enum-restricted properties and no extra validations.
    allowed_values = {
    }

    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'detail': ([ValidationError],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'detail': 'detail',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """HTTPValidationError - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            detail ([ValidationError]): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Allocate without running __init__ so server-provided (potentially
        # read-only) attributes can be populated directly.
        self = super(OpenApiModel, cls).__new__(cls)
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            # Silently drop unknown keys only when the configuration requests it
            # and the model does not accept additional properties.
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    # Instance attributes managed by the model machinery itself (never treated
    # as OpenAPI data properties).
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """HTTPValidationError - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            detail ([ValidationError]): [optional]  # noqa: E501
        """
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                        self._configuration is not None and \
                        self._configuration.discard_unknown_keys and \
                        self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            # NOTE: upstream generator behavior — the attribute is set first,
            # then the read-only check raises inside the kwargs loop.
            # (read_only_vars is empty for this model, so this never fires.)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                     f"class with read only attributes.")
import json
from pathlib import Path
from typing import Any
from typing import Dict
from typing import List
import numpy as np
from jsonschema import validate
from sample_size.metrics import BaseMetric
from sample_size.metrics import BooleanMetric
from sample_size.metrics import NumericMetric
from sample_size.metrics import RatioMetric
from sample_size.multiple_testing import MultipleTestingMixin
# Default experiment parameters used when the caller does not override them.
DEFAULT_ALPHA = 0.05
DEFAULT_POWER = 0.8
DEFAULT_VARIANTS = 2
# Seeded RandomState plus its captured initial state: get_sample_size restores
# STATE before each multiple-testing simulation so results are reproducible.
RANDOM_STATE = np.random.RandomState(1)
STATE = RANDOM_STATE.get_state()
# JSON schema used to validate metric specs passed to register_metrics;
# loaded once at import time from the file shipped next to this module.
schema_file_path = Path(Path(__file__).parent, "metrics_schema.json")
with open(str(schema_file_path), "r") as schema_file:
    METRICS_SCHEMA = json.load(schema_file)
class SampleSizeCalculator(MultipleTestingMixin):
    """Computes the per-variant sample size required by registered metrics.

    Attributes:
        alpha: statistical significance level
        power: statistical power
        metrics: metrics registered via :meth:`register_metrics`
        variants: total number of cohorts (control + treatments)
    """

    def __init__(self, alpha: float = DEFAULT_ALPHA, variants: int = DEFAULT_VARIANTS, power: float = DEFAULT_POWER):
        self.alpha = alpha
        self.power = power
        self.variants: int = variants
        self.metrics: List[BaseMetric] = []

    def _get_single_sample_size(self, metric: BaseMetric, alpha: float) -> float:
        """Sample size for one metric at the given alpha, via statsmodels."""
        standardized_effect = metric.mde / float(np.sqrt(metric.variance))
        return int(
            metric.power_analysis_instance.solve_power(
                effect_size=standardized_effect,
                alpha=alpha,
                power=self.power,
                ratio=1,
                alternative=metric.alternative,
            )
        )

    def get_sample_size(self) -> float:
        """Required sample size across all metrics and variants.

        With a single test this is the plain power calculation; otherwise a
        multiple-testing search is run between the uncorrected minimum and
        the Bonferroni-corrected maximum.
        """
        num_tests = len(self.metrics) * (self.variants - 1)
        if num_tests < 2:
            return self._get_single_sample_size(self.metrics[0], self.alpha)
        uncorrected = [self._get_single_sample_size(m, self.alpha) for m in self.metrics]
        bonferroni = [self._get_single_sample_size(m, self.alpha / num_tests) for m in self.metrics]
        # Reset the RNG so repeated calls simulate from the same state.
        RANDOM_STATE.set_state(STATE)
        return self.get_multiple_sample_size(min(uncorrected), max(bonferroni), RANDOM_STATE)

    def register_metrics(self, metrics: List[Dict[str, Any]]) -> None:
        """Validate metric specs against the schema and register them."""
        metric_classes = {
            "boolean": BooleanMetric,
            "numeric": NumericMetric,
            "ratio": RatioMetric,
        }
        validate(instance=metrics, schema=METRICS_SCHEMA)
        self.metrics.extend(
            metric_classes[spec["metric_type"]](**spec["metric_metadata"]) for spec in metrics
        )
from abc import ABCMeta
from abc import abstractmethod
from typing import Union
import numpy as np
import numpy.typing as npt
from scipy import stats
from statsmodels.stats.power import NormalIndPower
from statsmodels.stats.power import TTestIndPower
class BaseMetric(metaclass=ABCMeta):
    """Abstract base class for experiment metrics used in power analysis.

    Bug fix: the original declared ``__metaclass__ = ABCMeta``, the Python 2
    idiom, which has no effect in Python 3 — the abstract members were not
    actually enforced. Using ``metaclass=ABCMeta`` makes the class properly
    abstract; all concrete subclasses implement every abstract member, so
    they remain instantiable.

    Attributes:
        mde: absolute minimum detectable effect
        alternative: alternative hypothesis type passed to statsmodels
    """
    mde: float

    def __init__(self, mde: float, alternative: str):
        self.mde = mde
        self.alternative = alternative

    @property
    @abstractmethod
    def power_analysis_instance(self) -> Union[NormalIndPower, TTestIndPower]:
        """The statsmodels power-analysis object for this metric type."""
        raise NotImplementedError

    @property
    @abstractmethod
    def variance(self) -> float:
        """Variance of the baseline metric."""
        raise NotImplementedError

    @staticmethod
    def check_positive(number: float, name: str) -> float:
        """Return *number* unchanged, raising if it is negative.

        NOTE(review): despite the name, 0 is accepted; a zero variance would
        later divide by zero in the effect-size calculation — confirm intent.
        """
        if number < 0:
            raise ValueError(f"Error: Please provide a positive number for {name}.")
        return number

    def generate_p_values(
        self, true_alt: npt.NDArray[np.bool_], sample_size: int, random_state: np.random.RandomState
    ) -> npt.NDArray[np.float_]:
        """
        This method simulates any registered metric's p-value. The output will
        later be applied to BH procedure

        Parameters:
            true_alt: A boolean array of shape (m hypotheses x replications).
                Each element represents whether the alternative hypothesis is true
                for an individual hypothesis
            sample_size: sample size used for the simulated tests
            random_state: random state to generate fixed output for any given input

        Returns:
            p-value: A float array of shape (m hypotheses x replications) of
                simulated p-values
        """
        total_alt = true_alt.sum()
        total_null = true_alt.size - total_alt
        p_values = np.empty(true_alt.shape)
        # Alternative-hypothesis entries are drawn first, then nulls, so the
        # random_state is consumed in a fixed, reproducible order.
        p_values[true_alt] = self._generate_alt_p_values(total_alt, sample_size, random_state)
        # Under the null hypothesis p-values are uniform on [0, 1].
        p_values[~true_alt] = stats.uniform.rvs(0, 1, total_null, random_state=random_state)
        return p_values

    @abstractmethod
    def _generate_alt_p_values(
        self, size: int, sample_size: int, random_state: np.random.RandomState
    ) -> npt.NDArray[np.float_]:
        """Simulate *size* p-values under the alternative hypothesis."""
        raise NotImplementedError
class BooleanMetric(BaseMetric):
    """A conversion-style metric whose observations are 0/1 outcomes."""
    probability: float
    mde: float

    def __init__(
        self,
        probability: float,
        mde: float,
        alternative: str,
    ):
        super(BooleanMetric, self).__init__(mde, alternative)
        self.probability = self._check_probability(probability)

    @property
    def variance(self) -> float:
        # Bernoulli variance: p * (1 - p).
        return self.probability * (1 - self.probability)

    @property
    def power_analysis_instance(self) -> NormalIndPower:
        return NormalIndPower()

    @staticmethod
    def _check_probability(probability: float) -> float:
        """Return the probability, raising unless it lies in [0, 1]."""
        if not 0 <= probability <= 1:
            raise ValueError("Error: Please provide a float between 0 and 1 for probability.")
        return probability

    def _generate_alt_p_values(
        self, size: int, sample_size: int, random_state: np.random.RandomState
    ) -> npt.NDArray[np.float_]:
        """Simulate p-values from the normal approximation under H1."""
        shift = self.mde / np.sqrt(2 * self.variance / sample_size)
        draws = stats.norm.rvs(loc=shift, size=size, random_state=random_state)
        p_values: npt.NDArray[np.float_] = stats.norm.sf(np.abs(draws))
        return p_values * 2 if self.alternative == "two-sided" else p_values
class NumericMetric(BaseMetric):
    """A metric with continuous observations and a known baseline variance."""
    mde: float

    def __init__(
        self,
        variance: float,
        mde: float,
        alternative: str,
    ):
        super(NumericMetric, self).__init__(mde, alternative)
        self._variance = self.check_positive(variance, "variance")

    @property
    def variance(self) -> float:
        return self._variance

    @property
    def power_analysis_instance(self) -> TTestIndPower:
        return TTestIndPower()

    def _generate_alt_p_values(
        self, size: int, sample_size: int, random_state: np.random.RandomState
    ) -> npt.NDArray[np.float_]:
        """Simulate two-sample t-test p-values under H1 (noncentral t)."""
        degrees_of_freedom = 2 * (sample_size - 1)
        noncentrality = np.sqrt(sample_size / 2 / self.variance) * self.mde
        draws = stats.nct.rvs(nc=noncentrality, df=degrees_of_freedom, size=size, random_state=random_state)
        p_values: npt.NDArray[np.float_] = stats.t.sf(np.abs(draws), degrees_of_freedom)
        # Todo: use accurate p-value calculation due to nct's asymmetric distribution
        if self.alternative == "two-sided":
            p_values *= 2
        return p_values
class RatioMetric(BaseMetric):
    """A metric defined as the ratio of two (possibly correlated) means."""
    numerator_mean: float
    numerator_variance: float
    denominator_mean: float
    denominator_variance: float
    covariance: float

    def __init__(
        self,
        numerator_mean: float,
        numerator_variance: float,
        denominator_mean: float,
        denominator_variance: float,
        covariance: float,
        mde: float,
        alternative: str,
    ):
        super(RatioMetric, self).__init__(mde, alternative)
        # TODO: add check for Cauchy-Schwarz inequality
        self.numerator_mean = numerator_mean
        self.numerator_variance = self.check_positive(numerator_variance, "numerator variance")
        self.denominator_mean = denominator_mean
        self.denominator_variance = self.check_positive(denominator_variance, "denominator variance")
        self.covariance = covariance

    @property
    def variance(self) -> float:
        """Delta-method variance of the ratio of the two means."""
        numerator_term = self.numerator_variance / self.denominator_mean**2
        denominator_term = self.denominator_variance * self.numerator_mean**2 / self.denominator_mean**4
        covariance_term = 2 * self.covariance * self.numerator_mean / self.denominator_mean**3
        return numerator_term + denominator_term - covariance_term

    @property
    def power_analysis_instance(self) -> NormalIndPower:
        return NormalIndPower()

    def _generate_alt_p_values(
        self, size: int, sample_size: int, random_state: np.random.RandomState
    ) -> npt.NDArray[np.float_]:
        """Simulate p-values from the normal approximation under H1."""
        shift = self.mde / np.sqrt(2 * self.variance / sample_size)
        draws = stats.norm.rvs(loc=shift, size=size, random_state=random_state)
        p_values: npt.NDArray[np.float_] = stats.norm.sf(np.abs(draws))
        return p_values * 2 if self.alternative == "two-sided" else p_values
from typing import Any
from typing import Collection
from typing import Dict
from typing import List
from sample_size.sample_size_calculator import DEFAULT_ALPHA
# Maps each metric type to the baseline parameters the CLI must collect:
# parameter name -> human-readable prompt text used by get_metric_parameters.
METRIC_PARAMETERS = {
    "boolean": {"probability": "baseline probability (between 0 and 1)"},
    "numeric": {"variance": "variance of the baseline metric"},
    "ratio": {
        "numerator_mean": "mean of the baseline metric's numerator",
        "numerator_variance": "variance of the baseline metric's numerator",
        "denominator_mean": "mean of the baseline metric's denominator",
        "denominator_variance": "variance of the baseline metric's denominator",
        "covariance": "covariance between the baseline metric's numerator and denominator",
    },
}
def is_float(value: str) -> bool:
    """Return True if *value* parses as a float, False otherwise."""
    try:
        float(value)
    except ValueError:
        return False
    return True


def get_float(input_str: str, input_name: str) -> float:
    """Parse user input as a float, raising a descriptive error otherwise."""
    stripped = input_str.strip()
    if not is_float(stripped):
        raise ValueError(f"Error: Please enter a float for the {input_name}.")
    return float(stripped)
def get_alpha() -> float:
    """Prompt for the significance level alpha.

    Returns:
        The parsed alpha, or DEFAULT_ALPHA (0.05) when the user just
        presses Enter.

    Raises:
        ValueError: if the input is not a float in (0, 0.4].
    """
    # Fix: the original prompt read "the alpha between (between 0 and 0.4
    # inclusively)" — a garbled duplication.
    alpha_input = input(
        "Enter the alpha (between 0 and 0.4 inclusive) or press Enter to use default alpha=0.05: "
    ).strip()
    if alpha_input == "":
        print("Using default alpha (0.05) and default power (0.8)...")
        return DEFAULT_ALPHA
    alpha = get_float(alpha_input, "alpha")
    # 0 is rejected, 0.4 is accepted (upper bound inclusive).
    if 0 < alpha <= 0.4:
        print(f"Using alpha ({alpha}) and default power (0.8)...")
        return alpha
    raise ValueError("Error: Please provide a float between 0 and 0.4 for alpha.")
def get_mde(metric_type: str) -> float:
    """Prompt for and return the absolute minimum detectable effect."""
    prompt = (
        f"Enter the absolute minimum detectable effect for this {metric_type} \n"
        f"MDE: targeted treatment metric value minus the baseline value: "
    )
    return get_float(input(prompt), "minimum detectable effect")
def get_alternative() -> str:
    """Prompt for the alternative-hypothesis type.

    Returns:
        "two-sided" or "one-sided"; an empty response selects the default
        two-sided test.

    Raises:
        ValueError: if the response is not recognized.
    """
    alternative = (
        input(
            "Enter the alternative hypothesis type (two-sided, one-sided) or press Enter to use "
            "default two-sided test: "
        )
        .strip()
        .lower()
    )
    if alternative in ["two-sided", "one-sided"]:
        return alternative
    elif alternative == "":
        print("Using default(two-sided test)...")
        return "two-sided"
    else:
        # Fix: the previous message offered "larger" and "smaller", values
        # this function never accepts.
        raise ValueError("Error: Unexpected alternative type. Please enter two-sided or one-sided.")
def get_metric_type() -> str:
    """Prompt for and return the metric type in lower case."""
    choice = input("Enter metric type (Boolean, Numeric, Ratio): ").strip().lower()
    if choice not in ("boolean", "numeric", "ratio"):
        raise ValueError("Error: Unexpected metric type. Please enter Boolean, Numeric, or Ratio.")
    return choice
def get_metric_parameters(parameter_definitions: Dict[str, str]) -> Dict[str, Any]:
    """Prompt for each required parameter; return parameter name -> value."""
    return {
        name: get_float(input(f"Enter the {definition}: "), definition)
        for name, definition in parameter_definitions.items()
    }
def get_variants() -> int:
    """Prompt for the number of experiment cohorts (control + treatments)."""
    response = (
        input(
            "Enter the number of cohorts for this test or Press Enter to use default variant = 2 if you have only 1 "
            "control and 1 treatment. \n"
            "definition: Control + number of treatments: "
        )
        .strip()
        .lower()
    )
    if response == "":
        print("Using default variants(2)...")
        return 2
    if not response.isdigit():
        raise ValueError("Error: Please enter a positive integer for the number of variants.")
    variants = int(response)
    if variants < 2:
        raise ValueError("Error: An experiment must contain at least 2 variants.")
    return variants
def register_another_metric() -> bool:
    """Ask whether the user wants to register one more metric."""
    answer = input("Are you going to register another metric? (y/n)").strip().lower()
    if answer == "y":
        return True
    # Enter with no input counts as "no".
    if answer in ("n", ""):
        return False
    raise ValueError("Error: Please enter 'y' or 'n'.")
def _get_metric() -> Dict[str, Collection[str]]:
    """Interactively collect one metric spec (type, parameters, MDE, alternative)."""
    metric_type = get_metric_type()
    metadata = get_metric_parameters(METRIC_PARAMETERS[metric_type])
    metadata["mde"] = get_mde(metric_type)
    if get_alternative() == "two-sided":
        metadata["alternative"] = "two-sided"
    else:
        # One-sided test: the direction follows the sign of the MDE.
        metadata["alternative"] = "larger" if metadata["mde"] > 0 else "smaller"
    return {"metric_type": metric_type, "metric_metadata": metadata}
def get_metrics() -> List[Dict[str, Collection[str]]]:
    """Collect metric specs until the user declines to add another."""
    collected = [_get_metric()]
    while register_another_metric():
        collected.append(_get_metric())
    return collected
# WeightedStatsCalculator
The WeightedStatsCalculator class is a tool for storing samples and calculating their weighted statistics. It calculates and stores the following statistics measures: weighted mean, weighted standard deviation, and standard error of the mean, for a set of weighted data points.
## Installation
The WeightedStatsCalculator can be installed using pip:
```
pip install sample-statistics
```
The package is tested on Python 3.7, 3.8, 3.9 and 3.11.
## Getting started
To use the WeightedStatsCalculator, you first need to create an instance of the WeightedStatsCalculator class. You can do this by importing the library and calling the constructor:
```
from sample_statistics import WeightedStatsCalculator
wsc = WeightedStatsCalculator()
```
## Usage
You can then add weighted data points to the calculator using the add method. This method takes in a sample and its corresponding weights (if any), calculates its weighted statistics and returns a dictionary containing the statistics:
```
sample1 = np.array([1,2,3])
weights1 = np.array([1,2,0.5])
stats1 = wsc.add(sample=sample1, weights=weights1)
print(stats1)
# Output: {'sample': array([1, 2, 3]), 'weights': array([1. , 2. , 0.5]), 'weighted_mean': 1.8571428571428572, 'weighted_std': 0.7559289460184544, 'standard_error_mean': 0.4040610178208842}
```
You can add as many samples as you want using the add method.
```
sample2 = [4, 5, 6]
stats2 = wsc.add(sample=sample2)
print(stats2)
# Output: {'sample': array([4, 5, 6]), 'weights': array([1., 1., 1.]), 'weighted_mean': 5.0, 'weighted_std': 1.0, 'standard_error_mean': 0.5773502691896257}
```
Each sample's resulting dict (the inner dict) is stored in a nested dict. You can access the current size of the nested dict (i.e. the number of stored samples) and the stored inner dicts using the size and samples attributes, respectively:
```
print(wsc.size)
# Output: 2
print(wsc.samples)
# Output: {
0: {'sample': array([1, 2, 3]), 'weights': array([1. , 2. , 0.5]), 'weighted_mean': 1.8571428571428572, 'weighted_std': 0.7559289460184544, 'standard_error_mean': 0.4040610178208842},
1: {'sample': array([4, 5, 6]), 'weights': array([1., 1., 1.]), 'weighted_mean': 5.0, 'weighted_std': 1.0, 'standard_error_mean': 0.5773502691896257}
}
```
Alternatively, the nested dict can be also accessed by the __call__ method:
```
print(wsc())
# Output: {
0: {'sample': array([1, 2, 3]), 'weights': array([1. , 2. , 0.5]), 'weighted_mean': 1.8571428571428572, 'weighted_std': 0.7559289460184544, 'standard_error_mean': 0.4040610178208842},
1: {'sample': array([4, 5, 6]), 'weights': array([1., 1., 1.]), 'weighted_mean': 5.0, 'weighted_std': 1.0, 'standard_error_mean': 0.5773502691896257}
}
```
You can update the statistics of a particular sample using the update method. This method takes in the index of the sample to update, and the new sample and weights (if any). It then calculates the new weighted statistics and returns a dictionary containing the updated statistics. If the sample (or weights) are not provided as arguments, the already stored sample (or weights) will be used in the calculation of the new statistics.
```
stats1_updated = wsc.update(index=0, sample=[1, 2, 3, 4, 5], weights=[1, 2, 1, 1, 1])
print(stats1_updated)
# Output: {'sample': array([1, 2, 3, 4, 5]), 'weights': array([1, 2, 1, 1, 1]), 'weighted_mean': 2.8333333333333335, 'weighted_std': 1.4719601443879744, 'standard_error_mean': 0.6009252125773314}
```
In the example above the new sample provided has a different size than the stored sample (len(new sample) = 5, len(old sample)= 3). It was therefore necessary to also provide new weights with the same size as the new sample. Calling the update method with just the new sample raises a type error as the calculator tries to get the statistics of the new sample using the old weights which have a different size.
## How it works
The package stores samples and their corresponding weights into a nested dictionary. Each sample is stored into a separate inner dictionary. The assigned key to a new sample corresponds to the current size of the nested dictionary (e.g. new key = number of stored samples - 1). In addition, the package calculates the weighted mean, weighted standard deviation and the standard error of the mean of each sample by using the DescrStatsW class from statsmodels. These statistics are stored in each inner dictionary. Every inner dictionary has the following keys: "sample", "weights", "weighted_mean", "weighted_std" and "standard_error_mean".
More on math?
## Requirements
This project uses the following third-party libraries:
* NumPy for simple data handling and data transformations.
* Statsmodels for calculating the weighted statistics.
API Documentation
================================================================================
The WeightedStatsCalculator class is a tool for storing samples and calculating their weighted statistics. It provides methods to add new samples, update existing samples, and retrieve the calculated statistics.
## `class sample_statistics.main.WeightedStatsCalculator`
A class for calculating weighted statistics on a set of samples.
Attributes:
-------------------------------------------------------
- `samples`: dict
A nested dictionary containing dictionaries with the weighted statistics for each sample. Each inner dictionary has the following keys:
- `sample`: numpy.ndarray
The sample data.
- `weights`: numpy.ndarray
The weights for each data point in the sample.
- `weighted_mean`: float
The weighted mean of the sample.
- `weighted_std`: float
The weighted standard deviation of the sample.
- `standard_error_mean`: float
The standard error of the mean of the sample.
- `size`: int
The number of samples in the calculator.
Methods:
-------------------------------------------------------
## `add(sample, weights=None)`
Adds a new sample to the nested dictionary and returns the dictionary of this added sample with its statistics.
### Parameters:
- `sample`: 1D array-like
The sample data to add.
- `weights`: 1D array-like, optional
The weights for each data point in the sample. Default is None, which gives equal weights to all data points.
### Returns:
- `dict`
A dictionary containing the weighted statistics for the added sample.
## `update(index, sample=None, weights=None)`
Update the statistics of a particular sample and return its dictionary.
### Parameters:
- `index`: int
The index of the sample to update.
- `sample`: 1D array-like, optional
The new data to be used in the calculation of the sample statistics. If not provided, the existing sample data will be used. If provided, the weights must also change to the same size.
- `weights`: 1D array-like, optional
The new weights to be used in the calculation of the sample statistics. If not provided, the existing weights will be used. They must have the same size as the sample. For the unweighted case, the input weights must be np.ones(len(sample)).
### Returns:
- `dict`:
A dictionary containing the updated sample statistics.
## `__call__()`
### Returns:
- `dict`:
The nested dictionary of samples and their statistics.
| /sample-statistics-1.0.1.tar.gz/sample-statistics-1.0.1/README.md | 0.807688 | 0.987664 | README.md | pypi |
import logging
import yaml
from sampletester import caserunner
from sampletester import testplan
class Visitor(testplan.Visitor):
  """Test-plan visitor that runs environments, suites, and test cases.

  Walks the test plan, executing each enabled test case via
  caserunner.TestCase and aggregating failure/error counts and timing
  upward from cases to suites to environments.  When `fail_fast` is set,
  all remaining work is skipped after the first failure or error.
  """

  def __init__(self, fail_fast=False):
    # Overall result of the run; flipped to False by any failing environment.
    self.run_passed = True
    self.fail_fast = fail_fast
    # Set as soon as any case fails or errors; drives the fail-fast skips.
    self.encountered_failure = False

  def start_visit(self):
    """Start the visit; returns the environment begin/end callbacks."""
    logging.info("========== Running test!")
    return self.visit_environment, self.visit_environment_end

  def visit_environment(self, environment: testplan.Environment, do_environment: bool):
    """Set up `environment` and return the suite begin/end callbacks.

    Returns (None, None) so the environment is skipped when it is filtered
    out, or when fail-fast is in effect after an earlier failure.
    """
    if not do_environment:
      logging.info('skipping environment "{}"'.format(environment.name()))
      return None, None
    if self.fail_fast and self.encountered_failure:
      # Fixed: name() must be called; formatting the bound method itself
      # would log its repr rather than the environment's name.
      logging.info('fail fast: not running environment "{}"'.format(environment.name()))
      return None, None
    environment.attempted = True
    environment.config.setup()
    # Bind `environment` into the callbacks so nested suites know their owner.
    return (lambda idx, suite, do_suite: self.visit_suite(idx, suite, do_suite, environment),
            lambda idx, suite, do_suite: self.visit_suite_end(idx, suite, do_suite, environment))

  def visit_suite(self, idx: int, suite: testplan.Suite, do_suite: bool,
                  environment: testplan.Environment):
    """Log the start of `suite` and return the per-test-case callback."""
    if not do_suite:
      logging.info('skipping suite "{}"'.format(suite.name()))
      return None
    if self.fail_fast and self.encountered_failure:
      # Fixed: call name() so the actual suite name is logged.
      logging.info('fail fast: not running suite "{}"'.format(suite.name()))
      return None
    suite.attempted = True
    logging.info(
        "\n==== SUITE {}:{}:{} START =========================================="
        .format(environment.name(), idx, suite.name()))
    logging.info(" {}".format(suite.source()))
    return lambda idx, testcase, do_case: self.visit_testcase(idx, testcase,
                                                              do_case, environment, suite)

  def visit_testcase(self, idx: int, tcase: testplan.TestCase, do_case: bool,
                     environment: testplan.Environment, suite: testplan.Suite):
    """Run a single test case and fold its results into `tcase` and `suite`."""
    if not do_case:
      logging.info('skipping case "{}"'.format(tcase.name()))
      return
    if self.fail_fast and self.encountered_failure:
      # Fixed: call name() so the actual case name is logged.
      logging.info('fail fast: not running case "{}"'.format(tcase.name()))
      return
    tcase.attempted = True
    case_runner = caserunner.TestCase(environment.config, idx, tcase.name(),
                                      suite.setup(), tcase.spec(),
                                      suite.teardown())
    tcase.runner = case_runner
    case_runner.run()
    num_failures = len(case_runner.failures)
    tcase.num_failures += num_failures
    # NOTE(review): the suite accumulates tcase.num_failures (the case's
    # running total) rather than num_failures from this run; confirm a case
    # is only ever run once per suite, otherwise failures double-count.
    suite.num_failures += tcase.num_failures
    if tcase.num_failures > 0:
      suite.num_failing_cases += 1
    num_errors = len(case_runner.errors)
    tcase.num_errors += num_errors
    suite.num_errors += tcase.num_errors
    if tcase.num_errors > 0:
      suite.num_erroring_cases += 1
    tcase.update_times(case_runner.start_time, case_runner.end_time)
    suite.update_times(case_runner.start_time, case_runner.end_time)
    self.encountered_failure = self.encountered_failure or num_errors > 0 or num_failures > 0
    tcase.completed = True

  def visit_suite_end(self, idx, suite: testplan.Suite,
                      do_suite: bool, environment: testplan.Environment):
    """Log the suite outcome and roll its counts up into `environment`."""
    if suite.success():
      logging.info(
          "==== SUITE {}:{}:{} SUCCESS ========================================"
          .format(environment.name(), idx, suite.name()))
    else:
      environment.num_failures += suite.num_failures
      environment.num_failing_cases += suite.num_failing_cases
      if suite.num_failures > 0:
        environment.num_failing_suites += 1
      environment.num_errors += suite.num_errors
      environment.num_erroring_cases += suite.num_erroring_cases
      if suite.num_errors > 0:
        environment.num_erroring_suites += 1
      environment.update_times(suite.start_time, suite.end_time)
      logging.info(
          "==== SUITE {}:{}:{} FAILURE ========================================"
          .format(environment.name(), idx, suite.name()))
    suite.completed = True

  def visit_environment_end(self, environment: testplan.Environment, do_environment: bool):
    """Tear down `environment` and fold its result into the run status."""
    if not environment.success():
      self.run_passed = False
    environment.completed = True
    environment.config.teardown()

  def end_visit(self):
    """Finish the visit; returns whether the whole run passed."""
    logging.info("========== Finished running test")
    return self.success()

  def success(self):
    """Return True if every attempted environment succeeded."""
    return self.run_passed
from enum import Enum
import os
import sys
from sampletester import testplan
class Detail(Enum):
  """Verbosity levels for the test-run summary output."""
  NONE = 1
  BRIEF = 2
  FULL = 3
class SummaryVisitor(testplan.Visitor):
  """Print a (running) summary of test case execution.

  The summary is printed with indentation for environments, suites, and
  test cases. By default, the summary is printed as lines are recorded, unless
  `progress_out == None` is specified in `__init__(...)`. The full output
  accumulated is available via `output()`.
  """
  def __init__(self, verbosity, show_errors,
               progress_out=sys.stderr,
               debug=False):
    """Initialize the visitor.

    Args:
      verbosity: a `Detail` value controlling how much is reported.
      show_errors: whether failing items (and their output) are reported
        even at low verbosity.
      progress_out: stream to echo summary lines to as they are recorded;
        pass None to suppress the echo.
      debug: if True, also emit the captured error details of each runner.
    """
    self.verbosity = verbosity
    self.show_errors = show_errors
    self.lines = []
    self.indent = ' '
    # Fixed: os.devnull is only a *path string*; it must be opened to obtain
    # a writable file object acceptable to print(..., file=...).  Previously
    # progress_out=None crashed on the first append_lines() call.
    self.progress_out = progress_out if progress_out else open(os.devnull, 'w')
    self.debug = debug
  def visit_environment(self, environment: testplan.Environment, doit: bool):
    """Record one summary line for `environment`; returns the suite callback."""
    if self.verbosity == Detail.NONE and (environment.success() or not self.show_errors):
      return None, None
    name = environment.name()
    status = self.status_str(environment, doit)
    if not status:
      return None, None
    self.append_lines('{}: Test environment: "{}"'.format(status, name))
    return self.visit_suite, None
  def visit_suite(self, idx, suite: testplan.Suite, doit:bool):
    """Record one summary line for `suite`; returns the test-case callback."""
    name = suite.name()
    status = self.status_str(suite, doit)
    if not status:
      return None
    self.append_lines(self.indent + '{}: Test suite: "{}"'.format(status, name))
    return self.visit_testcase
  def visit_testcase(self, idx, tcase: testplan.TestCase, doit: bool):
    """Record one line for `tcase`, plus its output/errors when configured."""
    name = tcase.name()
    runner = tcase.runner
    status = self.status_str(tcase, doit)
    if not status:
      return
    self.append_lines(self.indent * 2 + '{}: Test case: "{}"'
                      .format(status, name))
    # Show captured case output at FULL verbosity, or on failure when
    # show_errors is set.
    if runner and (self.verbosity == Detail.FULL or (self.show_errors and not tcase.success())):
      self.append_lines(runner.get_output(6, '| '))
    if self.debug and runner:
      for error in runner.get_errors():
        self.append_lines('DEBUGGING: Error "{}":\n{}'.format(error[0],error[1]))
  def output(self):
    """Return the full accumulated summary as a single string."""
    return '\n'.join(self.lines)
  def append_lines(self, text):
    """Record `text` in the accumulated output and echo it to progress_out."""
    # Renamed parameter from `str`, which shadowed the builtin.
    print(text, file=self.progress_out)
    self.lines.append(text)
  def status_str(self, obj, doit):
    """Returns the status to print for a given object, or None if no status is to
    be displayed given the verbosity settings.
    """
    if not doit:
      return 'SKIPPED'
    if not obj.attempted:
      if self.verbosity == Detail.FULL:
        return 'PREEMPTED' # by an error
      return None
    if not obj.completed:
      return 'RUNNING'
    return 'PASSED' if obj.success() else 'FAILED'
import glob
import itertools
import logging
import os
from functools import reduce
from typing import Set
from sampletester import parser
from sampletester.parser import SCHEMA_TYPE_ABSENT as UNKNOWN_TYPE
from sampletester.sample_manifest import SCHEMA as MANIFEST_SCHEMA
from sampletester.testplan import SCHEMA as TESTPLAN_SCHEMA
def untyped_yaml_resolver(unknown_doc: parser.Document) -> str:
  """Determines how `parser.IndexedDocs` should classify `unknown_doc`.

  This is a resolver for parser.IndexedDocs, used to resolve YAML docs that
  did not have a type field and thus could not be automatically classified.
  Resolution is by filename, for backward compatibility: files ending in
  `.manifest.yaml` are categorized as manifests, and all remaining YAML
  files are categorized as testplans.
  """
  prefix, ext = os.path.splitext(unknown_doc.path)
  if ext != ".yaml":
    return UNKNOWN_TYPE
  inner_ext = os.path.splitext(prefix)[-1]
  if inner_ext == ".manifest":
    return MANIFEST_SCHEMA.primary_type
  return TESTPLAN_SCHEMA.primary_type
def index_docs(*file_patterns: str) -> parser.IndexedDocs:
  """Obtains manifests and testplans by indexing the specified paths or cwd.

  This function works in the following sequence:
  1. It attempts to obtain all the manifests and testplans contained in
     the globs in `file_patterns`.
  2. If either no manifest or no testplan is obtained this way:
  2.1 if any of the globs in `file_patterns` resolved to a directory, it does
      not search for any more files.
  2.2 if none of the globs in `file_patterns` resolved to a directory, it
      searches in all the paths under the cwd and registers the
      `file_patterns` matching the types not found in the previous step. In
      other words, if no manifests are found via the globs in `file_patterns`,
      it attempts to get manifests under the cwd, and similarly for testplans.

  Returns: the indexed docs of the files that were searched for.
  """
  def log_files(indexed_files):
    "Helper to be called before exiting this method"
    manifest_paths = [doc.path for doc in indexed_files.of_type(MANIFEST_SCHEMA.primary_type)]
    testplan_paths = [doc.path for doc in indexed_files.of_type(TESTPLAN_SCHEMA.primary_type)]
    logging.info('manifest files:\n {}'.format('\n '.join(manifest_paths)))
    logging.info('testplan files:\n {}'.format('\n '.join(testplan_paths)))
    return indexed_files
  # With no patterns at all, default to every YAML file under the cwd.
  if not file_patterns:
    file_patterns = ['**/*.yaml']
  explicit_paths = get_globbed(*file_patterns)
  # Any pattern that resolved to a directory is expanded to the YAML files
  # it (recursively) contains.
  explicit_directories = {path for path in explicit_paths
                          if os.path.isdir(path)}
  files_in_directories = get_globbed(*{f'{path}/**/*.yaml'
                                       for path in explicit_directories})
  explicit_paths |= files_in_directories
  indexed_explicit = create_indexed_docs(*explicit_paths)
  has_manifests = indexed_explicit.contains(MANIFEST_SCHEMA.primary_type)
  has_testplans = indexed_explicit.contains(TESTPLAN_SCHEMA.primary_type)
  if (has_manifests and has_testplans):
    # We have successfully found needed inputs already.
    return log_files(indexed_explicit)
  if files_in_directories:
    # Because directories were specified, we use this as a signal to *not*
    # recurse into the cwd. The caller of this method is responsible for
    # reporting that one or both of the needed file types is missing.
    return log_files(indexed_explicit)
  # Fallback: scan the cwd, but only register the doc types that were not
  # already found via the explicit patterns.
  implicit_files = get_globbed('**/*.yaml')
  indexed_implicit = create_indexed_docs(*implicit_files)
  if not has_testplans:
    indexed_explicit.add_documents(*indexed_implicit.of_type(TESTPLAN_SCHEMA.primary_type))
  if not has_manifests:
    indexed_explicit.add_documents(*indexed_implicit.of_type(MANIFEST_SCHEMA.primary_type))
  return log_files(indexed_explicit)
def create_indexed_docs(*all_paths: Set[str]) -> parser.IndexedDocs:
  """Build a parser.IndexedDocs containing every document in `all_paths`.

  Helper for `index_docs()`; also used heavily in tests.
  """
  docs = parser.IndexedDocs(resolver=untyped_yaml_resolver)
  docs.from_files(*all_paths)
  return docs
def get_globbed(*file_patterns: str) -> Set[str]:
  """Return the set of paths matching any of the glob `file_patterns`."""
  matches: Set[str] = set()
  for pattern in file_patterns:
    matches.update(glob.glob(pattern, recursive=True))
  return matches
import importlib
import logging
import os
# Default convention list used when the caller does not specify any.
DEFAULT="tag:sample:invocation,chdir"

# Absolute path of this file and of the directory containing it; used below
# to discover the convention modules that live alongside this __init__.py.
__abs_file__ = os.path.abspath(__file__)
__abs_file_path__ = os.path.split(__abs_file__)[0]

# The list of environment creation functions for each of the various conventions
# we discover below.
environment_creators = {}

# We find all the conventions defined via modules and subpackages rooted in the
# current directory.  Editor backup files ("...~") and package internals are
# excluded.
all_files = [entry for entry in os.listdir(__abs_file_path__)
             if entry !='__init__.py' and entry != '__pycache__' and not entry.endswith('~')]
all_conventions = [os.path.splitext(os.path.basename(entry))[0] for entry in all_files]

# We register the environment creation function for each convention. Each such
# function has the name `test_environments` within the convention code.
for convention in all_conventions:
  module = importlib.import_module('.'+convention, package='sampletester.convention')
  # Modules that do not expose `test_environments` are silently skipped.
  if 'test_environments' in dir(module):
    logging.info('registering convention "{}"'.format(convention))
    environment_creators[convention] = module.test_environments
def generate_environments(requested_conventions, testcase_args, manifest_options, indexed_docs):
  """Generates the environments for the requested conventions with the given args.

  Note that a given convention may (and usually will) generate multiple
  environments, typically for running tests in multiple languages.

  Args:
    requested_conventions: A list of strings, each of which contains the
      name of a convention previously registered in `environment_creators`.
    testcase_args: A list of args to pass in its entirety to each convention in
      `requested_conventions`. These are intended to be passed through to the
      caserunner.
    manifest_options: A dict of options to pass in its entirety to each
      convention. These are intended to address how the convention itself
      parses the manifest file.
    indexed_docs: The indexed documents needed by each convention to
      instantiate its environments.

  Returns:
    A list of all environments created by the requested conventions.

  Raises:
    ValueError: If a convention is unknown or fails to create environments.
  """
  all_environments = []
  for convention in requested_conventions:
    create_fn = environment_creators.get(convention, None)
    if create_fn is None:
      raise ValueError('convention "{}" not implemented'.format(convention))
    try:
      all_environments.extend(create_fn(indexed_docs, testcase_args, manifest_options))
    except Exception as ex:
      # Chain the original exception so its traceback is preserved for
      # debugging rather than being swallowed by the re-raise.
      raise ValueError('could not create test environments '
                       f'for convention "{convention}": {ex}') from ex
  return all_environments
# sample_annn_pkg
- author: laplaciannin102
- date: 2021/06/01
---
## Table of Contents
- [sample_annn_pkg](#sample_annn_pkg)
- [Table of Contents](#table-of-contents)
- [How to install](#how-to-install)
- [Github repository](#github-repository)
- [PyPI repository](#pypi-repository)
- [TestPyPI](#testpypi)
- [PyPI](#pypi)
- [Directory structure](#directory-structure)
- [Easy installation sample](#easy-installation-sample)
- [各ファイルの説明 or 書き方](#各ファイルの説明-or-書き方)
- [README.md / README.rst](#readmemd--readmerst)
- [LICENSE](#license)
- [requirements.txt](#requirementstxt)
- [MANIFEST.in](#manifestin)
- [setup.py](#setuppy)
- [パッケージデータに関して](#パッケージデータに関して)
- [.pypirc](#pypirc)
- [testについて](#testについて)
- [登録](#登録)
- [前提](#前提)
- [TestPyPI](#testpypi-1)
- [PyPI](#pypi-1)
- [参考](#参考)
- [公式](#公式)
- [Qiita系](#qiita系)
- [色々](#色々)
- [sample module](#sample-module)
---
## How to install
```shell
pip install sample_annn_pkg
```
## Github repository
- [https://github.com/laplaciannin102/sample_annn_pkg](https://github.com/laplaciannin102/sample_annn_pkg)
## PyPI repository
### TestPyPI
- [https://test.pypi.org/project/sample_annn_pkg/](https://test.pypi.org/project/sample_annn_pkg/)
### PyPI
- [https://pypi.org/project/sample_annn_pkg/](https://pypi.org/project/sample_annn_pkg/)
---
## Directory structure
```
sample_annn_pkg
├── .gitignore
├── LICENSE
├── MANIFEST.in
├── README.md
├── README.rst
├── sample_annn_pkg
│ ├── __init__.py
│ ├── _version.py
│ ├── sample_main_module.py
│ ├── sample_sub_module.py
│ └── datasets
│ ├── __init__.py
│ ├── load_datasets.py
│ └── sample_data
│ ├── sample_data.csv
│ └── sample_data.xlsx
├── tests
│ ├── __init__.py
│ └── test_main_moduleXXX.py
├── requirements.txt
└── setup.py
```
## Easy installation sample
- command
```shell
>> git clone https://github.com/laplaciannin102/sample_annn_pkg.git
>> cd sample_annn_pkg
>> python setup.py sdist upload -r testpypi
>> pip install --index-url https://test.pypi.org/simple/ sample_annn_pkg
```
- python
```python
>>> import sample_annn_pkg as sap
>>> sap.func02()
# success!!
# poyo
>>> df0 = sap.datasets.load_sample_data0() # load csv
# load sample data0
# file format: csv
# sample pandas.DataFrame:
# col1 col2 col3
# 0 1 2 3
# 1 4 5 6
# 2 7 8 9
>>> df1 = sap.datasets.load_sample_data1() # load excel
# load sample data1
# file format: excel
# sample pandas.DataFrame:
# col4 col5 col6
# 0 hoge 10 11
# 1 fuga 12 13
# 2 poyo 14 15
# 3 piyo 16 17
```
---
## 各ファイルの説明 or 書き方
### README.md / README.rst
- README.md: Githubに表示するためのMarkdown形式Readmeファイル
- README.rst: PyPIに表示するためのsetup.pyからlong_descriptionとして読み込むためのRST形式Readmeファイル
- RST: reStructuredTextの略.
- `pandoc --from markdown --to rst` 等でRST形式に変換してしまうと楽.
### LICENSE
- MITなどのライセンス情報を記入.
### requirements.txt
```
# pypiに存在するパッケージ名はそのまま書いていい
numpy
# バージョン指定したいときは等式を使う
scipy == 1.2.2
# pypiに存在しないパッケージの時は git://リポジトリのURL.git
git://git@github.com/foo/foo.git
# プライベートリポジトリの場合は+sshをつける git+ssh://git@github.com/foo/foo.git
git+ssh://git@github.com/foo/foo.git
```
### MANIFEST.in
- 必要なファイルはMANIFEST.inに書き込んでおく.
- 関連ファイルの書き方は[こちらのURL](https://docs.python.org/ja/3/distutils/sourcedist.html)を参照.
- [https://docs.python.org/ja/3/distutils/sourcedist.html](https://docs.python.org/ja/3/distutils/sourcedist.html)
- [https://docs.python.org/ja/3/distutils/commandref.html#sdist-cmd](https://docs.python.org/ja/3/distutils/commandref.html#sdist-cmd)
```
include README.md
include README.rst
include requirements.txt
include LICENSE
```
- 例
- *.txt のパターンに当てはまる全てのファイルを含む.
- examplesディレクトリにある*.txt *.pyに当てはまる全てのファイルを含む.
- examples/sample?/buildに当てはまる全てのディレクトリを除外する.
```
include *.txt
recursive-include examples *.txt *.py
prune examples/sample?/build
```
### setup.py
- [Python公式サイト(setup スクリプトを書く)](https://docs.python.org/ja/3/distutils/setupscript.html)
- [https://docs.python.org/ja/3/distutils/setupscript.html](https://docs.python.org/ja/3/distutils/setupscript.html)
- NumpyやScipyのモジュールでもsetuptoolsのsetupと似たものが存在する.
#### パッケージデータに関して
- packages=find_packages(exclude=('tests', 'docs'))で全パッケージのリストを取得.
- 公式サイトからの例
- ディレクトリ構成
```
setup.py
src/
mypkg/
__init__.py
module.py
data/
tables.dat
spoons.dat
forks.dat
```
- setup.py
```python
setup(...,
packages=['mypkg'],
package_dir={'mypkg': 'src/mypkg'},
package_data={'mypkg': ['data/*.dat']},
)
```
---
### .pypirc
- 後述のTestPyPIやPyPIへの登録が容易になる.
- C:\Users\<user> ディレクトリに.pypircというファイルを作成する.
- C:\Users\<user>\.pypirc
- 中身
- PyPIとTestPyPIに登録した際のユーザ名とパスワードを書き込む.
```
[distutils]
index-servers =
pypi
testpypi
[pypi]
repository=https://upload.pypi.org/legacy/
username=<PyPI username>
password=<PyPI password>
[testpypi]
repository=https://test.pypi.org/legacy/
username=<TestPyPI username>
password=<TestPyPI password>
```
---
## testについて
- モジュールの品質を担保するために, 予めモジュールのテストをしておくことが重要.
- [テスト方法例(推奨)]
- testsディレクトリを作成する.
- 下記の様なテスト用Pythonスクリプトを作成し, testを実行.
- testは次のコマンドで行うことができる.
- `python -m unittest discover`
- 通り次第, testsに格納しておく.
```python
from context import sample_annn_pkg as sap
import unittest
class TestMainModule(unittest.TestCase):
def setUp(self):
"""
最初に実行されるメソッド
"""
def test_func(self):
"""
functionをtestするメソッド
"""
actual = sap.func01
expected = 12345
self.assertEqual(expected, actual)
def tearDown(self):
"""
最後に実行されるメソッド
"""
print('tear down main module')
# delete poyo class object
del self.pc
print('test end!!')
print('*' * 80)
print()
if __name__ == '__main__':
unittest.main()
```
---
## 登録
### 前提
- 最終的には **PyPI** に登録するが, 先に試験的に **TestPyPI** に登録することができる.
- 先に **.pypirc** の作成をしておく必要がある.
### TestPyPI
- TestPyPIへのパッケージアップロード.
- パッケージ情報登録は不要.
- アップロードするパッケージの`setup.py`があるディレクトリで, 下記コマンドを実行.
`python setup.py sdist upload -r testpypi`
- TestPyPIからのインストール.
- 変更後のリポジトリURLからインストールする.
`pip install --index-url https://test.pypi.org/simple/ <PACKAGE_NAME>`
### PyPI
- PyPIへのパッケージアップロード.
- TestPyPIと同様にパッケージ情報登録は不要.
`python setup.py sdist upload`
---
## 参考
### 公式
- [6. Python Package Index (PyPI)](https://docs.python.org/ja/3.6/distutils/packageindex.html)
- [Packaging Python Projects](https://packaging.python.org/tutorials/packaging-projects/)
### Qiita系
- [(インターン向けに書いた)Pythonパッケージを作る方法](https://qiita.com/Ultra-grand-child/items/7717f823df5a30c27077)
- [自作のPythonパッケージをPyPIに登録してpip install可能にする](https://qiita.com/shonansurvivors/items/0fbcbfde129f2d26301c)
- [PyPIデビューしたい人の為のPyPI登録の手順](https://qiita.com/kinpira/items/0a4e7c78fc5dd28bd695)
- [PyPI 新URLへの登録・アップロード](https://qiita.com/ukiuki-satoshi/items/77ef1e39598226f1cff7)
- [Python標準のunittestの使い方メモ](https://qiita.com/aomidro/items/3e3449fde924893f18ca)
### 色々
- [PyPIに自作パッケージを登録する際にrequirements.txtを使用する方法](https://trsasasusu.com/blog/53/)
### sample module
- [github.com/navdeep-G/samplemod](https://github.com/navdeep-G/samplemod)
| /sample_annn_pkg-0.0.28.tar.gz/sample_annn_pkg-0.0.28/docs/how_to_upload_pypi_pkg.md | 0.478285 | 0.943815 | how_to_upload_pypi_pkg.md | pypi |
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """

    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        # Assumes self.data is non-empty; an empty data set raises
        # ZeroDivisionError.
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction (n - 1) for a sample; plain n for a population.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces = 50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots
        fig, axes = plt.subplots(2,sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')

        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # Fixed: previously labeled axes[0] a second time, leaving the pdf
        # subplot without a y-axis label.
        axes[1].set_ylabel('Density')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances add for independent Gaussians, so stdevs add in quadrature.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """

    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        # Assumes self.data is non-empty; an empty data set raises
        # ZeroDivisionError.
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction (n - 1) for a sample; plain n for a population.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces = 50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        min_range = min(self.data)
        max_range = max(self.data)

        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces

        x = []
        y = []

        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))

        # make the plots
        fig, axes = plt.subplots(2,sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')

        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # Fixed: previously labeled axes[0] a second time, leaving the pdf
        # subplot without a y-axis label.
        axes[1].set_ylabel('Density')
        plt.show()

        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances add for independent Gaussians, so stdevs add in quadrature.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
from __future__ import print_function, division
import numpy as np
import sounddevice as sd
import samplerate as sr
# Sample rate of the generated modulation signal (Hz).
source_samplerate = 3600
# Sample rate of the audio output stream (Hz).
target_samplerate = 44100
# Converter name passed to the samplerate library's CallbackResampler.
converter_type = 'sinc_fastest'
# FM synthesis parameters shared by the generator and playback callbacks.
params = {
    'mod_amplitude': 1,  # Modulation amplitude (Hz)
    'mod_frequency': 1,  # Modulation frequency (Hz)
    'fm_gain': 20,  # FM gain (Hz/Hz)
    'output_volume': 0.1,  # Output volume
    'carrier_frequency': 500,  # Carrier frequency (Hz)
}
def get_input_callback(samplerate, params, num_samples=256):
    """Return a function that produces samples of a sine.

    Parameters
    ----------
    samplerate : float
        The sample rate.
    params : dict
        Parameters for FM generation.
    num_samples : int, optional
        Number of samples to be generated on each call.
    """
    amplitude = params['mod_amplitude']
    frequency = params['mod_frequency']
    # Mutable cell so the closure can advance the time offset across calls.
    state = {'start_time': 0}

    def read_block():
        """Return the next `num_samples` samples of the sine."""
        time = state['start_time'] + np.arange(num_samples) / samplerate
        state['start_time'] += num_samples / samplerate
        return amplitude * np.cos(2 * np.pi * frequency * time)

    return read_block
def get_playback_callback(resampler, samplerate, params):
    """Return a sound playback callback.

    Parameters
    ----------
    resampler
        The resampler from which samples are read.
    samplerate : float
        The sample rate.
    params : dict
        Parameters for FM generation.
    """
    def callback(outdata, frames, time, _):
        """Playback callback.

        Read samples from the resampler and modulate them onto a carrier
        frequency.
        """
        # The running FM phase persists across calls as a function attribute.
        prev_phase = getattr(callback, 'last_fmphase', 0)

        # Frequency deviation, zero-padded if the resampler returned fewer
        # samples than requested.
        deviation = params['fm_gain'] * resampler.read(frames)
        deviation = np.pad(deviation, (0, frames - len(deviation)), mode='constant')

        t = time.outputBufferDacTime + np.arange(frames) / samplerate
        carrier_phase = 2 * np.pi * params['carrier_frequency'] * t
        # Integrate the deviation to obtain the accumulated FM phase.
        fm_phase = prev_phase + 2 * np.pi * np.cumsum(deviation) / samplerate

        outdata[:, 0] = params['output_volume'] * np.cos(carrier_phase + fm_phase)
        callback.last_fmphase = fm_phase[-1]

    return callback
def main(source_samplerate, target_samplerate, params, converter_type):
    """Setup the resampling and audio output callbacks and start playback.

    Blocks until interrupted with Ctrl+C.
    """
    from time import sleep

    # Resampling ratio from the synthesis rate to the device output rate.
    ratio = target_samplerate / source_samplerate

    # The resampler pulls blocks from the sine generator on demand; the
    # output stream pulls resampled audio through the playback callback.
    with sr.CallbackResampler(get_input_callback(source_samplerate, params),
                              ratio, converter_type) as resampler, \
            sd.OutputStream(channels=1, samplerate=target_samplerate,
                            callback=get_playback_callback(
                                resampler, target_samplerate, params)):
        print("Playing back... Ctrl+C to stop.")
        try:
            # Keep the main thread alive while audio runs in the background.
            while True:
                sleep(1)
        except KeyboardInterrupt:
            print("Aborting.")
# Run the demo with the module-level defaults when executed as a script.
if __name__ == '__main__':
    main(
        source_samplerate=source_samplerate,
        target_samplerate=target_samplerate,
        params=params,
        converter_type=converter_type)
# Samplicity v0.5
Samplicity is a command-line sample conversion tool created to transform .SFZ sample packs to .XI (Fasttracker 2 eXtended Instrument) format, supported by a number of music creation programs. Designed to work with the SunVox music tracker.
Thanks to [Alex Zolotov](http://www.warmplace.ru/) for help and materials.
**If you encounter any problems — contact me here, on [SoundCloud](http://soundcloud.com/convergent) or just email me ```andrew.magalich@gmail.com```**
## Changelog
### v0.5 April 12th, 2014
* Various WAV types support thanks to scikits.audiolab.SndFile (including 24bit!)
* Runtime option "--play": play all samples converted
* Runtime option "--verbose %": set output verbosity to % (0/1/2)
* Excess samples are no longer added to resulting .XI file
* New notice about omitted excess samples
* Conversion speed increased dramatically
* Case insensitive path matching to deal with SFZ-files created on different platforms
### v0.4 September 27th, 2012
* Added sample count constraint (no more than 128 per file) – -1 observed error
* Moved temp files to system temp dir – job done clear now
* Fixed envelope length and seconds-to-ticks conversion parameter — no more SunVox crashes
## Disclaimer
Samplicity is in early beta status and does not support all features in intersection of .SFZ and .XI. Now it is tested **only** in [SunVox tracker](http://www.warmplace.ru/soft/sunvox/) v1.6 and v1.7.2 with 59 sample packs (in 16bit format) I've got.
> Crashes of SunVox are known to me for wrongly encoded .XI-instruments, so **you should save your files every time before loading an instrument**
#### But what the hell! It helped me to write some [songs](http://soundcloud.com/convergent)!
## Formats
### eXtended Instrument
This format was created in 1990's for DOS music tracker called Fasttracker 2. It's binary, old and rusty, but still useful.
### SFZ
Open format by Cakewalk company. Designed for creation in notepad. Sample pack contains .sfz textfile and a number of samples nearby. So, you can create your sample pack without any specific software. See more [here](http://www.cakewalk.com/DevXchange/article.aspx?aid=108)
## Usage
Samplicity is written in [python v2.7.3](http://www.python.org/). To use this tool Python v2.7+ should be installed on your computer.
### Installation
If you use ```pip```, you can just
```bash
pip install samplicity
```
To manually install this package, simply download and run in its directory:
```bash
python setup.py install
```
Now you're ready to use Samplicity! Try:
```bash
samplicity
```
### Sample conversion
To convert a single sample pack, navigate in a **terminal/command line** to the sample pack folder and run the following command:
```bash
python samplicity "<SAMPLE PACK NAME>.sfz"
```
If python is installed, path to samplicity is right and sample pack is a valid .SFZ file, you'll see something like this:
```bash
--------------------------------------------------------------------------------
Converting " Keys - Grand Piano.sfz "
--------------------------------------------------------------------------------
////////// Notice: some regions are overlapping and would be overwritten
c1, c#1, d1, d#1, e1, f1, f#1, g1, g#1, a1, a#1, b1, c2, c#2, d2, d#2, e2, f2,
f#2, g2, g#2, a2, a#2, b2, c3, c#3, d3, d#3, e3, f3, f#3, g3, g#3, a3, a#3, b3,
c4, c#4, d4, d#4, e4, f4, f#4, g4, g#4, a4, a#4, b4, c5, c#5, d5, d#5, e5, f5,
f#5, g5, g#5, a5, a#5, b5, c6, c#6, d6, d#6, e6, f6, f#6, g6, g#6, a6, a#6, b6,
c7, c#7, d7, d#7, e7, f7, f#7, g7, g#7, a7, a#7, b7
////////// Notice: some notes are out of range and ignored
c8
////////// Notice: some regions are not used, skipping:
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
23, 24, 25, 26, 27, 28, 29
29 samples, 54225 kB written during 0.347247 seconds
1 files converted in 0.352371 seconds
```
You can control verbosity of output using ```--verbose``` command:
```bash
$ samplicity Keys\ -\ Grand\ Piano.sfz --force --verbose 0
--------------------------------------------------------------------------------
Converting " Keys - Grand Piano.sfz "
--------------------------------------------------------------------------------
29 samples, 54225 kB written during 0.35783 seconds
1 files converted in 0.362867 seconds
```
```bash
$ samplicity Keys\ -\ Grand\ Piano.sfz --force --verbose 2
--------------------------------------------------------------------------------
Converting " Keys - Grand Piano.sfz "
--------------------------------------------------------------------------------
////////// Notice: some regions are overlapping and would be overwritten
c1, c#1, d1, d#1, e1, f1, f#1, g1, g#1, a1, a#1, b1, c2, c#2, d2, d#2, e2, f2,
f#2, g2, g#2, a2, a#2, b2, c3, c#3, d3, d#3, e3, f3, f#3, g3, g#3, a3, a#3, b3,
c4, c#4, d4, d#4, e4, f4, f#4, g4, g#4, a4, a#4, b4, c5, c#5, d5, d#5, e5, f5,
f#5, g5, g#5, a5, a#5, b5, c6, c#6, d6, d#6, e6, f6, f#6, g6, g#6, a6, a#6, b6,
c7, c#7, d7, d#7, e7, f7, f#7, g7, g#7, a7, a#7, b7
////////// Notice: some notes are out of range and ignored
c8
* pcm16 stereo sample " samples/grand piano/piano-p-c1.wav " 1493336 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#1.wav " 1516008 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#1.wav " 1509820 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a1.wav " 1498120 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c2.wav " 1481792 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#2.wav " 1449812 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#2.wav " 1439776 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a2.wav " 1417312 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c3.wav " 1261156 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#3.wav " 1303952 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#3.wav " 1243268 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a3.wav " 1182584 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c4.wav " 1153464 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#4.wav " 1079780 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#4.wav " 1025388 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a4.wav " 953004 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c5.wav " 918164 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#5.wav " 840008 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#5.wav " 753584 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a5.wav " 698204 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c6.wav " 676156 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#6.wav " 573092 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#6.wav " 512252 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a6.wav " 425984 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c7.wav " 404128 kB
* pcm16 stereo sample " samples/grand piano/piano-p-d#7.wav " 270348 kB
* pcm16 stereo sample " samples/grand piano/piano-p-f#7.wav " 246012 kB
* pcm16 stereo sample " samples/grand piano/piano-p-a7.wav " 224744 kB
* pcm16 stereo sample " samples/grand piano/piano-p-c8.wav " 211276 kB
////////// Notice: some regions are not used, skipping:
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
23, 24, 25, 26, 27, 28, 29
29 samples, 54225 kB written during 0.346783 seconds
1 files converted in 0.351817 seconds
```
### Batch conversion
To convert more than one .SFZ file you can specify as many arguments to Samplicity as you want. Or even use a wildcard
```bash
samplicity "<SAMPLE 1>.sfz" "<SAMPLE 2>.sfz" "<SAMPLE 3>.sfz"
samplicity *.sfz
```
### Reconversion
If there is corresponding to your sample pack .XI file, Samplicity won't convert it again. To force reconversion, add ```--force``` attribute:
```bash
samplicity "<SAMPLE NAME>.sfz" --force
```
## Package
Repository contains:
* ```samplicity.py```
* ```xi_reader.py``` — tool to verify your .XI if something went wrong. Usage: ```python "<PATH TO SAMPLICITY FOLDER>/xi_reader.py" "<SAMPLE NAME>.xi"```. It will show you full info, contained in .XI file (but not samples binary data). It is useful for bugtrack.
* ```xi_specs.txt``` — specifications of eXtended Instrument edited and improved a bit. Thanks [Alex Zolotov](http://www.warmplace.ru/)
* ```Cakewalk DevXchange - Specifications - sfz File Format.pdf``` — specifications of .SFZ saved from Cakewalk [website](http://www.cakewalk.com/DevXchange/article.aspx?aid=108).
## Notices and errors
* **Notice: some notes are out of range and ignored** — .XI supports only 96 notes from C0 to B7, so some notes in your sample pack cannot fit in this range. Consider editing .SFZ file.
* **Notice: some regions are overlapping and would be overwritten** — .SFZ format supports velocity maps. But .XI doesn't. Consider splitting your .SFZ file into separate files. For example, I've got ```Grand Piano (Piano).sfz``` and ```Grand Piano (Forte).sfz```
* **Notice: some samples are not used, skipping** – Some SFZ-regions did not make it to the final key mapping, so they will be skipped to reduce the file size
* **Too long envelope, shrunk to 512** — .XI does not support envelopes longer than 512 ticks (~10.24 seconds), so your instrument envelope was modified to fit this range
* **Too many samples in file** — .XI does not support more than 128 samples in instrument. Consider splitting your file or removing some. | /samplicity-0.5.1.tar.gz/samplicity-0.5.1/README.md | 0.52074 | 0.813572 | README.md | pypi |
<img src="./img/samplics_logo.jpg" align="left" style="height: 110px; border-radius: 10%; padding: 5px;"/>
<h1> Sample Analytics </h1>
[<img src="https://github.com/survey-methods/samplics/workflows/Testing/badge.svg">](https://github.com/survey-methods/samplics/actions?query=workflow%3ATesting)
[<img src="https://github.com/survey-methods/samplics/workflows/Coverage/badge.svg">](https://github.com/survey-methods/samplics/actions?query=workflow%3ACoverage)
[<img src="https://github.com/survey-methods/samplics/workflows/Docs/badge.svg">](https://github.com/samplics-org/samplics/actions?query=workflow%3ADocs)
[](https://doi.org/10.21105/joss.03376)
[<img src="https://pepy.tech/badge/samplics">](https://pepy.tech/project/samplics)
In large-scale surveys, often complex random mechanisms are used to select samples. Estimates derived from such samples must reflect the random mechanism. _Samplics_ is a python package that implements a set of sampling techniques for complex survey designs. These survey sampling techniques are organized into the following four sub-packages.
**Sampling** provides a set of random selection techniques used to draw a sample from a population. It also provides procedures for calculating sample sizes. The sampling subpackage contains:
- Sample size calculation and allocation: Wald and Fleiss methods for proportions.
- Equal probability of selection: simple random sampling (SRS) and systematic selection (SYS)
- Probability proportional to size (PPS): Systematic, Brewer's method, Hanurav-Vijayan method, Murphy's method, and Rao-Sampford's method.
**Weighting** provides the procedures for adjusting sample weights. More specifically, the weighting subpackage allows the following:
- Weight adjustment due to nonresponse
- Weight poststratification, calibration and normalization
- Weight replication i.e. Bootstrap, BRR, and Jackknife
**Estimation** provides methods for estimating the parameters of interest with uncertainty measures that are consistent with the sampling design. The estimation subpackage implements the following types of estimation methods:
- Taylor-based, also called linearization methods
- Replication-based estimation i.e. Boostrap, BRR, and Jackknife
- Regression-based e.g. generalized regression (GREG)
**Small Area Estimation (SAE).** When the sample size is not large enough to produce reliable / stable domain level estimates, SAE techniques can be used to model the output variable of interest to produce domain level estimates. This subpackage provides Area-level and Unit-level SAE methods.
For more details, visit https://samplics-org.github.io/samplics/
## Usage
Let's assume that we have a population and we would like to select a sample from it. The goal is to calculate the sample size for an expected proportion of 0.80 with a precision (half confidence interval) of 0.10.
> ```python
> from samplics.sampling import SampleSize
>
> sample_size = SampleSize(parameter = "proportion")
> sample_size.calculate(target=0.80, half_ci=0.10)
> ```
Furthermore, the population is located in four natural regions i.e. North, South, East, and West. We could be interested in calculating sample sizes based on region specific requirements e.g. expected proportions, desired precisions and associated design effects.
> ```python
> from samplics.sampling import SampleSize
>
> sample_size = SampleSize(parameter="proportion", method="wald", strat=True)
>
> expected_proportions = {"North": 0.95, "South": 0.70, "East": 0.30, "West": 0.50}
> half_ci = {"North": 0.30, "South": 0.10, "East": 0.15, "West": 0.10}
> deff = {"North": 1, "South": 1.5, "East": 2.5, "West": 2.0}
>
> sample_size = SampleSize(parameter = "proportion", method="Fleiss", strat=True)
> sample_size.calculate(target=expected_proportions, half_ci=half_ci, deff=deff)
> ```
To select a sample of primary sampling units using PPS method,
we can use code similar to the snippets below. Note that we first use the `datasets` module to import the example dataset.
> ```python
> # First we import the example dataset
> from samplics.datasets import load_psu_frame
> psu_frame_dict = load_psu_frame()
> psu_frame = psu_frame_dict["data"]
>
> # Code for the sample selection
> from samplics.sampling import SampleSelection
> from samplics.utils import SelectMethod
>
> psu_sample_size = {"East":3, "West": 2, "North": 2, "South": 3}
> pps_design = SampleSelection(
> method=SelectMethod.pps_sys,
> strat=True,
> wr=False
> )
>
> psu_frame["psu_prob"] = pps_design.inclusion_probs(
> psu_frame["cluster"],
> psu_sample_size,
> psu_frame["region"],
> psu_frame["number_households_census"]
> )
> ```
The initial weighting step is to obtain the design sample weights. In this example, we show a simple example of two-stage sampling design.
> ```python
> import pandas as pd
>
> from samplics.datasets import load_psu_sample, load_ssu_sample
> from samplics.weighting import SampleWeight
>
> # Load PSU sample data
> psu_sample_dict = load_psu_sample()
> psu_sample = psu_sample_dict["data"]
>
> # Load PSU sample data
> ssu_sample_dict = load_ssu_sample()
> ssu_sample = ssu_sample_dict["data"]
>
> full_sample = pd.merge(
> psu_sample[["cluster", "region", "psu_prob"]],
> ssu_sample[["cluster", "household", "ssu_prob"]],
> on="cluster"
> )
>
> full_sample["inclusion_prob"] = full_sample["psu_prob"] * full_sample["ssu_prob"]
> full_sample["design_weight"] = 1 / full_sample["inclusion_prob"]
> ```
To adjust the design sample weight for nonresponse,
we can use code similar to:
> ```python
> import numpy as np
>
> from samplics.weighting import SampleWeight
>
> # Simulate response
> np.random.seed(7)
> full_sample["response_status"] = np.random.choice(
> ["ineligible", "respondent", "non-respondent", "unknown"],
> size=full_sample.shape[0],
> p=(0.10, 0.70, 0.15, 0.05),
> )
> # Map custom response statuses to the generic samplics statuses
> status_mapping = {
> "in": "ineligible",
> "rr": "respondent",
> "nr": "non-respondent",
> "uk":"unknown"
> }
> # adjust sample weights
> full_sample["nr_weight"] = SampleWeight().adjust(
> samp_weight=full_sample["design_weight"],
> adjust_class=full_sample["region"],
> resp_status=full_sample["response_status"],
> resp_dict=status_mapping
> )
> ```
To estimate population parameters using Taylor-based and replication-based methods, we can use code similar to:
> ```python
> # Taylor-based
> from samplics.datasets import load_nhanes2
>
> nhanes2_dict = load_nhanes2()
> nhanes2 = nhanes2_dict["data"]
>
> from samplics.estimation import TaylorEstimator
>
> zinc_mean_str = TaylorEstimator("mean")
> zinc_mean_str.estimate(
> y=nhanes2["zinc"],
> samp_weight=nhanes2["finalwgt"],
> stratum=nhanes2["stratid"],
> psu=nhanes2["psuid"],
> remove_nan=True,
> )
>
> # Replicate-based
> from samplics.datasets import load_nhanes2brr
>
> nhanes2brr_dict = load_nhanes2brr()
> nhanes2brr = nhanes2brr_dict["data"]
>
> from samplics.estimation import ReplicateEstimator
>
> ratio_wgt_hgt = ReplicateEstimator("brr", "ratio").estimate(
> y=nhanes2brr["weight"],
> samp_weight=nhanes2brr["finalwgt"],
> x=nhanes2brr["height"],
> rep_weights=nhanes2brr.loc[:, "brr_1":"brr_32"],
> remove_nan=True,
> )
>
> ```
To predict small area parameters, we can use code similar to:
> ```python
> import numpy as np
> import pandas as pd
>
> # Area-level basic method
> from samplics.datasets import load_expenditure_milk
>
> milk_exp_dict = load_expenditure_milk()
> milk_exp = milk_exp_dict["data"]
>
> from samplics.sae import EblupAreaModel
>
> fh_model_reml = EblupAreaModel(method="REML")
> fh_model_reml.fit(
> yhat=milk_exp["direct_est"],
> X=pd.get_dummies(milk_exp["major_area"], drop_first=True),
> area=milk_exp["small_area"],
> error_std=milk_exp["std_error"],
> intercept=True,
> tol=1e-8,
> )
> fh_model_reml.predict(
> X=pd.get_dummies(milk_exp["major_area"], drop_first=True),
> area=milk_exp["small_area"],
> intercept=True,
> )
>
> # Unit-level basic method
> from samplics.datasets import load_county_crop, load_county_crop_means
>
> # Load County Crop sample data
> countycrop_dict = load_county_crop()
> countycrop = countycrop_dict["data"]
> # Load County Crop Area Means sample data
> countycropmeans_dict = load_county_crop_means()
> countycrop_means = countycropmeans_dict["data"]
>
> from samplics.sae import EblupUnitModel
>
> eblup_bhf_reml = EblupUnitModel()
> eblup_bhf_reml.fit(
> countycrop["corn_area"],
> countycrop[["corn_pixel", "soybeans_pixel"]],
> countycrop["county_id"],
> )
> eblup_bhf_reml.predict(
> Xmean=countycrop_means[["ave_corn_pixel", "ave_corn_pixel"]],
> area=np.linspace(1, 12, 12),
> )
>
> ```
## Installation
`pip install samplics`
Python 3.7 or newer is required and the main dependencies are [numpy](https://numpy.org), [pandas](https://pandas.pydata.org), [scipy](https://www.scipy.org), and [statsmodels](https://www.statsmodels.org/stable/index.html).
## Contribution
If you would like to contribute to the project, please read [contributing to samplics](https://github.com/samplics-org/samplics/blob/main/CONTRIBUTING.md)
## License
[MIT](https://github.com/survey-methods/samplics/blob/master/license.txt)
## Contact
created by [Mamadou S. Diallo](https://twitter.com/MamadouSDiallo) - feel free to contact me!
| /samplics-0.4.10.tar.gz/samplics-0.4.10/README.md | 0.822296 | 0.937954 | README.md | pypi |
from concurrent.futures.thread import ThreadPoolExecutor
from concurrent.futures import Future, as_completed
from typing import List, Iterable, Tuple, Optional
from .constraint import SyntaxConstraint
from .constraint.json import valid_json, force_json_schema
from .constraint.one_of import one_of
def _check_token_batched(
check: SyntaxConstraint,
check_idx: int,
token_batch: List[str],
start_token_idx: int,
) -> Iterable[Tuple[int, int]]:
for token_idx, token in enumerate(token_batch, start=start_token_idx):
if not check.check_next(token):
yield check_idx, token_idx
class SyntaxValidityCheckFactory:
    """Base factory that captures constructor kwargs for building constraints.

    Subclasses override ``__call__`` to construct a fresh concrete
    ``SyntaxConstraint`` using the stored kwargs.
    """

    def __init__(self, **init_kwargs):
        # Captured once; re-used every time a new constraint is built.
        self._init_kwargs = dict(init_kwargs)

    def __call__(self) -> "SyntaxConstraint":
        raise NotImplementedError()
class JSONValidityCheckFactory(SyntaxValidityCheckFactory):
    """Factory for constraints requiring output to be valid JSON."""

    def __call__(self) -> SyntaxConstraint:
        # Build a fresh constraint per generation; kwargs were captured at init.
        return valid_json(**self._init_kwargs)
class JSONSchemaCheckFactory(SyntaxValidityCheckFactory):
    """Factory for constraints forcing output to follow a JSON schema."""

    def __call__(self) -> SyntaxConstraint:
        # Kwargs (the schema configuration) are forwarded to force_json_schema.
        return force_json_schema(**self._init_kwargs)
class OneOfValidityCheckFactory(SyntaxValidityCheckFactory):
    """Factory for constraints restricting output to a fixed set of options."""

    def __call__(self) -> SyntaxConstraint:
        # Kwargs (the allowed options) are forwarded to one_of.
        return one_of(**self._init_kwargs)
class SyntaxValidityCheckHandler:
    """Runs syntax-constraint checks over a token vocabulary in parallel.

    Keeps one ``SyntaxConstraint`` per generation in the batch. The
    vocabulary is split into batches that a thread pool scans to find
    tokens that would violate each active constraint.
    """

    def __init__(
        self,
        token_vocab: List[str],
        check_factory: "SyntaxValidityCheckFactory",
        num_workers: int = 8,
        begin_first_check: bool = True,
    ):
        self._executor = ThreadPoolExecutor(max_workers=num_workers)
        self._num_workers = num_workers
        # Guard against a zero batch size (vocab smaller than worker count),
        # which would make range() raise ValueError below.
        self._batch_size = max(1, len(token_vocab) // num_workers)
        self._active_futures: List[Future] = []
        self._initialized = False
        self._token_vocab = token_vocab
        self._check_factory = check_factory
        # Single check to constrain the very first sampled token.
        self._active_checks = [check_factory()]
        if begin_first_check:
            self.process_invalid_next_tokens()

    def cancel_current_check(self):
        """Cancel any in-flight vocabulary scans."""
        for future in self._active_futures:
            future.cancel()
        self._active_futures = []

    def process_invalid_next_tokens(self):
        """Submit scans finding invalid next tokens for each active check.

        Results are tuples (check idx, vocab idx) for every invalid next
        token, retrieved via :meth:`await_invalid_next_tokens`.
        Should be called after each update.
        """
        self._active_futures = []
        for check_idx, check in enumerate(self._active_checks):
            for start_token_idx in range(0, len(self._token_vocab), self._batch_size):
                self._active_futures.append(
                    self._executor.submit(
                        _check_token_batched,
                        check,
                        check_idx,
                        self._token_vocab[start_token_idx:start_token_idx + self._batch_size],
                        start_token_idx,
                    )
                )

    def await_invalid_next_tokens(self) -> Iterable[Tuple[int, int]]:
        """Yield (check idx, vocab idx) pairs for every invalid next token."""
        for future in as_completed(self._active_futures):
            yield from future.result()
        self._active_futures = []

    def update(self, next_token_ids: List[int], begin_next_check: bool = True):
        """Advance each active check's parser with its sampled token.

        Currently only single-beam / greedy decoding is exercised. On the
        first sampling step, expands the single bootstrap check to one check
        per element of the batch.
        """
        if not self._initialized:  # this is the first sampling step
            self._active_checks += [self._check_factory() for _ in range(len(next_token_ids) - 1)]
            # Bug fix: mark initialization done so subsequent updates do not
            # keep appending checks (the flag was never set before).
            self._initialized = True
        for token_id, check in zip(next_token_ids, self._active_checks):
            check.update_parser(self._token_vocab[token_id])
        if begin_next_check:
            self.process_invalid_next_tokens()
from typing import List, Union
from scs.incremental_parse import IncrementalParser, SpecialToken
from . import IncrementalParser, ParseFailure, SpecialToken
class StringMatchParser(IncrementalParser):
    """Incrementally matches input characters against one fixed string.

    Parsing succeeds once every character of ``match_string`` has been
    consumed in order; any mismatch raises :class:`ParseFailure`.
    """

    def __init__(self, match_string: str = "", nocase: bool = False):
        super().__init__()
        # With nocase, the target is lowered once here and every incoming
        # character is lowered in _append.
        self.match_string = match_string.lower() if nocase else match_string
        self._nocase = nocase
        self._parse_idx = 0  # next index of match_string to compare
        self._done = False

    def _copy_from(self, other: "StringMatchParser"):
        """Copy matcher state from another instance."""
        super()._copy_from(other)
        self.match_string = other.match_string
        # Bug fix: _nocase was not copied, so copies of a case-insensitive
        # parser silently became case-sensitive.
        self._nocase = other._nocase
        self._parse_idx = other._parse_idx
        self._done = other._done

    def _append(self, char: str | SpecialToken) -> bool:
        """Consume one character (or special token); return True when done.

        Raises:
            ParseFailure: on a character mismatch, out-of-bounds input, or a
                special token arriving before the match is complete.
        """
        # Bug fix: handle special tokens *before* any string operations —
        # the original called char.lower() first, which raised
        # AttributeError on SpecialToken inputs when nocase was set.
        if isinstance(char, SpecialToken):
            if char != SpecialToken.EOS or not self._done:
                raise ParseFailure("Got special token before match completion")
            return True
        if self._nocase:
            char = char.lower()
        if len(self.match_string) <= self._parse_idx:
            raise ParseFailure("Parse idx out of bounds")
        if self.match_string[self._parse_idx] != char:
            raise ParseFailure("Found character mismatch")
        self._parsed += char
        self._parse_idx += 1
        self._done = self._parse_idx == len(self.match_string)
        return self._done
class MultiStringMatchParser(IncrementalParser):
    """Matches input against several candidate strings simultaneously.

    Keeps one StringMatchParser per candidate and feeds every character to
    all still-viable sub-parsers; parsing fails only when no candidate
    remains.
    """

    # NOTE(review): the mutable default [] is safe here because it is only
    # iterated, never mutated — but an immutable default would be safer.
    def __init__(self, match_strings: List[str] = []):
        super().__init__()
        self._done = False
        # One sub-parser per candidate string.
        self._sub_parsers: List[StringMatchParser] = [StringMatchParser(match_string) for match_string in match_strings]
        # Indices into _sub_parsers that have not failed yet.
        self._running_parsers = list(range(len(self._sub_parsers)))

    def _copy_from(self, other: "MultiStringMatchParser"):
        """Copy matcher state, deep-copying sub-parsers so copies advance independently."""
        super()._copy_from(other)
        self._done = other._done
        self._sub_parsers = [s.copy() for s in other._sub_parsers]
        self._running_parsers = [i for i in other._running_parsers]

    def _append(self, char: str | SpecialToken) -> bool:
        """Feed one character to every running sub-parser.

        Returns True as soon as any candidate completes; raises
        ParseFailure when every remaining candidate rejects the character.
        """
        if len(self._running_parsers) == 0:
            raise ParseFailure("No remaining subparsers to match")
        not_failed = []
        failures = []
        done = False
        for i in self._running_parsers:
            try:
                # Keep the sub-parser only if it accepts this character.
                done = self._sub_parsers[i]._append(char) or done
                not_failed += [i]
            except ParseFailure as e:
                failures += [str(e)]
        self._running_parsers = not_failed
        if len(not_failed) == 0:
            raise ParseFailure(f"Failure(s) in string match subparsers: {', '.join(failures)}")
        # Mirror the first surviving sub-parser's consumed text.
        self._parsed = self._sub_parsers[self._running_parsers[0]]._parsed
        return done
from enum import Enum
from typing import Dict, Union, Optional, List, Tuple, Iterable
from dataclasses import dataclass
from scs.incremental_parse import IncrementalParser, SpecialToken
from .. import IncrementalParser, ParseFailure, SpecialToken
from ..string_match import StringMatchParser
def isalpha(char: str) -> bool:
    """Return True if ``char`` is an identifier character: a letter or '_'."""
    return char == '_' or char.isalpha()
class SchemaValueParser(IncrementalParser):
    """Base parser for schema values; subclasses populate ``value``."""

    def __init__(self):
        super().__init__()
        # Holds the parsed schema once parsing completes (None until then).
        self.value: JSONSchema = None
class ObjectSchemaParser(SchemaValueParser):
    """Incrementally parses an object schema body such as
    ``{name: string, ids?: [] number, nested: {...}}``.

    Runs as a state machine over :class:`ObjectParseStatus`, delegating key
    names, control sequences and nested objects to sub-parsers.  The parsed
    schema accumulates in ``self.value`` (an :class:`ObjectSchema`).
    """
    def __init__(self):
        super().__init__()
        self.value: ObjectSchema = ObjectSchema()
        # Raw text consumed so far; the opening '{' was consumed by the caller.
        self._parsed = SpecialChar.OPEN_OBJECT.value
        self._parse_status: ObjectParseStatus = ObjectParseStatus.OPENED
        # Delegate parser while in one of the *_SUBPARSER states.
        self._active_subparser: Optional[SchemaValueParser] = None
        # Key already parsed but not yet paired with its value.
        self._curr_key: JSONKey = None
        # Set when the pending value is a base type (string/number).
        self._curr_value_basetype: BaseTypeSchema = None
        # True once a '[]' control sequence was seen; the next value is a list.
        self._array_set = False
    r"""
    Opens a subparser and sends characters to it to begin parsing.
    Previous subparser should be closed before this is called."""
    def _open_subparser(self, char: str, array_set: bool = False):
        if char == SpecialChar.OPEN_OBJECT.value: # begin parsing nested schema
            self._active_subparser = ObjectSchemaParser()
            self._parse_status = ObjectParseStatus.IN_VALUE_SUBPARSER
        elif (not array_set) and char == ControlSequences.ARRAY.value[0]: # begin parsing array control sequence
            self._active_subparser = StringMatchParser(ControlSequences.ARRAY.value, nocase=True)
            self._active_subparser._append(char)
            self._parse_status = ObjectParseStatus.IN_ARRAY_CTR_SEQ_SUBPARSER
        elif char == ControlSequences.STRING.value[0]:
            # The base type is recorded up front; the StringMatchParser only
            # verifies the literal keyword "string".
            self._active_subparser = StringMatchParser(ControlSequences.STRING.value, nocase=True)
            self._curr_value_basetype = BaseTypeSchema(BaseType.STRING)
            self._active_subparser._append(char)
            self._parse_status = ObjectParseStatus.IN_VALUE_SUBPARSER
        elif char == ControlSequences.NUMBER.value[0]:
            self._active_subparser = StringMatchParser(ControlSequences.NUMBER.value, nocase=True)
            self._curr_value_basetype = BaseTypeSchema(BaseType.NUMBER)
            self._active_subparser._append(char)
            self._parse_status = ObjectParseStatus.IN_VALUE_SUBPARSER
        else:
            raise ParseFailure(f"Expected start of value, got {char}")
    r"""
    Closes a subparser and adds its final value to current parsed
    content"""
    def _close_subparser(self, char: str):
        parsed = self._active_subparser._parsed
        self._parsed += parsed
        if self._parse_status == ObjectParseStatus.IN_ARRAY_CTR_SEQ_SUBPARSER:
            # '[]' matched: remember that the upcoming value is a list type.
            self._array_set = True
            self._parse_status = ObjectParseStatus.AWAITING_OBJECT
        elif self._parse_status == ObjectParseStatus.IN_VALUE_SUBPARSER:
            self._parse_status = ObjectParseStatus.FINISHED_VALUE
            assert self._curr_key is not None
            # Base types were recognised in _open_subparser; otherwise the
            # sub-parser itself produced a nested schema value.
            value = self._curr_value_basetype if self._curr_value_basetype is not None else self._active_subparser.value
            value._is_list = self._array_set
            self.value.add_prop(key=self._curr_key, value=JSONValue(value))
            self._curr_key = None
            self._curr_value_basetype = None
            self._array_set = False
        else: # IN_KEY_SUBPARSER
            # The char that terminated the key parser tells us whether the
            # key is optional ('?') or the value follows immediately (':').
            if char == SpecialChar.OPTIONAL.value:
                # TODO set optional on current key
                self._parse_status = ObjectParseStatus.FINISHED_KEY
                self._curr_key = JSONKey(name=parsed, optional=True)
            elif char == SpecialChar.COLON.value:
                self._parse_status = ObjectParseStatus.AWAITING_VALUE
                self._curr_key = JSONKey(name=parsed, optional=False)
            else:
                raise ParseFailure(f"Invalid char following key name {char}")
        self._active_subparser = None
    def _append(self, char: str) -> bool:
        # Forward to the active delegate first; when it reports completion we
        # fold its result back into this parser's state.
        if self._active_subparser is not None:
            done = self._active_subparser._append(char)
            if done:
                self._close_subparser(char)
            return self._parse_status == ObjectParseStatus.PARSE_COMPLETE
        if char.isspace():
            # Whitespace between tokens is ignored.
            return False
        if self._parse_status in [ObjectParseStatus.OPENED, ObjectParseStatus.AWAITING_KEY]:
            if char == SpecialChar.CLOSE_OBJECT.value:
                self._parsed += char
                return True
            if isalpha(char):
                # Keys are bare identifiers (no quotes) in the schema language.
                self._active_subparser = PropNameParser()
                self._active_subparser._append(char)
                self._parse_status = ObjectParseStatus.IN_KEY_SUBPARSER
                return False
            raise ParseFailure(f"Expected '}}' or '\"', got {char}")
        if self._parse_status == ObjectParseStatus.AWAITING_VALUE:
            self._open_subparser(char, array_set=False)
            return False
        if self._parse_status == ObjectParseStatus.AWAITING_OBJECT:
            # A '[]' was just parsed; the element type follows.
            self._open_subparser(char, array_set=True)
            return False
        if self._parse_status == ObjectParseStatus.FINISHED_VALUE:
            if char == SpecialChar.COMMA.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.AWAITING_KEY
                return False
            if char == SpecialChar.CLOSE_OBJECT.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.PARSE_COMPLETE
                return True
            raise ParseFailure("Expected ',' or '}', got " + char)
        if self._parse_status == ObjectParseStatus.FINISHED_KEY:
            if char == SpecialChar.COLON.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.AWAITING_VALUE
                return False
            raise ParseFailure(f"Expected ':', got {char}")
        raise Exception("Something went wrong")
class JSONSchemaParser(ObjectSchemaParser):
    """
    Parser for outer JSON. Effectively an ObjectSchemaParser that is
    initialized with AWAITING_VALUE status, closes once the first value has
    been parsed, and then only accepts an EOS token."""
    def __init__(self):
        super().__init__()
        # Dummy key: the outer value is stored as the single property of
        # self.value so the inherited machinery can be reused unchanged.
        self._curr_key = JSONKey("")
        self._parsed = ""
        self._parse_status: ObjectParseStatus = ObjectParseStatus.AWAITING_VALUE
        self._active_subparser: Optional[SchemaValueParser] = None
        # True once the top-level value has been fully parsed.
        self._done = False
    def _append(self, char: str | SpecialToken) -> bool:
        if self._done:
            if char == SpecialToken.EOS:
                return True
            # The schema is already complete; nothing but EOS may follow.
            raise ParseFailure(
                f"Expected end of sequence after schema was complete, got {char}"
            )
        if char == SpecialToken.EOS:
            # EOS arrived while the schema was still being parsed; reject it
            # here rather than passing a non-str token to the parent parser.
            raise ParseFailure("Got EOS before schema was complete")
        super()._append(char)
        if self._parse_status == ObjectParseStatus.FINISHED_VALUE:
            self._done = True
            return True
        return False
    def get_schema(self) -> "JSONSchema":
        """Return the parsed top-level schema, or None if nothing was parsed."""
        for _, value in self.value._child_schemas:
            return value.value_def
        return None
class PropNameParser(IncrementalParser):
    """Parses a bare property (key) name: a letter or underscore followed by
    alphanumerics or underscores.

    ``_append`` returns True on the first character that cannot extend the
    name; that terminating character is not consumed.
    """

    def __init__(self) -> None:
        super().__init__()
        self._parsed = ""

    def valid_char(self, char: str):
        # The leading character may not be a digit.
        if self._parsed:
            return char == SpecialChar.UNDERSCORE.value or char.isalnum()
        return char == SpecialChar.UNDERSCORE.value or char.isalpha()

    def _append(self, char: str) -> bool:
        if self.valid_char(char):
            self._parsed += char
            return False
        return True
class JSONSchema:
    """Base class for all schema nodes.

    ``_is_list`` marks whether the node describes an array of the underlying
    type rather than a single value.
    """

    def __init__(self, is_list: bool = False) -> None:
        self._is_list = is_list
class BaseTypeSchema(JSONSchema):
    """Schema node for a primitive value (string or number)."""

    def __init__(self, type: "BaseType", is_list: bool = False):
        super().__init__(is_list=is_list)
        self.type = type

    def __eq__(self, __value: object) -> bool:
        # Equal iff the other node is also a base type with the same
        # primitive type and the same list-ness.
        if not isinstance(__value, BaseTypeSchema):
            return False
        return self.type == __value.type and self._is_list == __value._is_list

    def __repr__(self) -> str:
        prefix = '[]' if self._is_list else ''
        return prefix + self.type.value
# TODO add this
class StringEnumSchema(JSONSchema):
    """Schema node for a string restricted to a fixed set of options.

    Equality holds when the other node is a StringEnumSchema with the same
    list-ness whose options are a superset of ours.
    """

    def __init__(self, options: Optional[List[str]] = None, is_list: bool = False) -> None:
        super().__init__(is_list=is_list)
        # Avoid a shared mutable default argument: each instance gets its own
        # list instead of aliasing a module-level default.
        self.options = options if options is not None else []

    def __eq__(self, __value: object) -> bool:
        return (
            isinstance(__value, StringEnumSchema) and
            self._is_list == __value._is_list and
            # every one of our options must appear in the other's options
            len(set(self.options).intersection(set(__value.options))) == len(self.options)
        )
class ObjectSchema(JSONSchema):
    """Schema node for an object: an ordered list of (key, value) properties."""

    def __init__(self, is_list: bool = False):
        super().__init__(is_list=is_list)
        self._child_schemas: List[Tuple[JSONKey, JSONValue]] = []

    def add_prop(self, key: "JSONKey", value: "JSONValue"):
        """Append a property to the schema, preserving insertion order."""
        self._child_schemas.append((key, value))

    def get_keys(self, optional: Optional[bool] = None) -> Iterable["JSONKey"]:
        """Yield keys; when *optional* is given, filter by optionality."""
        for k, _ in self._child_schemas:
            if optional is None or k.optional == optional:
                yield k

    def get_items(self) -> Iterable[Tuple["JSONKey", "JSONValue"]]:
        """Yield (key, value) property pairs in insertion order."""
        for k, v in self._child_schemas:
            yield k, v

    def __eq__(self, __value: object) -> bool:
        if not isinstance(__value, ObjectSchema):
            return False
        if self._is_list != __value._is_list:
            return False
        # zip() alone would silently ignore extra properties on the longer
        # schema, so compare property counts explicitly first.
        if len(self._child_schemas) != len(__value._child_schemas):
            return False
        for (k1, v1), (k2, v2) in zip(self._child_schemas, __value._child_schemas):
            if k1 != k2 or v1 != v2:
                return False
        return True
@dataclass
class JSONKey:
    """A property key in an object schema."""

    name: str
    # Whether the key may be omitted (declared with a trailing '?').
    optional: bool = False
@dataclass
class JSONValue:
    """Wrapper holding the schema that describes a property's value."""

    value_def: JSONSchema
class BaseType(Enum):
    """Primitive JSON value types supported by the schema language."""

    STRING = "string"
    NUMBER = "number"

    def schema(self, is_list: bool = False) -> BaseTypeSchema:
        """Build a :class:`BaseTypeSchema` wrapping this primitive type."""
        return BaseTypeSchema(type=self, is_list=is_list)
class SpecialChar(Enum):
    """Single-character tokens with structural meaning in a schema."""

    OPEN_OBJECT = "{"
    CLOSE_OBJECT = "}"
    COMMA = ","
    COLON = ":"
    OPTIONAL = "?"      # marks a key as optional
    UNDERSCORE = "_"    # permitted inside property names
class ControlSequences(Enum):
    """Multi-character keywords recognised in schema value position."""

    STRING = "string"
    NUMBER = "number"
    ONE_OF = "oneof"
    ARRAY = "[]"
class ObjectParseStatus(Enum):
    # States of the ObjectSchemaParser state machine.
    OPENED = 0  # just saw '{'; expect a key or '}'
    AWAITING_KEY = 1  # after ','; expect the next key
    AWAITING_VALUE = 2  # after ':'; expect a value
    AWAITING_OBJECT = 3  # after a '[]' control sequence; expect the element type
    IN_KEY_SUBPARSER = 4  # delegating chars to a PropNameParser
    IN_ARRAY_CTR_SEQ_SUBPARSER = 5  # delegating to the '[]' StringMatchParser
    IN_VALUE_SUBPARSER = 6  # delegating chars to a value parser
    FINISHED_KEY = 7  # key parsed; expect ':'
    FINISHED_VALUE = 8  # value parsed; expect ',' or '}'
PARSE_COMPLETE = 9 | /sampling_constraints-0.0.18-py3-none-any.whl/scs/incremental_parse/json/schema.py | 0.76533 | 0.177704 | schema.py | pypi |
from enum import Enum
from typing import Dict, Union, Optional
from copy import deepcopy
from scs.incremental_parse import IncrementalParser
from .. import IncrementalParser, ParseFailure, SpecialToken
class JSONParser(IncrementalParser):
    """Incremental parser for a complete JSON document.

    Delegates the whole document to an ObjectParser or ArrayParser depending
    on the first character, then accepts only an EOS token.  The ``allow_*``
    flags tune strictness and are propagated to child parsers.
    """
    def __init__(
        self,
        allow_outer_list: bool = True,
        allow_empty: bool = True,
        allow_empty_children: bool = True,
        allow_whitespace_formatting: bool = False,
    ):
        super().__init__()
        self._allow_outer_list = allow_outer_list
        self._allow_empty = allow_empty
        self._allow_empty_children = allow_empty_children
        self._allow_whitespace_formatting = allow_whitespace_formatting
        # Parser for the single top-level value; created on the first char.
        self._subparser = None
        # True once the top-level value has been fully parsed.
        self._complete = False
    def _copy_from(self, other: "JSONParser"):
        super()._copy_from(other)
        if other._subparser:
            self._subparser = other._subparser.copy()
        self._complete = other._complete
        self._allow_outer_list = other._allow_outer_list
        self._allow_empty = other._allow_empty
        self._allow_empty_children = other._allow_empty_children
        self._allow_whitespace_formatting = other._allow_whitespace_formatting
    def _append(self, char: Union[str, SpecialToken]) -> bool:
        # The first character decides the top-level container type; it is not
        # forwarded because the sub-parsers pre-seed their own opening char.
        if self._subparser is None:
            if char == SpecialChar.OPEN_ARRAY.value:
                if self._allow_outer_list:
                    self._subparser = ArrayParser(
                        allow_empty=self._allow_empty,
                        allow_empty_children=self._allow_empty_children,
                        allow_whitespace_formatting=self._allow_whitespace_formatting,
                    )
                else:
                    raise ParseFailure("Only allow object in outer JSON")
            elif char == SpecialChar.OPEN_OBJECT.value:
                self._subparser = ObjectParser(
                    allow_empty=self._allow_empty,
                    allow_empty_children=self._allow_empty_children,
                    allow_whitespace_formatting=self._allow_whitespace_formatting,
                )
            else: # disallow empty space characters
                raise ParseFailure(f"Expected '{{' or '[', got {char}")
            return False
        if self._complete:
            # Only EOS may follow a fully parsed document.
            if char == SpecialToken.EOS:
                return True
            raise ParseFailure("Expected end of sequence after close.")
        if isinstance(char, SpecialToken):
            raise ParseFailure(f"Expected character, got special token: {char}")
        done = self._subparser._append(char)
        if done:
            # Fold the finished sub-parser's raw text into our own.
            sub_parsed = self._subparser._parsed
            self._parsed += sub_parsed
            self._complete = True
            return True
        return False
    def get_parsed(self) -> str:
        """Return the raw text parsed so far, including partial content."""
        if self._subparser is None:
            return ""
        else:
            return self._subparser.get_parsed()
class ObjectParser(IncrementalParser):
    """Incremental parser for a JSON object body (after the opening '{').

    Runs as a state machine over :class:`ObjectParseStatus`; keys and values
    are handled by delegate parsers (StringParser, NumberParser and nested
    Object/Array parsers).
    """
    def __init__(
        self,
        allow_empty: bool = True,
        allow_empty_children: bool = True,
        allow_whitespace_formatting: bool = False,
    ):
        super().__init__()
        self._allow_empty = allow_empty
        self._allow_empty_children = allow_empty_children
        self._allow_whitespace_formatting = allow_whitespace_formatting
        # Raw text consumed so far; the opening '{' was consumed by the caller.
        self._parsed = SpecialChar.OPEN_OBJECT.value
        self.state: Dict[str, Union[int, float, str, ObjectParser]] = {}
        self._parse_status: ObjectParseStatus = ObjectParseStatus.OPENED
        # Delegate parser while in one of the *_SUBPARSER states.
        self._active_subparser: Optional[IncrementalParser] = None
    def _copy_from(self, other: "ObjectParser"):
        super()._copy_from(other)
        self.state = deepcopy(other.state)
        self._parse_status = other._parse_status
        self._active_subparser = (
            other._active_subparser.copy() if other._active_subparser else None
        )
        self._allow_empty = other._allow_empty
        self._allow_empty_children = other._allow_empty_children
        self._allow_whitespace_formatting = other._allow_whitespace_formatting
    def get_parsed(self) -> str:
        """Return the raw text parsed so far, including any partial content
        still held by an active delegate parser."""
        parsed = self._parsed
        if self._parse_status in [
            ObjectParseStatus.IN_KEY_SUBPARSER,
            ObjectParseStatus.IN_VALUE_SUBPARSER,
        ]:
            parsed += self._active_subparser.get_parsed()
        return parsed
    r"""
    Opens a subparser and sends characters to it to begin parsing.
    Previous subparser should be closed before this is called."""
    def _open_subparser(self, char: str):
        if char == SpecialChar.OPEN_OBJECT.value:  # begin parsing object
            self._active_subparser = ObjectParser(
                allow_empty=self._allow_empty_children,
                allow_empty_children=self._allow_empty_children,
                allow_whitespace_formatting=self._allow_whitespace_formatting,
            )
        elif char == SpecialChar.OPEN_ARRAY.value:  # begin parsing array
            self._active_subparser = ArrayParser(
                allow_empty=self._allow_empty_children,
                allow_empty_children=self._allow_empty_children,
            )
        elif char == SpecialChar.QUOTE.value:  # begin parsing text
            self._active_subparser = StringParser()
        elif char.isnumeric():  # begin parsing number
            self._active_subparser = NumberParser()
            # NumberParser has no pre-seeded prefix, so forward the first digit.
            self._active_subparser._append(char)
        else:
            raise ParseFailure(f"Expected start of value, got {char}")
    r"""
    Closes a subparser and adds its final value to current parsed
    content"""
    def _close_subparser(self):
        self._parsed += self._active_subparser._parsed
        if (
            isinstance(self._active_subparser, NumberParser)
            and not self._active_subparser.closing_char.isspace()
        ):
            # infer current parse state based on how number parser was terminated
            # (NumberParser consumes its own terminator, unlike other delegates)
            self._parsed += self._active_subparser.closing_char
            if self._active_subparser.closing_char == SpecialChar.COMMA.value:
                self._parse_status = ObjectParseStatus.AWAITING_KEY
            elif self._active_subparser.closing_char == SpecialChar.CLOSE_OBJECT.value:
                self._parse_status = ObjectParseStatus.PARSE_COMPLETE
            else:
                raise ParseFailure(
                    f"Expected ',' or '}}', got {self._active_subparser.closing_char}"
                )
        elif self._parse_status == ObjectParseStatus.IN_VALUE_SUBPARSER:
            self._parse_status = ObjectParseStatus.FINISHED_VALUE
        else:
            self._parse_status = ObjectParseStatus.FINISHED_KEY
        self._active_subparser = None
    def _append(self, char: str) -> bool:
        # Delegate first if a key/value sub-parser is active.
        if self._parse_status in [
            ObjectParseStatus.IN_VALUE_SUBPARSER,
            ObjectParseStatus.IN_KEY_SUBPARSER,
        ]:
            done = self._active_subparser._append(char)
            if done:
                self._close_subparser()
            return self._parse_status == ObjectParseStatus.PARSE_COMPLETE
        if char.isspace():
            if not self._allow_whitespace_formatting:
                raise ParseFailure(
                    "Got whitespace in JSON body. If expected set allow_whitespace_formatting accordingly."
                )
            return False
        if self._parse_status == ObjectParseStatus.OPENED:
            if char == SpecialChar.CLOSE_OBJECT.value:
                if not self._allow_empty:
                    raise ParseFailure(
                        "Got empty object. If this is expected set allow_empty and allow_empty_children accordingly"
                    )
                self._parsed += char
                return True
            if char == SpecialChar.QUOTE.value:
                self._active_subparser = StringParser()
                self._parse_status = ObjectParseStatus.IN_KEY_SUBPARSER
                return False
            raise ParseFailure(f"Expected '}}' or '\"', got {char}")
        if self._parse_status == ObjectParseStatus.AWAITING_VALUE:
            self._open_subparser(char)
            self._parse_status = ObjectParseStatus.IN_VALUE_SUBPARSER
            return False
        if self._parse_status == ObjectParseStatus.AWAITING_KEY:
            if char == SpecialChar.QUOTE.value:  # begin parsing key
                self._active_subparser = StringParser()
                self._parse_status = ObjectParseStatus.IN_KEY_SUBPARSER
                return False
            raise ParseFailure(f"Expected '\"', got {char}")
        if self._parse_status == ObjectParseStatus.FINISHED_VALUE:
            if char == SpecialChar.COMMA.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.AWAITING_KEY
                return False
            if char == SpecialChar.CLOSE_OBJECT.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.PARSE_COMPLETE
                return True
            raise ParseFailure("Expected ',' or '}', got " + char)
        if self._parse_status == ObjectParseStatus.FINISHED_KEY:
            if char == SpecialChar.COLON.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.AWAITING_VALUE
                return False
            raise ParseFailure(f"Expected ':', got {char}")
        raise Exception("Something went wrong")
class ArrayParser(ObjectParser):
    """Incremental parser for a JSON array body (after the opening '[').

    Reuses ObjectParser's machinery but has no keys: every element is a
    value, and ']' closes the array.
    """
    def __init__(
        self,
        allow_empty: bool = True,
        allow_empty_children: bool = True,
        allow_whitespace_formatting: bool = False,
    ):
        super().__init__(
            allow_empty=allow_empty,
            allow_empty_children=allow_empty_children,
            allow_whitespace_formatting=allow_whitespace_formatting,
        )
        # Raw text consumed so far; the opening '[' was consumed by the caller.
        self._parsed = SpecialChar.OPEN_ARRAY.value
    def _close_subparser(self):
        self._parsed += self._active_subparser._parsed
        if (
            isinstance(self._active_subparser, NumberParser)
            and not self._active_subparser.closing_char.isspace()
        ):
            # infer current parse state based on how number parsing was terminated
            # (NumberParser consumes its own terminator, unlike other delegates)
            self._parsed += self._active_subparser.closing_char
            if self._active_subparser.closing_char == SpecialChar.COMMA.value:
                self._parse_status = ObjectParseStatus.AWAITING_VALUE
            elif self._active_subparser.closing_char == SpecialChar.CLOSE_ARRAY.value:
                self._parse_status = ObjectParseStatus.PARSE_COMPLETE
            else:
                raise ParseFailure(
                    f"Expected ',' or ']', got {self._active_subparser.closing_char}"
                )
        else:
            self._parse_status = ObjectParseStatus.FINISHED_VALUE
        self._active_subparser = None
    def _append(self, char: str) -> bool:
        # Delegate first if an element sub-parser is active.
        if self._parse_status == ObjectParseStatus.IN_VALUE_SUBPARSER:
            done = self._active_subparser._append(char)
            if done:
                self._close_subparser()
            return self._parse_status == ObjectParseStatus.PARSE_COMPLETE
        if char.isspace():
            if not self._allow_whitespace_formatting:
                raise ParseFailure(
                    "Got whitespace in JSON body. If expected set allow_whitespace_formatting accordingly."
                )
            return False
        if self._parse_status == ObjectParseStatus.OPENED:
            if char == SpecialChar.CLOSE_ARRAY.value:
                if not self._allow_empty:
                    raise ParseFailure(
                        "Got empty object. If this is expected set allow_empty and allow_empty_children accordingly"
                    )
                self._parsed += char
                return True
            # First element begins immediately (arrays have no keys).
            self._open_subparser(char)
            self._parse_status = ObjectParseStatus.IN_VALUE_SUBPARSER
            return False
        if self._parse_status == ObjectParseStatus.AWAITING_VALUE:
            self._open_subparser(char)
            self._parse_status = ObjectParseStatus.IN_VALUE_SUBPARSER
            return False
        if self._parse_status == ObjectParseStatus.FINISHED_VALUE:
            if char == SpecialChar.COMMA.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.AWAITING_VALUE
                return False
            if char == SpecialChar.CLOSE_ARRAY.value:
                self._parsed += char
                self._parse_status = ObjectParseStatus.PARSE_COMPLETE
                return True
            raise ParseFailure(f"Expected ',' or ']', got {char}")
        raise Exception("Something went wrong")
class NumberParser(IncrementalParser):
    """Incrementally parses a non-negative JSON number (integer or decimal).

    The parser consumes characters until it sees a terminator (',', ']',
    '}' or whitespace); the terminator is stored in ``closing_char`` so the
    enclosing parser can act on it.
    """

    _END_CHARS = [",", "]", "}"]

    def __init__(self) -> None:
        super().__init__()
        self._has_period = False  # a '.' has already been consumed
        # Number started with '0': only '.' or a terminator may follow.
        self._leading_zero = None
        # Whether the characters consumed so far form a complete number.
        self._is_valid = True
        self.closing_char = None  # appended to outer parser after closing

    def _copy_from(self, other: "NumberParser"):
        super()._copy_from(other)
        self._has_period = other._has_period
        self._leading_zero = other._leading_zero
        self._is_valid = other._is_valid
        # closing_char must survive a copy too, or a copied, already-closed
        # parser would lose its terminator.
        self.closing_char = other.closing_char

    def _append(self, char: str) -> bool:
        is_end_char = char in NumberParser._END_CHARS or char.isspace()
        if self._leading_zero:
            # After a leading '0', only a decimal point or a terminator is
            # legal ("0" by itself is a valid number; "01" is not).
            if char != SpecialChar.PERIOD.value and not is_end_char:
                raise ParseFailure("Leading 0 in integer value")
            self._leading_zero = False
        if char.isnumeric():
            if len(self._parsed) == 0 and char == SpecialChar.ZERO.value:
                self._leading_zero = True
            self._parsed += char
            self._is_valid = True
        elif char == SpecialChar.PERIOD.value:
            if (not self._has_period) and len(
                self._parsed
            ) > 0:  # cannot begin with '.'
                self._parsed += char
                self._has_period = True
                self._is_valid = False  # cannot end with '.'
            else:
                raise ParseFailure("Invalid position for '.' in number")
        elif is_end_char:
            if self._is_valid:
                self.closing_char = char
                return True
            raise ParseFailure(
                f"End character '{char}' after invalid number {self._parsed}"
            )
        else:
            raise ParseFailure(f"Invalid character for number: {char}")
        return False
class StringParser(IncrementalParser):
    """Incrementally parses a double-quoted JSON string literal.

    ``_parsed`` holds the raw literal text, including the surrounding quotes
    and any escape characters, so the outer parser can splice it back into
    the document verbatim.
    """

    def __init__(self) -> None:
        super().__init__()
        self._parsed = '"'  # opening quote was consumed by the caller
        self._escape_next = False  # previous char was a backslash

    def _copy_from(self, other: "StringParser"):
        super()._copy_from(other)
        self._escape_next = other._escape_next

    def _append(self, char: str) -> bool:
        if self._escape_next:
            # Escaped char is taken literally and never terminates the string.
            self._parsed += char
            self._escape_next = False
        elif char == SpecialChar.QUOTE.value:
            self._parsed += char
            return True
        elif char == SpecialChar.ESCAPE.value:
            # Keep the backslash in the raw text; dropping it would corrupt
            # the literal when re-emitted (e.g. "a\"b" would become "a"b").
            self._parsed += char
            self._escape_next = True
        else:
            self._parsed += char
        return False
class SpecialChar(Enum):
    """Single characters with structural meaning in a JSON document."""

    ESCAPE = "\\"
    PERIOD = "."
    OPEN_OBJECT = "{"
    CLOSE_OBJECT = "}"
    OPEN_ARRAY = "["
    CLOSE_ARRAY = "]"
    QUOTE = '"'
    COMMA = ","
    COLON = ":"
    ZERO = "0"  # special-cased for leading-zero number validation
class ObjectParseStatus(Enum):
    # States of the Object/Array parser state machine.
    OPENED = 0  # just saw the opening bracket; expect first key/element or closer
    AWAITING_KEY = 1  # after ','; expect a quoted key
    AWAITING_VALUE = 2  # after ':' (or ',' in arrays); expect a value
    IN_KEY_SUBPARSER = 3  # delegating chars to a key StringParser
    IN_VALUE_SUBPARSER = 4  # delegating chars to a value parser
    FINISHED_KEY = 5  # key parsed; expect ':'
    FINISHED_VALUE = 6  # value parsed; expect ',' or the closer
PARSE_COMPLETE = 7 | /sampling_constraints-0.0.18-py3-none-any.whl/scs/incremental_parse/json/__init__.py | 0.838117 | 0.165222 | __init__.py | pypi |
## samplingsimulatorpy
 [](https://codecov.io/gh/UBC-MDS/samplingsimulatorpy) 
[](https://samplingsimulatorpy.readthedocs.io/en/latest/?badge=latest)
`samplingsimulatorpy` is a Python package intended to assist those teaching or learning basic statistical inference.
### Authors
| Name | GitHub |
| ---------------- | ----------------------------------------------- |
| Holly Williams | [hwilliams10](https://github.com/hwilliams10) |
| Lise Braaten | [lisebraaten](https://github.com/lisebraaten) |
| Tao Guo | [tguo9](https://github.com/tguo9) |
| Yue (Alex) Jiang | [YueJiangMDSV](https://github.com/YueJiangMDSV) |
### Overview
This package allows users to generate virtual populations which can be sampled from in order to compare and contrast sample vs sampling distributions for different sample sizes. The package also allows users to sample from the generated virtual population (or any other population), plot the distributions, and view summaries for the parameters of interest.
## Installation:
```
pip install -i https://test.pypi.org/simple/ samplingsimulatorpy
```
## Function Descriptions
- `generate_virtual_pop` creates a virtual population.
- **Inputs** : distribution function (i.e. `np.random.lognormal`, `np.random.binomial`, etc), the parameters required by the distribution function, and the size of the population.
- **Outputs**: the virtual population as a tibble
- `draw_samples` generates samples of different sizes
- **Inputs** : population to sample from, the sample size, and the number of samples
- **Outputs**: returns a tibble with the sample number in one column and value in a second column.
- `plot_sample_hist` creates sample distributions for different sample sizes.
- **Inputs** : population to sample from, the samples to plot, and a vector of the sample sizes
- **Outputs**: returns a grid of sample distribution plots
- `plot_sampling_dist` creates sampling distributions for different sample sizes.
- **Inputs** : population to sample from, the samples to plot, and a vector of the sample sizes
- **Outputs**: returns a grid of sampling distribution plots
- `stat_summary`: returns a summary of the statistical parameters of interest
- **Inputs**: population, samples, parameter(s) of interest
- **Outputs**: summary tibble
#### How do these fit into the Python ecosystem?
To the best of our knowledge, there is currently no existing Python package with the specific functionality to create virtual populations and make the specific sample and sampling distributions described above. We do make use of many existing Python packages and expand on them to make very specific functions. These include:
- `scipy.stats` to get distribution functions
- `np.random` to generate random samples
- [Altair](https://altair-viz.github.io/) to create plots
Python `pandas` already includes some summary statistics functions such as `.describe()`, however our package will be more customizable. Our summary will only include the statistical parameters of interest and will provide a comparison between the sample, sampling, and true population parameters.
### Dependencies
- python = "^3.7"
- pandas = "^1.0.1"
- numpy = "^1.18.1"
- altair = "^4.0.1"
## Usage
#### `generate_virtual_pop`
```
from samplingsimulatorpy import generate_virtual_pop
generate_virtual_pop(size, distribution_func, *para)
```
**Arguments:**
- `size`: The number of samples
- `distribution_func`: The distribution that we are generating samples from
- `*para`: The arguments required for the distribution function
**Example:**
`pop = generate_virtual_pop(100, np.random.normal, 0, 1)`
#### `draw_samples`
```
from samplingsimulatorpy import draw_samples
draw_samples(pop, reps, n_s)
```
**Arguments:**
- `pop` the virtual population as a data frame
- `reps` the number of replication for each sample size as an integer
value
- `n_s` the sample size for each one of the samples as a list
**Example:**
`samples = draw_samples(pop, 3, [5, 10, 15, 20])`
#### `plot_sample_hist`
```
from samplingsimulatorpy import plot_sample_hist
plot_sample_hist(pop, samples)
```
**Arguments:**
- `pop` the virtual population as a data frame
- `samples` the samples as a data frame
**Example:**
`plot_sample_hist(pop, samples)`
#### `plot_sampling_hist`
```
from samplingsimulatorpy import plot_sampling_hist
plot_sampling_hist(samples)
```
**Arguments:**
- `samples` the samples as a data frame
**Example:**
`plot_sampling_hist(samples)`
#### `stat_summary`
```
from samplingsimulatorpy import stat_summary
stat_summary(pop, samples, parameter)
```
**Arguments**
- `population` The virtual population
- `samples` The drawn samples
- `parameter` The parameter(s) of interest
**Example**
`stat_summary(pop, samples, ['np.mean', 'np.std'])`
### Example Usage Scenario
```python
from samplingsimulatorpy import generate_virtual_pop,
draw_samples,
plot_sample_hist,
plot_sampling_hist,
stat_summary
# create virtual population
pop = generate_virtual_pop(100, np.random.normal, 0, 1)
# take samples
samples = draw_samples(pop, 3, [10, 20])
# plot sample histogram
plot_sample_hist(pop, samples)
```

```python
# plot sampling distribution
plot_sampling_hist(samples)
```

```python
# compare mean and standard deviation
stat_summary(pop, samples, ['np.mean', 'np.std'])
```

### Documentation
The official documentation is hosted on Read the Docs: <https://samplingsimulatorpy.readthedocs.io/en/latest/>
### Credits
This package was created with Cookiecutter and the UBC-MDS/cookiecutter-ubc-mds project template, modified from the [pyOpenSci/cookiecutter-pyopensci](https://github.com/pyOpenSci/cookiecutter-pyopensci) project template and the [audreyr/cookiecutter-pypackage](https://github.com/audreyr/cookiecutter-pypackage).
| /samplingsimulatorpy-0.1.0.tar.gz/samplingsimulatorpy-0.1.0/README.md | 0.923683 | 0.989265 | README.md | pypi |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.