Search is not available for this dataset
repo stringlengths 2 152 ⌀ | file stringlengths 15 239 | code stringlengths 0 58.4M | file_length int64 0 58.4M | avg_line_length float64 0 1.81M | max_line_length int64 0 12.7M | extension_type stringclasses 364 values |
|---|---|---|---|---|---|---|
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/label_box_v4_data_row.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictStr
class LabelBoxV4DataRow(BaseModel):
    """A single data row for a LabelBox v4 import.

    Generated API model mirroring the `LabelBoxV4DataRow` schema of the
    Lightly OpenAPI spec.
    """
    row_data: StrictStr = Field(..., description="A URL which allows anyone in possession of said URL for the time specified by the expiresIn query param to access the resource")
    global_key: Optional[StrictStr] = Field(None, description="The task_id for importing into LabelBox.")
    media_type: Optional[StrictStr] = Field(None, description="LabelBox media type, e.g. IMAGE")
    __properties = ["row_data", "global_key", "media_type"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> LabelBoxV4DataRow:
        """Build a LabelBoxV4DataRow from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary (None fields dropped)."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> LabelBoxV4DataRow:
        """Build a LabelBoxV4DataRow from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return LabelBoxV4DataRow.parse_obj(obj)
        # Any key outside the declared schema is an error.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in LabelBoxV4DataRow) in the input: " + str(obj))
        return LabelBoxV4DataRow.parse_obj({
            "row_data": obj.get("row_data"),
            "global_key": obj.get("global_key"),
            "media_type": obj.get("media_type")
        })
| 2,938 | 34.409639 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/label_studio_task.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictInt
from lightly.openapi_generated.swagger_client.models.label_studio_task_data import LabelStudioTaskData
class LabelStudioTask(BaseModel):
    """A single task for import into LabelStudio.

    Generated API model mirroring the `LabelStudioTask` schema of the
    Lightly OpenAPI spec.
    """
    id: StrictInt = Field(..., description="The task_id for importing into LabelStudio.")
    data: LabelStudioTaskData = Field(...)
    __properties = ["id", "data"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> LabelStudioTask:
        """Build a LabelStudioTask from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary (None fields dropped)."""
        result = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize the nested model through its own to_dict() so its
        # aliasing rules are applied consistently. ("data" has no alias,
        # so the key is the same either way.)
        if self.data:
            result['data'] = self.data.to_dict(by_alias=by_alias)
        return result

    @classmethod
    def from_dict(cls, obj: dict) -> LabelStudioTask:
        """Build a LabelStudioTask from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return LabelStudioTask.parse_obj(obj)
        # Any key outside the declared schema is an error.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in LabelStudioTask) in the input: " + str(obj))
        data = obj.get("data")
        return LabelStudioTask.parse_obj({
            "id": obj.get("id"),
            "data": LabelStudioTaskData.from_dict(data) if data is not None else None
        })
| 2,914 | 33.294118 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/label_studio_task_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictStr
from lightly.openapi_generated.swagger_client.models.sample_data import SampleData
class LabelStudioTaskData(BaseModel):
    """Payload of a LabelStudio task: the image URL plus Lightly metadata.

    Generated API model mirroring the `LabelStudioTaskData` schema of the
    Lightly OpenAPI spec.
    """
    image: StrictStr = Field(..., description="A URL which allows anyone in possession of said URL for the time specified by the expiresIn query param to access the resource")
    lightly_file_name: Optional[StrictStr] = Field(None, alias="lightlyFileName", description="The original fileName of the sample. This is unique within a dataset")
    lightly_meta_info: Optional[SampleData] = Field(None, alias="lightlyMetaInfo")
    __properties = ["image", "lightlyFileName", "lightlyMetaInfo"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> LabelStudioTaskData:
        """Build a LabelStudioTaskData from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary (None fields dropped)."""
        result = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize the nested SampleData through its own to_dict() so its
        # aliasing rules are applied consistently; pick the key to match the
        # aliasing mode used for the rest of the dict.
        if self.lightly_meta_info:
            key = 'lightlyMetaInfo' if by_alias else 'lightly_meta_info'
            result[key] = self.lightly_meta_info.to_dict(by_alias=by_alias)
        return result

    @classmethod
    def from_dict(cls, obj: dict) -> LabelStudioTaskData:
        """Build a LabelStudioTaskData from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return LabelStudioTaskData.parse_obj(obj)
        # Any key outside the declared schema is an error.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in LabelStudioTaskData) in the input: " + str(obj))
        meta = obj.get("lightlyMetaInfo")
        return LabelStudioTaskData.parse_obj({
            "image": obj.get("image"),
            "lightly_file_name": obj.get("lightlyFileName"),
            "lightly_meta_info": SampleData.from_dict(meta) if meta is not None else None
        })
| 3,438 | 38.528736 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/lightly_docker_selection_method.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class LightlyDockerSelectionMethod(str, Enum):
    """Selection strategies supported by the Lightly docker."""

    CORESET = 'coreset'
    RANDOM = 'random'

    @classmethod
    def from_json(cls, json_str: str) -> 'LightlyDockerSelectionMethod':
        """Deserialize a JSON string into a LightlyDockerSelectionMethod."""
        value = json.loads(json_str)
        return cls(value)
| 994 | 22.139535 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/lightly_model_v2.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class LightlyModelV2(str, Enum):
    """ResNet backbone variants selectable for Lightly models (v2)."""

    RESNET_MINUS_18 = 'resnet-18'
    RESNET_MINUS_34 = 'resnet-34'
    RESNET_MINUS_50 = 'resnet-50'
    RESNET_MINUS_101 = 'resnet-101'
    RESNET_MINUS_152 = 'resnet-152'

    @classmethod
    def from_json(cls, json_str: str) -> 'LightlyModelV2':
        """Deserialize a JSON string into a LightlyModelV2."""
        value = json.loads(json_str)
        return cls(value)
| 1,052 | 21.891304 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/lightly_model_v3.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class LightlyModelV3(str, Enum):
    """ResNet backbone variants selectable for Lightly models (v3)."""

    RESNET_MINUS_18 = 'resnet-18'
    RESNET_MINUS_34 = 'resnet-34'
    RESNET_MINUS_50 = 'resnet-50'
    RESNET_MINUS_101 = 'resnet-101'
    RESNET_MINUS_152 = 'resnet-152'

    @classmethod
    def from_json(cls, json_str: str) -> 'LightlyModelV3':
        """Deserialize a JSON string into a LightlyModelV3."""
        value = json.loads(json_str)
        return cls(value)
| 1,052 | 21.891304 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/lightly_trainer_precision_v2.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class LightlyTrainerPrecisionV2(int, Enum):
    """Floating-point precisions accepted by the Lightly trainer (v2)."""

    NUMBER_16 = 16
    NUMBER_32 = 32

    @classmethod
    def from_json(cls, json_str: str) -> 'LightlyTrainerPrecisionV2':
        """Deserialize a JSON string into a LightlyTrainerPrecisionV2."""
        value = json.loads(json_str)
        return cls(value)
| 971 | 21.604651 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/lightly_trainer_precision_v3.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class LightlyTrainerPrecisionV3(int, Enum):
    """Floating-point precisions accepted by the Lightly trainer (v3)."""

    NUMBER_16 = 16
    NUMBER_32 = 32

    @classmethod
    def from_json(cls, json_str: str) -> 'LightlyTrainerPrecisionV3':
        """Deserialize a JSON string into a LightlyTrainerPrecisionV3."""
        value = json.loads(json_str)
        return cls(value)
| 971 | 21.604651 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
from inspect import getfullargspec
import json
import pprint
import re # noqa: F401
from typing import Any, List, Optional
from pydantic import BaseModel, Field, StrictStr, ValidationError, validator
from lightly.openapi_generated.swagger_client.models.prediction_singleton_classification import PredictionSingletonClassification
from lightly.openapi_generated.swagger_client.models.prediction_singleton_instance_segmentation import PredictionSingletonInstanceSegmentation
from lightly.openapi_generated.swagger_client.models.prediction_singleton_keypoint_detection import PredictionSingletonKeypointDetection
from lightly.openapi_generated.swagger_client.models.prediction_singleton_object_detection import PredictionSingletonObjectDetection
from lightly.openapi_generated.swagger_client.models.prediction_singleton_semantic_segmentation import PredictionSingletonSemanticSegmentation
from typing import Any, List
from pydantic import StrictStr, Field, Extra
PREDICTIONSINGLETON_ONE_OF_SCHEMAS = ["PredictionSingletonClassification", "PredictionSingletonInstanceSegmentation", "PredictionSingletonKeypointDetection", "PredictionSingletonObjectDetection", "PredictionSingletonSemanticSegmentation"]
class PredictionSingleton(BaseModel):
    """
    PredictionSingleton

    oneOf container: holds exactly one concrete prediction singleton
    (classification, object detection, semantic/instance segmentation or
    keypoint detection) in ``actual_instance``.
    """
    # data type: PredictionSingletonClassification
    oneof_schema_1_validator: Optional[PredictionSingletonClassification] = None
    # data type: PredictionSingletonObjectDetection
    oneof_schema_2_validator: Optional[PredictionSingletonObjectDetection] = None
    # data type: PredictionSingletonSemanticSegmentation
    oneof_schema_3_validator: Optional[PredictionSingletonSemanticSegmentation] = None
    # data type: PredictionSingletonInstanceSegmentation
    oneof_schema_4_validator: Optional[PredictionSingletonInstanceSegmentation] = None
    # data type: PredictionSingletonKeypointDetection
    oneof_schema_5_validator: Optional[PredictionSingletonKeypointDetection] = None
    # The concrete instance this wrapper currently holds (validated below).
    actual_instance: Any
    # Constant list naming all schemas allowed in this oneOf.
    one_of_schemas: List[str] = Field(PREDICTIONSINGLETON_ONE_OF_SCHEMAS, const=True)

    class Config:
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    discriminator_value_class_map = {
    }

    def __init__(self, *args, **kwargs):
        # Accept either a single positional argument (the concrete instance)
        # or keyword arguments — never both, and never more than one
        # positional.
        if args:
            if len(args) > 1:
                raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`")
            if kwargs:
                raise ValueError("If a position argument is used, keyword arguments cannot be used.")
            super().__init__(actual_instance=args[0])
        else:
            super().__init__(**kwargs)

    @validator('actual_instance')
    def actual_instance_must_validate_oneof(cls, v):
        """Ensure ``actual_instance`` is exactly one of the oneOf types."""
        instance = PredictionSingleton.construct()
        error_messages = []
        match = 0
        # validate data type: PredictionSingletonClassification
        if not isinstance(v, PredictionSingletonClassification):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionSingletonClassification`")
        else:
            match += 1
        # validate data type: PredictionSingletonObjectDetection
        if not isinstance(v, PredictionSingletonObjectDetection):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionSingletonObjectDetection`")
        else:
            match += 1
        # validate data type: PredictionSingletonSemanticSegmentation
        if not isinstance(v, PredictionSingletonSemanticSegmentation):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionSingletonSemanticSegmentation`")
        else:
            match += 1
        # validate data type: PredictionSingletonInstanceSegmentation
        if not isinstance(v, PredictionSingletonInstanceSegmentation):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionSingletonInstanceSegmentation`")
        else:
            match += 1
        # validate data type: PredictionSingletonKeypointDetection
        if not isinstance(v, PredictionSingletonKeypointDetection):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionSingletonKeypointDetection`")
        else:
            match += 1
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when setting `actual_instance` in PredictionSingleton with oneOf schemas: PredictionSingletonClassification, PredictionSingletonInstanceSegmentation, PredictionSingletonKeypointDetection, PredictionSingletonObjectDetection, PredictionSingletonSemanticSegmentation. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when setting `actual_instance` in PredictionSingleton with oneOf schemas: PredictionSingletonClassification, PredictionSingletonInstanceSegmentation, PredictionSingletonKeypointDetection, PredictionSingletonObjectDetection, PredictionSingletonSemanticSegmentation. Details: " + ", ".join(error_messages))
        else:
            return v

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingleton:
        # Round-trips through JSON so that from_json's discriminator logic
        # is reused for dict input as well.
        return cls.from_json(json.dumps(obj))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingleton:
        """Returns the object represented by the json string

        Resolution order: first the `type` discriminator is matched against
        both the enum-style values (e.g. "CLASSIFICATION") and the class
        names; only if no discriminator check returned does it fall back to
        trying each schema's from_json in turn and requiring exactly one to
        succeed.
        """
        instance = PredictionSingleton.construct()
        error_messages = []
        match = 0
        # use oneOf discriminator to lookup the data type
        _data_type = json.loads(json_str).get("type")
        if not _data_type:
            raise ValueError("Failed to lookup data type from the field `type` in the input.")
        # check if data type is `PredictionSingletonClassification`
        if _data_type == "CLASSIFICATION":
            instance.actual_instance = PredictionSingletonClassification.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonInstanceSegmentation`
        if _data_type == "INSTANCE_SEGMENTATION":
            instance.actual_instance = PredictionSingletonInstanceSegmentation.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonKeypointDetection`
        if _data_type == "KEYPOINT_DETECTION":
            instance.actual_instance = PredictionSingletonKeypointDetection.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonObjectDetection`
        if _data_type == "OBJECT_DETECTION":
            instance.actual_instance = PredictionSingletonObjectDetection.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonClassification`
        if _data_type == "PredictionSingletonClassification":
            instance.actual_instance = PredictionSingletonClassification.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonInstanceSegmentation`
        if _data_type == "PredictionSingletonInstanceSegmentation":
            instance.actual_instance = PredictionSingletonInstanceSegmentation.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonKeypointDetection`
        if _data_type == "PredictionSingletonKeypointDetection":
            instance.actual_instance = PredictionSingletonKeypointDetection.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonObjectDetection`
        if _data_type == "PredictionSingletonObjectDetection":
            instance.actual_instance = PredictionSingletonObjectDetection.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonSemanticSegmentation`
        if _data_type == "PredictionSingletonSemanticSegmentation":
            instance.actual_instance = PredictionSingletonSemanticSegmentation.from_json(json_str)
            return instance
        # check if data type is `PredictionSingletonSemanticSegmentation`
        if _data_type == "SEMANTIC_SEGMENTATION":
            instance.actual_instance = PredictionSingletonSemanticSegmentation.from_json(json_str)
            return instance
        # Fallback: no discriminator matched — try each schema in turn and
        # require exactly one successful parse.
        # deserialize data into PredictionSingletonClassification
        try:
            instance.actual_instance = PredictionSingletonClassification.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into PredictionSingletonObjectDetection
        try:
            instance.actual_instance = PredictionSingletonObjectDetection.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into PredictionSingletonSemanticSegmentation
        try:
            instance.actual_instance = PredictionSingletonSemanticSegmentation.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into PredictionSingletonInstanceSegmentation
        try:
            instance.actual_instance = PredictionSingletonInstanceSegmentation.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into PredictionSingletonKeypointDetection
        try:
            instance.actual_instance = PredictionSingletonKeypointDetection.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when deserializing the JSON string into PredictionSingleton with oneOf schemas: PredictionSingletonClassification, PredictionSingletonInstanceSegmentation, PredictionSingletonKeypointDetection, PredictionSingletonObjectDetection, PredictionSingletonSemanticSegmentation. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when deserializing the JSON string into PredictionSingleton with oneOf schemas: PredictionSingletonClassification, PredictionSingletonInstanceSegmentation, PredictionSingletonKeypointDetection, PredictionSingletonObjectDetection, PredictionSingletonSemanticSegmentation. Details: " + ", ".join(error_messages))
        else:
            return instance

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the actual instance"""
        if self.actual_instance is None:
            return "null"
        # Delegate to the instance's own to_json when it has one; otherwise
        # fall back to plain JSON serialization (primitive types).
        to_json = getattr(self.actual_instance, "to_json", None)
        if callable(to_json):
            return self.actual_instance.to_json(by_alias=by_alias)
        else:
            return json.dumps(self.actual_instance)

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dict representation of the actual instance

        NOTE(review): returns None (not a dict) when no instance is set,
        despite the annotation.
        """
        if self.actual_instance is None:
            return None
        to_dict = getattr(self.actual_instance, "to_dict", None)
        if callable(to_dict):
            return self.actual_instance.to_dict(by_alias=by_alias)
        else:
            # primitive type
            return self.actual_instance

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the actual instance"""
        return pprint.pformat(self.dict(by_alias=by_alias))
| 12,126 | 49.319502 | 363 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_base.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
import lightly.openapi_generated.swagger_client.models
from typing import Optional, Union
from pydantic import Extra, BaseModel, Field, StrictStr, confloat, conint, constr, validator
class PredictionSingletonBase(BaseModel):
    """
    Base schema shared by all prediction singleton types.

    Carries the ``type`` discriminator plus the task / crop / category
    metadata common to every concrete prediction singleton. ``from_dict`` and
    ``from_json`` resolve and return the concrete subclass selected by the
    discriminator mapping below.
    """
    # Discriminator: names the concrete subclass (see mapping below).
    type: StrictStr = Field(...)
    task_name: constr(strict=True, min_length=1) = Field(..., alias="taskName", description="A name which is safe to have as a file/folder name in a file system")
    crop_dataset_id: Optional[constr(strict=True)] = Field(None, alias="cropDatasetId", description="MongoDB ObjectId")
    crop_sample_id: Optional[constr(strict=True)] = Field(None, alias="cropSampleId", description="MongoDB ObjectId")
    category_id: conint(strict=True, ge=0) = Field(..., alias="categoryId", description="The id of the category. Needs to be a positive integer but can be any integer (gaps are allowed, does not need to be sequential)")
    score: Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)] = Field(..., description="the score for the prediction task which yielded this crop")
    __properties = ["type", "taskName", "cropDatasetId", "cropSampleId", "categoryId", "score"]

    @validator('task_name')
    def task_name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 ._-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 ._-]+$/")
        return value

    @validator('crop_dataset_id')
    def crop_dataset_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('crop_sample_id')
    def crop_sample_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    # JSON field name that stores the object type
    __discriminator_property_name = 'type'

    # discriminator mappings
    __discriminator_value_class_map = {
        'PredictionSingletonClassification': 'PredictionSingletonClassification',
        'PredictionSingletonInstanceSegmentation': 'PredictionSingletonInstanceSegmentation',
        'PredictionSingletonKeypointDetection': 'PredictionSingletonKeypointDetection',
        'PredictionSingletonObjectDetection': 'PredictionSingletonObjectDetection',
        'PredictionSingletonSemanticSegmentation': 'PredictionSingletonSemanticSegmentation'
    }

    @classmethod
    def get_discriminator_value(cls, obj: dict) -> str:
        """Returns the discriminator value (object type) of the data

        Returns None for an unknown or falsy discriminator value; raises
        KeyError when the discriminator property is missing entirely.
        """
        discriminator_value = obj[cls.__discriminator_property_name]
        if discriminator_value:
            return cls.__discriminator_value_class_map.get(discriminator_value)
        else:
            return None

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> "PredictionSingletonBase":
        """Create an instance of PredictionSingletonBase from a JSON string

        Returns an instance of the concrete subclass selected by the
        discriminator (see from_dict).
        """
        # BUGFIX: the generated return annotation was `Union(...)` — call
        # syntax instead of `Union[...]` — which is not a valid typing
        # expression and would raise TypeError if the annotations were ever
        # evaluated (e.g. via typing.get_type_hints); the referenced subclass
        # names are also not imported in this module. Annotate with a
        # forward reference to the base class instead.
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> "PredictionSingletonBase":
        """Create an instance of PredictionSingletonBase from a dict

        The discriminator value in ``obj`` selects which subclass's
        ``from_dict`` handles the data; an unknown discriminator raises
        ValueError.
        """
        # look up the object type based on discriminator mapping
        object_type = cls.get_discriminator_value(obj)
        if object_type:
            klass = getattr(lightly.openapi_generated.swagger_client.models, object_type)
            return klass.from_dict(obj)
        else:
            raise ValueError("PredictionSingletonBase failed to lookup discriminator value from " +
                             json.dumps(obj) + ". Discriminator property name: " + cls.__discriminator_property_name +
                             ", mapping: " + json.dumps(cls.__discriminator_value_class_map))
| 5,891 | 45.393701 | 238 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_classification.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
from lightly.openapi_generated.swagger_client.models.prediction_singleton_base import PredictionSingletonBase
class PredictionSingletonClassification(PredictionSingletonBase):
    """Classification variant of a prediction singleton.

    Extends the shared base with the optional per-category probability
    vector produced by a classification task.
    """
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["type", "taskName", "cropDatasetId", "cropSampleId", "categoryId", "score", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonClassification:
        """Build a PredictionSingletonClassification from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary (None fields dropped)."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonClassification:
        """Build a PredictionSingletonClassification from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonClassification.parse_obj(obj)
        # Any key outside the declared schema is an error.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonClassification) in the input: " + str(obj))
        return PredictionSingletonClassification.parse_obj({
            "type": obj.get("type"),
            "task_name": obj.get("taskName"),
            "crop_dataset_id": obj.get("cropDatasetId"),
            "crop_sample_id": obj.get("cropSampleId"),
            "category_id": obj.get("categoryId"),
            "score": obj.get("score"),
            "probabilities": obj.get("probabilities")
        })
| 3,376 | 38.267442 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_classification_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
class PredictionSingletonClassificationAllOf(BaseModel):
    """Classification-specific fields of a prediction singleton (generated API model)."""
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonClassificationAllOf:
        """Create an instance of PredictionSingletonClassificationAllOf from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonClassificationAllOf:
        """Create an instance of PredictionSingletonClassificationAllOf from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonClassificationAllOf.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonClassificationAllOf) in the input: " + str(obj))
        return PredictionSingletonClassificationAllOf.parse_obj({
            "probabilities": obj.get("probabilities")
        })
| 2,937 | 36.189873 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_instance_segmentation.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
from lightly.openapi_generated.swagger_client.models.prediction_singleton_base import PredictionSingletonBase
class PredictionSingletonInstanceSegmentation(PredictionSingletonBase):
    """Prediction singleton for an instance-segmentation task: RLE mask plus bounding box (generated API model)."""
    segmentation: conlist(conint(strict=True, ge=0)) = Field(..., description="Run Length Encoding (RLE) as outlined by https://docs.lightly.ai/docs/prediction-format#semantic-segmentation ")
    bbox: conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=4, min_items=4) = Field(..., description="The bbox of where a prediction task yielded a finding. [x, y, width, height]")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["type", "taskName", "cropDatasetId", "cropSampleId", "categoryId", "score", "segmentation", "bbox", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonInstanceSegmentation:
        """Create an instance of PredictionSingletonInstanceSegmentation from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonInstanceSegmentation:
        """Create an instance of PredictionSingletonInstanceSegmentation from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonInstanceSegmentation.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonInstanceSegmentation) in the input: " + str(obj))
        return PredictionSingletonInstanceSegmentation.parse_obj({
            "type": obj.get("type"),
            "task_name": obj.get("taskName"),
            "crop_dataset_id": obj.get("cropDatasetId"),
            "crop_sample_id": obj.get("cropSampleId"),
            "category_id": obj.get("categoryId"),
            "score": obj.get("score"),
            "segmentation": obj.get("segmentation"),
            "bbox": obj.get("bbox"),
            "probabilities": obj.get("probabilities")
        })
| 3,948 | 42.877778 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_instance_segmentation_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
class PredictionSingletonInstanceSegmentationAllOf(BaseModel):
    """Instance-segmentation-specific fields of a prediction singleton (generated API model)."""
    segmentation: conlist(conint(strict=True, ge=0)) = Field(..., description="Run Length Encoding (RLE) as outlined by https://docs.lightly.ai/docs/prediction-format#semantic-segmentation ")
    bbox: conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=4, min_items=4) = Field(..., description="The bbox of where a prediction task yielded a finding. [x, y, width, height]")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["segmentation", "bbox", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonInstanceSegmentationAllOf:
        """Create an instance of PredictionSingletonInstanceSegmentationAllOf from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonInstanceSegmentationAllOf:
        """Create an instance of PredictionSingletonInstanceSegmentationAllOf from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonInstanceSegmentationAllOf.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonInstanceSegmentationAllOf) in the input: " + str(obj))
        return PredictionSingletonInstanceSegmentationAllOf.parse_obj({
            "segmentation": obj.get("segmentation"),
            "bbox": obj.get("bbox"),
            "probabilities": obj.get("probabilities")
        })
| 3,509 | 41.289157 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_keypoint_detection.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
from lightly.openapi_generated.swagger_client.models.prediction_singleton_base import PredictionSingletonBase
class PredictionSingletonKeypointDetection(PredictionSingletonBase):
    """Prediction singleton for a keypoint-detection task (generated API model)."""
    keypoints: conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], min_items=3) = Field(..., description="[x1, y2, s1, ..., xk, yk, sk] as outlined by https://docs.lightly.ai/docs/prediction-format#keypoint-detection ")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["type", "taskName", "cropDatasetId", "cropSampleId", "categoryId", "score", "keypoints", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonKeypointDetection:
        """Create an instance of PredictionSingletonKeypointDetection from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonKeypointDetection:
        """Create an instance of PredictionSingletonKeypointDetection from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonKeypointDetection.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonKeypointDetection) in the input: " + str(obj))
        return PredictionSingletonKeypointDetection.parse_obj({
            "type": obj.get("type"),
            "task_name": obj.get("taskName"),
            "crop_dataset_id": obj.get("cropDatasetId"),
            "crop_sample_id": obj.get("cropSampleId"),
            "category_id": obj.get("categoryId"),
            "score": obj.get("score"),
            "keypoints": obj.get("keypoints"),
            "probabilities": obj.get("probabilities")
        })
| 3,702 | 41.079545 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_keypoint_detection_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
class PredictionSingletonKeypointDetectionAllOf(BaseModel):
    """Keypoint-detection-specific fields of a prediction singleton (generated API model)."""
    keypoints: conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], min_items=3) = Field(..., description="[x1, y2, s1, ..., xk, yk, sk] as outlined by https://docs.lightly.ai/docs/prediction-format#keypoint-detection ")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["keypoints", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonKeypointDetectionAllOf:
        """Create an instance of PredictionSingletonKeypointDetectionAllOf from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonKeypointDetectionAllOf:
        """Create an instance of PredictionSingletonKeypointDetectionAllOf from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonKeypointDetectionAllOf.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonKeypointDetectionAllOf) in the input: " + str(obj))
        return PredictionSingletonKeypointDetectionAllOf.parse_obj({
            "keypoints": obj.get("keypoints"),
            "probabilities": obj.get("probabilities")
        })
| 3,263 | 39.296296 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_object_detection.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
from lightly.openapi_generated.swagger_client.models.prediction_singleton_base import PredictionSingletonBase
class PredictionSingletonObjectDetection(PredictionSingletonBase):
    """Prediction singleton for an object-detection task (generated API model)."""
    bbox: conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=4, min_items=4) = Field(..., description="The bbox of where a prediction task yielded a finding. [x, y, width, height]")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["type", "taskName", "cropDatasetId", "cropSampleId", "categoryId", "score", "bbox", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonObjectDetection:
        """Create an instance of PredictionSingletonObjectDetection from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonObjectDetection:
        """Create an instance of PredictionSingletonObjectDetection from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonObjectDetection.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonObjectDetection) in the input: " + str(obj))
        return PredictionSingletonObjectDetection.parse_obj({
            "type": obj.get("type"),
            "task_name": obj.get("taskName"),
            "crop_dataset_id": obj.get("cropDatasetId"),
            "crop_sample_id": obj.get("cropSampleId"),
            "category_id": obj.get("categoryId"),
            "score": obj.get("score"),
            "bbox": obj.get("bbox"),
            "probabilities": obj.get("probabilities")
        })
| 3,642 | 40.397727 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_object_detection_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
class PredictionSingletonObjectDetectionAllOf(BaseModel):
    """Object-detection-specific fields of a prediction singleton (generated API model)."""
    bbox: conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=4, min_items=4) = Field(..., description="The bbox of where a prediction task yielded a finding. [x, y, width, height]")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["bbox", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonObjectDetectionAllOf:
        """Create an instance of PredictionSingletonObjectDetectionAllOf from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonObjectDetectionAllOf:
        """Create an instance of PredictionSingletonObjectDetectionAllOf from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonObjectDetectionAllOf.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonObjectDetectionAllOf) in the input: " + str(obj))
        return PredictionSingletonObjectDetectionAllOf.parse_obj({
            "bbox": obj.get("bbox"),
            "probabilities": obj.get("probabilities")
        })
| 3,203 | 38.555556 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_semantic_segmentation.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
from lightly.openapi_generated.swagger_client.models.prediction_singleton_base import PredictionSingletonBase
class PredictionSingletonSemanticSegmentation(PredictionSingletonBase):
    """Prediction singleton for a semantic-segmentation task (generated API model)."""
    segmentation: conlist(conint(strict=True, ge=0)) = Field(..., description="Run Length Encoding (RLE) as outlined by https://docs.lightly.ai/docs/prediction-format#semantic-segmentation ")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["type", "taskName", "cropDatasetId", "cropSampleId", "categoryId", "score", "segmentation", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonSemanticSegmentation:
        """Create an instance of PredictionSingletonSemanticSegmentation from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonSemanticSegmentation:
        """Create an instance of PredictionSingletonSemanticSegmentation from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonSemanticSegmentation.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonSemanticSegmentation) in the input: " + str(obj))
        return PredictionSingletonSemanticSegmentation.parse_obj({
            "type": obj.get("type"),
            "task_name": obj.get("taskName"),
            "crop_dataset_id": obj.get("cropDatasetId"),
            "crop_sample_id": obj.get("cropSampleId"),
            "category_id": obj.get("categoryId"),
            "score": obj.get("score"),
            "segmentation": obj.get("segmentation"),
            "probabilities": obj.get("probabilities")
        })
| 3,691 | 40.954545 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_singleton_semantic_segmentation_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
class PredictionSingletonSemanticSegmentationAllOf(BaseModel):
    """Semantic-segmentation-specific fields of a prediction singleton (generated API model)."""
    segmentation: conlist(conint(strict=True, ge=0)) = Field(..., description="Run Length Encoding (RLE) as outlined by https://docs.lightly.ai/docs/prediction-format#semantic-segmentation ")
    probabilities: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)])] = Field(None, description="The probabilities of it being a certain category other than the one which was selected. The sum of all probabilities should equal 1.")
    __properties = ["segmentation", "probabilities"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string representation of this model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize this model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionSingletonSemanticSegmentationAllOf:
        """Create an instance of PredictionSingletonSemanticSegmentationAllOf from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Return the dictionary representation of this model, omitting None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionSingletonSemanticSegmentationAllOf:
        """Create an instance of PredictionSingletonSemanticSegmentationAllOf from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionSingletonSemanticSegmentationAllOf.parse_obj(obj)
        # reject any field that is not declared on this model
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionSingletonSemanticSegmentationAllOf) in the input: " + str(obj))
        return PredictionSingletonSemanticSegmentationAllOf.parse_obj({
            "segmentation": obj.get("segmentation"),
            "probabilities": obj.get("probabilities")
        })
| 3,252 | 39.160494 | 273 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
from inspect import getfullargspec
import json
import pprint
import re # noqa: F401
from typing import Any, List, Optional
from pydantic import BaseModel, Field, StrictStr, ValidationError, validator
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_keypoint import PredictionTaskSchemaKeypoint
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_simple import PredictionTaskSchemaSimple
from typing import Any, List
from pydantic import StrictStr, Field, Extra
# The type names that are valid members of the `PredictionTaskSchema` oneOf
# union implemented by the wrapper class below.
PREDICTIONTASKSCHEMA_ONE_OF_SCHEMAS = ["PredictionTaskSchemaKeypoint", "PredictionTaskSchemaSimple"]
class PredictionTaskSchema(BaseModel):
    """
    Generated oneOf wrapper: holds either a PredictionTaskSchemaSimple or a
    PredictionTaskSchemaKeypoint in `actual_instance` and validates that
    exactly one of the two types matches.
    """
    # data type: PredictionTaskSchemaSimple
    oneof_schema_1_validator: Optional[PredictionTaskSchemaSimple] = None
    # data type: PredictionTaskSchemaKeypoint
    oneof_schema_2_validator: Optional[PredictionTaskSchemaKeypoint] = None
    # The concrete schema object this wrapper represents; checked by the
    # `actual_instance_must_validate_oneof` validator below.
    actual_instance: Any
    # Constant list of allowed schema names (const=True: callers cannot override it).
    one_of_schemas: List[str] = Field(PREDICTIONTASKSCHEMA_ONE_OF_SCHEMAS, const=True)
    class Config:
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
        # Empty here; discriminator dispatch is done explicitly in from_json().
        discriminator_value_class_map = {
        }
    def __init__(self, *args, **kwargs):
        """Accept either one positional `actual_instance` or keyword arguments, never both."""
        if args:
            if len(args) > 1:
                raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`")
            if kwargs:
                raise ValueError("If a position argument is used, keyword arguments cannot be used.")
            super().__init__(actual_instance=args[0])
        else:
            super().__init__(**kwargs)
    @validator('actual_instance')
    def actual_instance_must_validate_oneof(cls, v):
        """Reject `actual_instance` unless it is exactly one of the allowed schema types."""
        # NOTE: `instance` is never used in this validator (generator boilerplate).
        instance = PredictionTaskSchema.construct()
        error_messages = []
        match = 0
        # validate data type: PredictionTaskSchemaSimple
        if not isinstance(v, PredictionTaskSchemaSimple):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionTaskSchemaSimple`")
        else:
            match += 1
        # validate data type: PredictionTaskSchemaKeypoint
        if not isinstance(v, PredictionTaskSchemaKeypoint):
            error_messages.append(f"Error! Input type `{type(v)}` is not `PredictionTaskSchemaKeypoint`")
        else:
            match += 1
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when setting `actual_instance` in PredictionTaskSchema with oneOf schemas: PredictionTaskSchemaKeypoint, PredictionTaskSchemaSimple. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when setting `actual_instance` in PredictionTaskSchema with oneOf schemas: PredictionTaskSchemaKeypoint, PredictionTaskSchemaSimple. Details: " + ", ".join(error_messages))
        else:
            return v
    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchema:
        """Create an instance from a dict by round-tripping through JSON (delegates to from_json)."""
        return cls.from_json(json.dumps(obj))
    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchema:
        """Return the wrapper for the object represented by the JSON string.

        Dispatch order: first the `type` discriminator field is matched against
        known TaskType values / class names; only if none matches does the code
        fall back to trying each schema's own deserializer.
        """
        instance = PredictionTaskSchema.construct()
        error_messages = []
        match = 0
        # use oneOf discriminator to lookup the data type
        _data_type = json.loads(json_str).get("type")
        if not _data_type:
            raise ValueError("Failed to lookup data type from the field `type` in the input.")
        # check if data type is `PredictionTaskSchemaSimple`
        if _data_type == "CLASSIFICATION":
            instance.actual_instance = PredictionTaskSchemaSimple.from_json(json_str)
            return instance
        # check if data type is `PredictionTaskSchemaSimple`
        if _data_type == "INSTANCE_SEGMENTATION":
            instance.actual_instance = PredictionTaskSchemaSimple.from_json(json_str)
            return instance
        # check if data type is `PredictionTaskSchemaKeypoint`
        if _data_type == "KEYPOINT_DETECTION":
            instance.actual_instance = PredictionTaskSchemaKeypoint.from_json(json_str)
            return instance
        # check if data type is `PredictionTaskSchemaSimple`
        if _data_type == "OBJECT_DETECTION":
            instance.actual_instance = PredictionTaskSchemaSimple.from_json(json_str)
            return instance
        # check if data type is `PredictionTaskSchemaKeypoint` (class name used as discriminator)
        if _data_type == "PredictionTaskSchemaKeypoint":
            instance.actual_instance = PredictionTaskSchemaKeypoint.from_json(json_str)
            return instance
        # check if data type is `PredictionTaskSchemaSimple` (class name used as discriminator)
        if _data_type == "PredictionTaskSchemaSimple":
            instance.actual_instance = PredictionTaskSchemaSimple.from_json(json_str)
            return instance
        # check if data type is `PredictionTaskSchemaSimple`
        if _data_type == "SEMANTIC_SEGMENTATION":
            instance.actual_instance = PredictionTaskSchemaSimple.from_json(json_str)
            return instance
        # Fallback: no discriminator matched, try each schema in turn.
        # deserialize data into PredictionTaskSchemaSimple
        try:
            instance.actual_instance = PredictionTaskSchemaSimple.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into PredictionTaskSchemaKeypoint
        try:
            instance.actual_instance = PredictionTaskSchemaKeypoint.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when deserializing the JSON string into PredictionTaskSchema with oneOf schemas: PredictionTaskSchemaKeypoint, PredictionTaskSchemaSimple. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when deserializing the JSON string into PredictionTaskSchema with oneOf schemas: PredictionTaskSchemaKeypoint, PredictionTaskSchemaSimple. Details: " + ", ".join(error_messages))
        else:
            return instance
    def to_json(self, by_alias: bool = False) -> str:
        """Return the JSON representation of the wrapped instance ("null" if unset)."""
        if self.actual_instance is None:
            return "null"
        to_json = getattr(self.actual_instance, "to_json", None)
        if callable(to_json):
            return self.actual_instance.to_json(by_alias=by_alias)
        else:
            return json.dumps(self.actual_instance)
    def to_dict(self, by_alias: bool = False) -> dict:
        """Return the dict representation of the wrapped instance (None if unset)."""
        if self.actual_instance is None:
            return None
        to_dict = getattr(self.actual_instance, "to_dict", None)
        if callable(to_dict):
            return self.actual_instance.to_dict(by_alias=by_alias)
        else:
            # primitive type
            return self.actual_instance
    def to_str(self, by_alias: bool = False) -> str:
        """Return the pretty-printed string representation of this wrapper."""
        return pprint.pformat(self.dict(by_alias=by_alias))
| 7,794 | 41.36413 | 231 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_base.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
import lightly.openapi_generated.swagger_client.models
from pydantic import Extra, BaseModel, Field, StrictStr, constr, validator
class PredictionTaskSchemaBase(BaseModel):
    """
    The schema for predictions or labels when doing classification, object detection, keypoint detection or instance segmentation.

    Acts as the discriminator base: `from_dict`/`from_json` inspect the `type`
    field and instantiate the matching subclass.
    """
    name: constr(strict=True, min_length=1) = Field(..., description="A name which is safe to have as a file/folder name in a file system")
    type: StrictStr = Field(..., description="This is the TaskType. Due to openapi.oneOf fuckery with discriminators, this needs to be a string")
    __properties = ["name", "type"]
    @validator('name')
    def name_validate_regular_expression(cls, value):
        """Validates that `name` matches the filesystem-safe pattern."""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 ._-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 ._-]+$/")
        return value
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    # JSON field name that stores the object type.
    # Double-underscore names are mangled to _PredictionTaskSchemaBase__...;
    # the `cls.__...` accesses in the classmethods below mangle identically.
    __discriminator_property_name = 'type'
    # discriminator mappings
    __discriminator_value_class_map = {
        'PredictionTaskSchemaKeypoint': 'PredictionTaskSchemaKeypoint',
        'PredictionTaskSchemaSimple': 'PredictionTaskSchemaSimple'
    }
    @classmethod
    def get_discriminator_value(cls, obj: dict) -> str:
        """Return the mapped class name for obj's discriminator value, or None.

        Raises KeyError if the discriminator property ('type') is absent.
        """
        discriminator_value = obj[cls.__discriminator_property_name]
        if discriminator_value:
            return cls.__discriminator_value_class_map.get(discriminator_value)
        else:
            return None
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> Union[PredictionTaskSchemaKeypoint, PredictionTaskSchemaSimple]:
        # NOTE(review): `Union` is not imported in this module and the subclass
        # names are forward references; this annotation is only safe because
        # `from __future__ import annotations` (PEP 563) keeps it unevaluated.
        """Create an instance of a PredictionTaskSchemaBase subclass from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> Union[PredictionTaskSchemaKeypoint, PredictionTaskSchemaSimple]:
        # NOTE(review): same lazily-evaluated annotation caveat as from_json above.
        """Create an instance of the matching PredictionTaskSchemaBase subclass from a dict"""
        # look up the object type based on discriminator mapping
        object_type = cls.get_discriminator_value(obj)
        if object_type:
            # Resolve the subclass by name from the generated models package.
            klass = getattr(lightly.openapi_generated.swagger_client.models, object_type)
            return klass.from_dict(obj)
        else:
            raise ValueError("PredictionTaskSchemaBase failed to lookup discriminator value from " +
                             json.dumps(obj) + ". Discriminator property name: " + cls.__discriminator_property_name +
                             ", mapping: " + json.dumps(cls.__discriminator_value_class_map))
| 4,055 | 39.56 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_category.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, conint, constr
class PredictionTaskSchemaCategory(BaseModel):
    """
    Maps a numeric category id to the human-readable name used when it is visualized.
    """
    id: conint(strict=True, ge=0) = Field(..., description="The id of the category. Needs to be a positive integer but can be any integer (gaps are allowed, does not need to be sequential)")
    name: constr(strict=True, min_length=1) = Field(..., description="The name of the category when it should be visualized")
    __properties = ["id", "name"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaCategory:
        """Build a PredictionTaskSchemaCategory from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaCategory:
        """Build a PredictionTaskSchemaCategory from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaCategory.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaCategory) in the input: " + str(obj))
        return PredictionTaskSchemaCategory.parse_obj({
            "id": obj.get("id"),
            "name": obj.get("name")
        })
| 2,887 | 34.654321 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_category_keypoints.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional
from pydantic import Extra, BaseModel, Field, conint, conlist, constr
class PredictionTaskSchemaCategoryKeypoints(BaseModel):
    """
    A category for keypoint detection: id/name plus optional keypoint names and
    the skeleton (pairs of keypoint indices describing connectivity).
    """
    id: conint(strict=True, ge=0) = Field(..., description="The id of the category. Needs to be a positive integer but can be any integer (gaps are allowed, does not need to be sequential)")
    name: constr(strict=True, min_length=1) = Field(..., description="The name of the category when it should be visualized")
    keypoint_names: Optional[conlist(constr(strict=True, min_length=1))] = Field(None, alias="keypointNames", description="The names of the individual keypoints. E.g left-shoulder, right-shoulder, nose, etc. Must be of equal length as the number of keypoints of a keypoint detection. ")
    keypoint_skeleton: Optional[conlist(conlist(conint(strict=True, ge=0), max_items=2, min_items=2))] = Field(None, alias="keypointSkeleton", description="The keypoint skeleton of a category. It is used to show the overall connectivity between keypoints. Each entry in the array describes a a single connection between two keypoints by their index. e.g [1,3],[2,4],[3,4] ")
    __properties = ["id", "name", "keypointNames", "keypointSkeleton"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaCategoryKeypoints:
        """Build a PredictionTaskSchemaCategoryKeypoints from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaCategoryKeypoints:
        """Build a PredictionTaskSchemaCategoryKeypoints from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaCategoryKeypoints.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaCategoryKeypoints) in the input: " + str(obj))
        return PredictionTaskSchemaCategoryKeypoints.parse_obj({
            "id": obj.get("id"),
            "name": obj.get("name"),
            "keypoint_names": obj.get("keypointNames"),
            "keypoint_skeleton": obj.get("keypointSkeleton")
        })
| 3,791 | 43.611765 | 374 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_category_keypoints_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional
from pydantic import Extra, BaseModel, Field, conint, conlist, constr
class PredictionTaskSchemaCategoryKeypointsAllOf(BaseModel):
    """
    Generated allOf fragment carrying the keypoint-specific category fields
    (keypoint names and skeleton connectivity).
    """
    keypoint_names: Optional[conlist(constr(strict=True, min_length=1))] = Field(None, alias="keypointNames", description="The names of the individual keypoints. E.g left-shoulder, right-shoulder, nose, etc. Must be of equal length as the number of keypoints of a keypoint detection. ")
    keypoint_skeleton: Optional[conlist(conlist(conint(strict=True, ge=0), max_items=2, min_items=2))] = Field(None, alias="keypointSkeleton", description="The keypoint skeleton of a category. It is used to show the overall connectivity between keypoints. Each entry in the array describes a a single connection between two keypoints by their index. e.g [1,3],[2,4],[3,4] ")
    __properties = ["keypointNames", "keypointSkeleton"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaCategoryKeypointsAllOf:
        """Build a PredictionTaskSchemaCategoryKeypointsAllOf from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaCategoryKeypointsAllOf:
        """Build a PredictionTaskSchemaCategoryKeypointsAllOf from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaCategoryKeypointsAllOf.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaCategoryKeypointsAllOf) in the input: " + str(obj))
        return PredictionTaskSchemaCategoryKeypointsAllOf.parse_obj({
            "keypoint_names": obj.get("keypointNames"),
            "keypoint_skeleton": obj.get("keypointSkeleton")
        })
| 3,457 | 41.691358 | 374 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_keypoint.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List
from pydantic import Extra, BaseModel, Field, conlist
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_base import PredictionTaskSchemaBase
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_category_keypoints import PredictionTaskSchemaCategoryKeypoints
class PredictionTaskSchemaKeypoint(PredictionTaskSchemaBase):
    """
    Prediction task schema for keypoint detection: inherits name/type from the
    base and adds the keypoint category list.
    """
    categories: conlist(PredictionTaskSchemaCategoryKeypoints) = Field(..., description="An array of the categories that exist for this prediction task. The id needs to be unique")
    __properties = ["name", "type", "categories"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaKeypoint:
        """Build a PredictionTaskSchemaKeypoint from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        _dict = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize each nested category via its own to_dict() instead of
        # pydantic's default dump.
        serialized_categories = []
        if self.categories:
            serialized_categories = [
                category.to_dict(by_alias=by_alias) for category in self.categories if category
            ]
        _dict['categories'] = serialized_categories
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaKeypoint:
        """Build a PredictionTaskSchemaKeypoint from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaKeypoint.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaKeypoint) in the input: " + str(obj))
        raw_categories = obj.get("categories")
        return PredictionTaskSchemaKeypoint.parse_obj({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "categories": None if raw_categories is None else [
                PredictionTaskSchemaCategoryKeypoints.from_dict(entry) for entry in raw_categories
            ]
        })
| 3,549 | 38.444444 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_keypoint_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List
from pydantic import Extra, BaseModel, Field, conlist
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_category_keypoints import PredictionTaskSchemaCategoryKeypoints
class PredictionTaskSchemaKeypointAllOf(BaseModel):
    """
    Generated allOf fragment holding the category list for keypoint detection.
    """
    categories: conlist(PredictionTaskSchemaCategoryKeypoints) = Field(..., description="An array of the categories that exist for this prediction task. The id needs to be unique")
    __properties = ["categories"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaKeypointAllOf:
        """Build a PredictionTaskSchemaKeypointAllOf from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        _dict = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize each nested category via its own to_dict().
        serialized_categories = []
        if self.categories:
            serialized_categories = [
                category.to_dict(by_alias=by_alias) for category in self.categories if category
            ]
        _dict['categories'] = serialized_categories
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaKeypointAllOf:
        """Build a PredictionTaskSchemaKeypointAllOf from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaKeypointAllOf.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaKeypointAllOf) in the input: " + str(obj))
        raw_categories = obj.get("categories")
        return PredictionTaskSchemaKeypointAllOf.parse_obj({
            "categories": None if raw_categories is None else [
                PredictionTaskSchemaCategoryKeypoints.from_dict(entry) for entry in raw_categories
            ]
        })
| 3,410 | 38.206897 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_simple.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List
from pydantic import Extra, BaseModel, Field, conlist
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_base import PredictionTaskSchemaBase
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_category import PredictionTaskSchemaCategory
class PredictionTaskSchemaSimple(PredictionTaskSchemaBase):
    """
    Prediction task schema for classification, object detection or instance
    segmentation: inherits name/type from the base and adds plain categories.
    """
    categories: conlist(PredictionTaskSchemaCategory) = Field(..., description="An array of the categories that exist for this prediction task. The id needs to be unique")
    __properties = ["name", "type", "categories"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaSimple:
        """Build a PredictionTaskSchemaSimple from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        _dict = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize each nested category via its own to_dict().
        serialized_categories = []
        if self.categories:
            serialized_categories = [
                category.to_dict(by_alias=by_alias) for category in self.categories if category
            ]
        _dict['categories'] = serialized_categories
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaSimple:
        """Build a PredictionTaskSchemaSimple from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaSimple.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaSimple) in the input: " + str(obj))
        raw_categories = obj.get("categories")
        return PredictionTaskSchemaSimple.parse_obj({
            "name": obj.get("name"),
            "type": obj.get("type"),
            "categories": None if raw_categories is None else [
                PredictionTaskSchemaCategory.from_dict(entry) for entry in raw_categories
            ]
        })
| 3,494 | 37.833333 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schema_simple_all_of.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List
from pydantic import Extra, BaseModel, Field, conlist
from lightly.openapi_generated.swagger_client.models.prediction_task_schema_category import PredictionTaskSchemaCategory
class PredictionTaskSchemaSimpleAllOf(BaseModel):
    """
    Generated allOf fragment holding the category list for classification,
    object detection or instance segmentation tasks.
    """
    categories: conlist(PredictionTaskSchemaCategory) = Field(..., description="An array of the categories that exist for this prediction task. The id needs to be unique")
    __properties = ["categories"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemaSimpleAllOf:
        """Build a PredictionTaskSchemaSimpleAllOf from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        _dict = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize each nested category via its own to_dict().
        serialized_categories = []
        if self.categories:
            serialized_categories = [
                category.to_dict(by_alias=by_alias) for category in self.categories if category
            ]
        _dict['categories'] = serialized_categories
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemaSimpleAllOf:
        """Build a PredictionTaskSchemaSimpleAllOf from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemaSimpleAllOf.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemaSimpleAllOf) in the input: " + str(obj))
        raw_categories = obj.get("categories")
        return PredictionTaskSchemaSimpleAllOf.parse_obj({
            "categories": None if raw_categories is None else [
                PredictionTaskSchemaCategory.from_dict(entry) for entry in raw_categories
            ]
        })
| 3,396 | 38.045977 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/prediction_task_schemas.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List
from pydantic import Extra, BaseModel, Field, conint, conlist
from lightly.openapi_generated.swagger_client.models.prediction_task_schema import PredictionTaskSchema
class PredictionTaskSchemas(BaseModel):
    """
    A timestamped collection of PredictionTaskSchema objects.
    """
    prediction_uuid_timestamp: conint(strict=True, ge=0) = Field(..., alias="predictionUUIDTimestamp", description="unix timestamp in milliseconds")
    schemas: conlist(PredictionTaskSchema) = Field(...)
    __properties = ["predictionUUIDTimestamp", "schemas"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> PredictionTaskSchemas:
        """Build a PredictionTaskSchemas from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None values."""
        _dict = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        # Serialize each nested schema via its own to_dict().
        serialized_schemas = []
        if self.schemas:
            serialized_schemas = [
                schema.to_dict(by_alias=by_alias) for schema in self.schemas if schema
            ]
        _dict['schemas'] = serialized_schemas
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> PredictionTaskSchemas:
        """Build a PredictionTaskSchemas from a dict, rejecting unknown fields."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return PredictionTaskSchemas.parse_obj(obj)
        # Fail on any key that is not a declared model property.
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in PredictionTaskSchemas) in the input: " + str(obj))
        raw_schemas = obj.get("schemas")
        return PredictionTaskSchemas.parse_obj({
            "prediction_uuid_timestamp": obj.get("predictionUUIDTimestamp"),
            "schemas": None if raw_schemas is None else [
                PredictionTaskSchema.from_dict(entry) for entry in raw_schemas
            ]
        })
| 3,324 | 36.359551 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/profile_basic_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional
from pydantic import Extra, BaseModel, Field, StrictStr, conint, conlist
from lightly.openapi_generated.swagger_client.models.team_basic_data import TeamBasicData
class ProfileBasicData(BaseModel):
    """Basic profile data of a user: identity fields and team memberships."""
    id: StrictStr = Field(...)
    nickname: Optional[StrictStr] = None
    name: Optional[StrictStr] = None
    given_name: Optional[StrictStr] = Field(None, alias="givenName")
    family_name: Optional[StrictStr] = Field(None, alias="familyName")
    email: Optional[StrictStr] = None
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    teams: Optional[conlist(TeamBasicData)] = None
    __properties = ["id", "nickname", "name", "givenName", "familyName", "email", "createdAt", "teams"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> ProfileBasicData:
        """Parse a JSON string into a ProfileBasicData."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Return a dict representation with None-valued fields omitted.

        Entries of ``teams`` are serialized through their own ``to_dict``
        rather than pydantic's default conversion.
        """
        serialized = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        if self.teams:
            # key is 'teams' under both naming modes (field name == alias)
            serialized['teams'] = [
                member.to_dict(by_alias=by_alias) for member in self.teams if member
            ]
        return serialized

    @classmethod
    def from_dict(cls, obj: dict) -> ProfileBasicData:
        """Validate a dict (camelCase keys) into a ProfileBasicData.

        Unknown keys raise ``ValueError``.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return ProfileBasicData.parse_obj(obj)
        # reject any key outside the declared schema
        for key in obj.keys():
            if key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in ProfileBasicData) in the input: " + str(obj))
        return ProfileBasicData.parse_obj({
            "id": obj.get("id"),
            "nickname": obj.get("nickname"),
            "name": obj.get("name"),
            "given_name": obj.get("givenName"),
            "family_name": obj.get("familyName"),
            "email": obj.get("email"),
            "created_at": obj.get("createdAt"),
            "teams": [TeamBasicData.from_dict(item) for item in obj.get("teams")] if obj.get("teams") is not None else None
        })
| 3,785 | 36.485149 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/profile_me_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt, StrictStr, conint, conlist, constr
from lightly.openapi_generated.swagger_client.models.profile_me_data_settings import ProfileMeDataSettings
from lightly.openapi_generated.swagger_client.models.team_basic_data import TeamBasicData
from lightly.openapi_generated.swagger_client.models.user_type import UserType
class ProfileMeData(BaseModel):
    """
    ProfileMeData

    Profile of the requesting user: identity fields, an optional API token,
    team memberships and per-user settings.
    """
    id: StrictStr = Field(...)
    user_type: UserType = Field(..., alias="userType")
    email: StrictStr = Field(..., description="email of the user")
    nickname: Optional[StrictStr] = None
    name: Optional[StrictStr] = None
    given_name: Optional[StrictStr] = Field(None, alias="givenName")
    family_name: Optional[StrictStr] = Field(None, alias="familyName")
    token: Optional[constr(strict=True, min_length=5)] = Field(None, description="The user's token to be used for authentication via token querystring")
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    teams: Optional[conlist(TeamBasicData)] = None
    settings: ProfileMeDataSettings = Field(...)
    # numeric onboarding indicator -- exact semantics not visible in this module
    onboarding: Optional[Union[StrictFloat, StrictInt]] = None
    __properties = ["id", "userType", "email", "nickname", "name", "givenName", "familyName", "token", "createdAt", "teams", "settings", "onboarding"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> ProfileMeData:
        """Create an instance of ProfileMeData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model.

        None-valued fields are dropped; nested models (``teams`` entries and
        ``settings``) are serialized through their own ``to_dict``.
        """
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of each item in teams (list)
        _items = []
        if self.teams:
            for _item in self.teams:
                if _item:
                    _items.append(_item.to_dict(by_alias=by_alias))
            _dict['teams' if by_alias else 'teams'] = _items
        # override the default output from pydantic by calling `to_dict()` of settings
        if self.settings:
            _dict['settings' if by_alias else 'settings'] = self.settings.to_dict(by_alias=by_alias)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> ProfileMeData:
        """Create an instance of ProfileMeData from a dict.

        Expects camelCase (alias) keys; unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return ProfileMeData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in ProfileMeData) in the input: " + str(obj))
        _obj = ProfileMeData.parse_obj({
            "id": obj.get("id"),
            "user_type": obj.get("userType"),
            "email": obj.get("email"),
            "nickname": obj.get("nickname"),
            "name": obj.get("name"),
            "given_name": obj.get("givenName"),
            "family_name": obj.get("familyName"),
            "token": obj.get("token"),
            "created_at": obj.get("createdAt"),
            "teams": [TeamBasicData.from_dict(_item) for _item in obj.get("teams")] if obj.get("teams") is not None else None,
            "settings": ProfileMeDataSettings.from_dict(obj.get("settings")) if obj.get("settings") is not None else None,
            "onboarding": obj.get("onboarding")
        })
        return _obj
| 4,850 | 41.552632 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/profile_me_data_settings.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Optional
from pydantic import Extra, BaseModel, Field, StrictStr
class ProfileMeDataSettings(BaseModel):
    """Per-user preferences: locale plus date and number formats.

    Unlike most generated models, unknown input keys are preserved in
    ``additional_properties`` instead of being rejected.
    """
    locale: Optional[StrictStr] = Field('en', description="Which locale does the user prefer")
    date_format: Optional[StrictStr] = Field(None, alias="dateFormat", description="Which format for dates does the user prefer")
    number_format: Optional[StrictStr] = Field(None, alias="numberFormat", description="Which format for numbers does the user prefer")
    additional_properties: Dict[str, Any] = {}
    __properties = ["locale", "dateFormat", "numberFormat"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> ProfileMeDataSettings:
        """Parse a JSON string into a ProfileMeDataSettings."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Return a dict representation with None-valued fields omitted.

        Keys held in ``additional_properties`` are flattened back into the
        top level of the returned dict.
        """
        serialized = self.dict(by_alias=by_alias,
                               exclude={
                                   "additional_properties"
                               },
                               exclude_none=True)
        if self.additional_properties is not None:
            serialized.update(self.additional_properties)
        return serialized

    @classmethod
    def from_dict(cls, obj: dict) -> ProfileMeDataSettings:
        """Build a ProfileMeDataSettings from a dict.

        Unknown keys do not raise; they are collected into
        ``additional_properties``.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return ProfileMeDataSettings.parse_obj(obj)
        parsed = ProfileMeDataSettings.parse_obj({
            "locale": obj.get("locale") if obj.get("locale") is not None else 'en',
            "date_format": obj.get("dateFormat"),
            "number_format": obj.get("numberFormat")
        })
        # keep any key outside the declared schema instead of rejecting it
        for key in obj.keys():
            if key not in cls.__properties:
                parsed.additional_properties[key] = obj.get(key)
        return parsed
| 3,261 | 35.244444 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/questionnaire_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, constr
from lightly.openapi_generated.swagger_client.models.sector import Sector
class QuestionnaireData(BaseModel):
    """Questionnaire answers: an optional company name and sector."""
    company: Optional[constr(strict=True, min_length=3)] = None
    sector: Optional[Sector] = None
    __properties = ["company", "sector"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> QuestionnaireData:
        """Parse a JSON string into a QuestionnaireData."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Return a dict representation with None-valued fields omitted."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> QuestionnaireData:
        """Validate a dict into a QuestionnaireData; unknown keys raise."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return QuestionnaireData.parse_obj(obj)
        # reject any key outside the declared schema
        for key in obj.keys():
            if key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in QuestionnaireData) in the input: " + str(obj))
        return QuestionnaireData.parse_obj({
            "company": obj.get("company"),
            "sector": obj.get("sector")
        })
| 2,642 | 31.231707 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/s3_region.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class S3Region(str, Enum):
    """AWS S3 bucket region identifier.

    The region where your bucket is located; see
    https://docs.aws.amazon.com/general/latest/gr/s3.html for further
    information.
    """

    # allowed enum values
    US_MINUS_EAST_MINUS_2 = 'us-east-2'
    US_MINUS_EAST_MINUS_1 = 'us-east-1'
    US_MINUS_WEST_MINUS_1 = 'us-west-1'
    US_MINUS_WEST_MINUS_2 = 'us-west-2'
    AF_MINUS_SOUTH_MINUS_1 = 'af-south-1'
    AP_MINUS_EAST_MINUS_1 = 'ap-east-1'
    AP_MINUS_SOUTH_MINUS_2 = 'ap-south-2'
    AP_MINUS_SOUTHEAST_MINUS_3 = 'ap-southeast-3'
    AP_MINUS_SOUTHEAST_MINUS_4 = 'ap-southeast-4'
    AP_MINUS_SOUTH_MINUS_1 = 'ap-south-1'
    AP_MINUS_NORTHEAST_MINUS_3 = 'ap-northeast-3'
    AP_MINUS_NORTHEAST_MINUS_2 = 'ap-northeast-2'
    AP_MINUS_SOUTHEAST_MINUS_1 = 'ap-southeast-1'
    AP_MINUS_SOUTHEAST_MINUS_2 = 'ap-southeast-2'
    AP_MINUS_NORTHEAST_MINUS_1 = 'ap-northeast-1'
    CA_MINUS_CENTRAL_MINUS_1 = 'ca-central-1'
    CN_MINUS_NORTHWEST_MINUS_1 = 'cn-northwest-1'
    EU_MINUS_CENTRAL_MINUS_1 = 'eu-central-1'
    EU_MINUS_WEST_MINUS_1 = 'eu-west-1'
    EU_MINUS_WEST_MINUS_2 = 'eu-west-2'
    EU_MINUS_SOUTH_MINUS_1 = 'eu-south-1'
    EU_MINUS_WEST_MINUS_3 = 'eu-west-3'
    EU_MINUS_NORTH_MINUS_1 = 'eu-north-1'
    EU_MINUS_SOUTH_MINUS_2 = 'eu-south-2'
    EU_MINUS_CENTRAL_MINUS_2 = 'eu-central-2'
    ME_MINUS_SOUTH_MINUS_1 = 'me-south-1'
    ME_MINUS_CENTRAL_MINUS_1 = 'me-central-1'
    SA_MINUS_EAST_MINUS_1 = 'sa-east-1'
    US_MINUS_GOV_MINUS_EAST = 'us-gov-east'
    US_MINUS_GOV_MINUS_WEST = 'us-gov-west'

    @classmethod
    def from_json(cls, json_str: str) -> 'S3Region':
        """Decode a JSON-encoded string and look up the matching member."""
        return cls(json.loads(json_str))
| 2,289 | 31.253521 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sama_task.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictInt
from lightly.openapi_generated.swagger_client.models.sama_task_data import SamaTaskData
class SamaTask(BaseModel):
    """A task with optional scheduling fields and a required data payload."""
    priority: Optional[StrictInt] = None
    reserve_for: Optional[StrictInt] = None
    data: SamaTaskData = Field(...)
    __properties = ["priority", "reserve_for", "data"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SamaTask:
        """Parse a JSON string into a SamaTask."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Return a dict representation with None-valued fields omitted.

        The nested ``data`` model is serialized via its own ``to_dict``.
        """
        serialized = self.dict(by_alias=by_alias, exclude={}, exclude_none=True)
        if self.data:
            # key is 'data' under both naming modes (field name == alias)
            serialized['data'] = self.data.to_dict(by_alias=by_alias)
        return serialized

    @classmethod
    def from_dict(cls, obj: dict) -> SamaTask:
        """Validate a dict into a SamaTask; unknown keys raise ValueError."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SamaTask.parse_obj(obj)
        # reject any key outside the declared schema
        for key in obj.keys():
            if key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SamaTask) in the input: " + str(obj))
        return SamaTask.parse_obj({
            "priority": obj.get("priority"),
            "reserve_for": obj.get("reserve_for"),
            "data": SamaTaskData.from_dict(obj.get("data")) if obj.get("data") is not None else None
        })
| 2,928 | 32.666667 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sama_task_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictInt, StrictStr
class SamaTaskData(BaseModel):
    """Payload of a task: a numeric id plus time-limited resource URLs."""
    id: StrictInt = Field(...)
    url: StrictStr = Field(..., description="A URL which allows anyone in possession of said URL for the time specified by the expiresIn query param to access the resource")
    image: Optional[StrictStr] = Field(None, description="A URL which allows anyone in possession of said URL for the time specified by the expiresIn query param to access the resource")
    lightly_file_name: Optional[StrictStr] = Field(None, alias="lightlyFileName", description="The original fileName of the sample. This is unique within a dataset")
    lightly_meta_info: Optional[StrictStr] = Field(None, alias="lightlyMetaInfo")
    __properties = ["id", "url", "image", "lightlyFileName", "lightlyMetaInfo"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Return a pretty-printed string form of the model."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SamaTaskData:
        """Parse a JSON string into a SamaTaskData."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Return a dict representation with None-valued fields omitted."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> SamaTaskData:
        """Validate a dict (camelCase keys) into a SamaTaskData.

        Unknown keys raise ``ValueError``.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SamaTaskData.parse_obj(obj)
        # reject any key outside the declared schema
        for key in obj.keys():
            if key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SamaTaskData) in the input: " + str(obj))
        return SamaTaskData.parse_obj({
            "id": obj.get("id"),
            "url": obj.get("url"),
            "image": obj.get("image"),
            "lightly_file_name": obj.get("lightlyFileName"),
            "lightly_meta_info": obj.get("lightlyMetaInfo")
        })
| 3,268 | 36.574713 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_create_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Optional
from pydantic import Extra, BaseModel, Field, StrictStr
from lightly.openapi_generated.swagger_client.models.crop_data import CropData
from lightly.openapi_generated.swagger_client.models.sample_meta_data import SampleMetaData
from lightly.openapi_generated.swagger_client.models.video_frame_data import VideoFrameData
class SampleCreateRequest(BaseModel):
    """
    SampleCreateRequest

    Request body for creating a sample: file names plus optional EXIF,
    metadata, custom metadata, video-frame and crop information.
    """
    file_name: StrictStr = Field(..., alias="fileName")
    thumb_name: Optional[StrictStr] = Field(None, alias="thumbName")
    exif: Optional[Dict[str, Any]] = None
    meta_data: Optional[SampleMetaData] = Field(None, alias="metaData")
    custom_meta_data: Optional[Dict[str, Any]] = Field(None, alias="customMetaData")
    video_frame_data: Optional[VideoFrameData] = Field(None, alias="videoFrameData")
    crop_data: Optional[CropData] = Field(None, alias="cropData")
    __properties = ["fileName", "thumbName", "exif", "metaData", "customMetaData", "videoFrameData", "cropData"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SampleCreateRequest:
        """Create an instance of SampleCreateRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model.

        None-valued fields are dropped, nested models are serialized via
        their own ``to_dict``, and an explicitly-set None
        ``custom_meta_data`` is preserved as None in the output.
        """
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of meta_data
        if self.meta_data:
            _dict['metaData' if by_alias else 'meta_data'] = self.meta_data.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of video_frame_data
        if self.video_frame_data:
            _dict['videoFrameData' if by_alias else 'video_frame_data'] = self.video_frame_data.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of crop_data
        if self.crop_data:
            _dict['cropData' if by_alias else 'crop_data'] = self.crop_data.to_dict(by_alias=by_alias)
        # set to None if custom_meta_data (nullable) is None
        # and __fields_set__ contains the field
        if self.custom_meta_data is None and "custom_meta_data" in self.__fields_set__:
            _dict['customMetaData' if by_alias else 'custom_meta_data'] = None
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SampleCreateRequest:
        """Create an instance of SampleCreateRequest from a dict.

        Expects camelCase (alias) keys; unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SampleCreateRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SampleCreateRequest) in the input: " + str(obj))
        _obj = SampleCreateRequest.parse_obj({
            "file_name": obj.get("fileName"),
            "thumb_name": obj.get("thumbName"),
            "exif": obj.get("exif"),
            "meta_data": SampleMetaData.from_dict(obj.get("metaData")) if obj.get("metaData") is not None else None,
            "custom_meta_data": obj.get("customMetaData"),
            "video_frame_data": VideoFrameData.from_dict(obj.get("videoFrameData")) if obj.get("videoFrameData") is not None else None,
            "crop_data": CropData.from_dict(obj.get("cropData")) if obj.get("cropData") is not None else None
        })
        return _obj
| 4,752 | 43.009259 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Optional
from pydantic import Extra, BaseModel, Field, StrictInt, StrictStr, conint, constr, validator
from lightly.openapi_generated.swagger_client.models.crop_data import CropData
from lightly.openapi_generated.swagger_client.models.sample_meta_data import SampleMetaData
from lightly.openapi_generated.swagger_client.models.sample_type import SampleType
from lightly.openapi_generated.swagger_client.models.video_frame_data import VideoFrameData
class SampleData(BaseModel):
    """
    SampleData

    A sample record belonging to a dataset, with optional EXIF, metadata,
    video-frame and crop information.
    """
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    type: SampleType = Field(...)
    dataset_id: Optional[constr(strict=True)] = Field(None, alias="datasetId", description="MongoDB ObjectId")
    file_name: StrictStr = Field(..., alias="fileName")
    thumb_name: Optional[StrictStr] = Field(None, alias="thumbName")
    exif: Optional[Dict[str, Any]] = None
    index: Optional[StrictInt] = None
    created_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="createdAt", description="unix timestamp in milliseconds")
    last_modified_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="lastModifiedAt", description="unix timestamp in milliseconds")
    meta_data: Optional[SampleMetaData] = Field(None, alias="metaData")
    custom_meta_data: Optional[Dict[str, Any]] = Field(None, alias="customMetaData")
    video_frame_data: Optional[VideoFrameData] = Field(None, alias="videoFrameData")
    crop_data: Optional[CropData] = Field(None, alias="cropData")
    __properties = ["id", "type", "datasetId", "fileName", "thumbName", "exif", "index", "createdAt", "lastModifiedAt", "metaData", "customMetaData", "videoFrameData", "cropData"]

    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Validates the regular expression: id must be a 24-character lowercase hex ObjectId"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('dataset_id')
    def dataset_id_validate_regular_expression(cls, value):
        """Validates the regular expression: dataset_id, when set, must be a 24-character lowercase hex ObjectId"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SampleData:
        """Create an instance of SampleData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model.

        None-valued fields are dropped, nested models are serialized via
        their own ``to_dict``, and explicitly-set None values of the
        nullable fields ``thumb_name`` / ``custom_meta_data`` are kept as
        None in the output.
        """
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of meta_data
        if self.meta_data:
            _dict['metaData' if by_alias else 'meta_data'] = self.meta_data.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of video_frame_data
        if self.video_frame_data:
            _dict['videoFrameData' if by_alias else 'video_frame_data'] = self.video_frame_data.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of crop_data
        if self.crop_data:
            _dict['cropData' if by_alias else 'crop_data'] = self.crop_data.to_dict(by_alias=by_alias)
        # set to None if thumb_name (nullable) is None
        # and __fields_set__ contains the field
        if self.thumb_name is None and "thumb_name" in self.__fields_set__:
            _dict['thumbName' if by_alias else 'thumb_name'] = None
        # set to None if custom_meta_data (nullable) is None
        # and __fields_set__ contains the field
        if self.custom_meta_data is None and "custom_meta_data" in self.__fields_set__:
            _dict['customMetaData' if by_alias else 'custom_meta_data'] = None
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SampleData:
        """Create an instance of SampleData from a dict.

        Expects camelCase (alias) keys; unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SampleData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SampleData) in the input: " + str(obj))
        _obj = SampleData.parse_obj({
            "id": obj.get("id"),
            "type": obj.get("type"),
            "dataset_id": obj.get("datasetId"),
            "file_name": obj.get("fileName"),
            "thumb_name": obj.get("thumbName"),
            "exif": obj.get("exif"),
            "index": obj.get("index"),
            "created_at": obj.get("createdAt"),
            "last_modified_at": obj.get("lastModifiedAt"),
            "meta_data": SampleMetaData.from_dict(obj.get("metaData")) if obj.get("metaData") is not None else None,
            "custom_meta_data": obj.get("customMetaData"),
            "video_frame_data": VideoFrameData.from_dict(obj.get("videoFrameData")) if obj.get("videoFrameData") is not None else None,
            "crop_data": CropData.from_dict(obj.get("cropData")) if obj.get("cropData") is not None else None
        })
        return _obj
| 6,528 | 44.657343 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_data_modes.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Optional
from pydantic import Extra, BaseModel, Field, StrictInt, StrictStr, conint, constr, validator
from lightly.openapi_generated.swagger_client.models.crop_data import CropData
from lightly.openapi_generated.swagger_client.models.sample_meta_data import SampleMetaData
from lightly.openapi_generated.swagger_client.models.sample_type import SampleType
from lightly.openapi_generated.swagger_client.models.video_frame_data import VideoFrameData
class SampleDataModes(BaseModel):
    """
    SampleDataModes

    OpenAPI-generated pydantic model for a sample record in which every field
    except ``id`` is optional, so the same class can carry the partial
    representations the API returns. Auto-generated — do not edit manually.
    """
    # Required 24-character lowercase-hex MongoDB ObjectId (pattern enforced below).
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    # IMAGE / VIDEO_FRAME / CROP; determined by the API.
    type: Optional[SampleType] = None
    dataset_id: Optional[constr(strict=True)] = Field(None, alias="datasetId", description="MongoDB ObjectId")
    file_name: Optional[StrictStr] = Field(None, alias="fileName")
    # Nullable: an explicit None is preserved in to_dict() output (see below).
    thumb_name: Optional[StrictStr] = Field(None, alias="thumbName")
    exif: Optional[Dict[str, Any]] = None
    index: Optional[StrictInt] = None
    # Non-negative unix timestamps in milliseconds.
    created_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="createdAt", description="unix timestamp in milliseconds")
    last_modified_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="lastModifiedAt", description="unix timestamp in milliseconds")
    meta_data: Optional[SampleMetaData] = Field(None, alias="metaData")
    # Nullable free-form user metadata; also preserved as explicit None in to_dict().
    custom_meta_data: Optional[Dict[str, Any]] = Field(None, alias="customMetaData")
    video_frame_data: Optional[VideoFrameData] = Field(None, alias="videoFrameData")
    crop_data: Optional[CropData] = Field(None, alias="cropData")
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["id", "type", "datasetId", "fileName", "thumbName", "exif", "index", "createdAt", "lastModifiedAt", "metaData", "customMetaData", "videoFrameData", "cropData"]

    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('dataset_id')
    def dataset_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: skip the pattern check when unset.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SampleDataModes:
        """Create an instance of SampleDataModes from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of meta_data
        if self.meta_data:
            _dict['metaData' if by_alias else 'meta_data'] = self.meta_data.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of video_frame_data
        if self.video_frame_data:
            _dict['videoFrameData' if by_alias else 'video_frame_data'] = self.video_frame_data.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of crop_data
        if self.crop_data:
            _dict['cropData' if by_alias else 'crop_data'] = self.crop_data.to_dict(by_alias=by_alias)
        # set to None if thumb_name (nullable) is None
        # and __fields_set__ contains the field
        if self.thumb_name is None and "thumb_name" in self.__fields_set__:
            _dict['thumbName' if by_alias else 'thumb_name'] = None
        # set to None if custom_meta_data (nullable) is None
        # and __fields_set__ contains the field
        if self.custom_meta_data is None and "custom_meta_data" in self.__fields_set__:
            _dict['customMetaData' if by_alias else 'custom_meta_data'] = None
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SampleDataModes:
        """Create an instance of SampleDataModes from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SampleDataModes.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SampleDataModes) in the input: " + str(obj))
        # Map camelCase wire keys to snake_case fields; nested models recurse via from_dict().
        _obj = SampleDataModes.parse_obj({
            "id": obj.get("id"),
            "type": obj.get("type"),
            "dataset_id": obj.get("datasetId"),
            "file_name": obj.get("fileName"),
            "thumb_name": obj.get("thumbName"),
            "exif": obj.get("exif"),
            "index": obj.get("index"),
            "created_at": obj.get("createdAt"),
            "last_modified_at": obj.get("lastModifiedAt"),
            "meta_data": SampleMetaData.from_dict(obj.get("metaData")) if obj.get("metaData") is not None else None,
            "custom_meta_data": obj.get("customMetaData"),
            "video_frame_data": VideoFrameData.from_dict(obj.get("videoFrameData")) if obj.get("videoFrameData") is not None else None,
            "crop_data": CropData.from_dict(obj.get("cropData")) if obj.get("cropData") is not None else None
        })
        return _obj
| 6,588 | 45.076923 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_meta_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
class SampleMetaData(BaseModel):
    """
    SampleMetaData

    OpenAPI-generated pydantic model holding per-sample statistics (sharpness,
    luminance, per-channel mean/std, ...) plus free-form ``custom``/``dynamic``
    metadata dicts. Auto-generated — do not edit manually.
    """
    custom: Optional[Dict[str, Any]] = None
    dynamic: Optional[Dict[str, Any]] = None
    # Non-negative scalar statistic.
    sharpness: Optional[Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)]] = None
    # Constrained to [0, 100].
    luminance: Optional[Union[confloat(le=100, ge=0, strict=True), conint(le=100, ge=0, strict=True)]] = None
    size_in_bytes: Optional[conint(strict=True, ge=0)] = Field(None, alias="sizeInBytes")
    snr: Optional[Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)]] = None
    # Ratio in [0, 1].
    uniform_row_ratio: Optional[Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)]] = Field(None, alias="uniformRowRatio")
    # Per-channel statistics: exactly 3 entries each (presumably one per RGB channel — TODO confirm).
    mean: Optional[conlist(Union[confloat(le=1, ge=0, strict=True), conint(le=1, ge=0, strict=True)], max_items=3, min_items=3)] = None
    shape: Optional[conlist(conint(strict=True, ge=0), max_items=3, min_items=3)] = None
    std: Optional[conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=3, min_items=3)] = None
    sum_of_squares: Optional[conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=3, min_items=3)] = Field(None, alias="sumOfSquares")
    sum_of_values: Optional[conlist(Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)], max_items=3, min_items=3)] = Field(None, alias="sumOfValues")
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["custom", "dynamic", "sharpness", "luminance", "sizeInBytes", "snr", "uniformRowRatio", "mean", "shape", "std", "sumOfSquares", "sumOfValues"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SampleMetaData:
        """Create an instance of SampleMetaData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SampleMetaData:
        """Create an instance of SampleMetaData from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SampleMetaData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SampleMetaData) in the input: " + str(obj))
        # Map camelCase wire keys to snake_case fields.
        _obj = SampleMetaData.parse_obj({
            "custom": obj.get("custom"),
            "dynamic": obj.get("dynamic"),
            "sharpness": obj.get("sharpness"),
            "luminance": obj.get("luminance"),
            "size_in_bytes": obj.get("sizeInBytes"),
            "snr": obj.get("snr"),
            "uniform_row_ratio": obj.get("uniformRowRatio"),
            "mean": obj.get("mean"),
            "shape": obj.get("shape"),
            "std": obj.get("std"),
            "sum_of_squares": obj.get("sumOfSquares"),
            "sum_of_values": obj.get("sumOfValues")
        })
        return _obj
| 4,364 | 42.217822 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_partial_mode.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SamplePartialMode(str, Enum):
    """How much of each sample the API should return.

    ids: only the id; fileNames: id and fileName; full: all data.
    """

    IDS = 'ids'
    FILENAMES = 'fileNames'
    FULL = 'full'

    @classmethod
    def from_json(cls, json_str: str) -> 'SamplePartialMode':
        """Deserialize a JSON string (e.g. ``'"ids"'``) into a SamplePartialMode."""
        return cls(json.loads(json_str))
| 1,022 | 22.25 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_sort_by.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SampleSortBy(str, Enum):
    """Sort key accepted when listing samples: by document id or by index."""

    ID = '_id'
    INDEX = 'index'

    @classmethod
    def from_json(cls, json_str: str) -> 'SampleSortBy':
        """Deserialize a JSON string (e.g. ``'"_id"'``) into a SampleSortBy."""
        return cls(json.loads(json_str))
| 903 | 20.023256 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_type.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SampleType(str, Enum):
    """Type of the sample (CROP, IMAGE or VIDEO_FRAME); determined by the API."""

    CROP = 'CROP'
    IMAGE = 'IMAGE'
    VIDEO_FRAME = 'VIDEO_FRAME'

    @classmethod
    def from_json(cls, json_str: str) -> 'SampleType':
        """Deserialize a JSON string (e.g. ``'"IMAGE"'``) into a SampleType."""
        return cls(json.loads(json_str))
| 990 | 21.522727 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_update_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Optional
from pydantic import Extra, BaseModel, Field, StrictStr
from lightly.openapi_generated.swagger_client.models.sample_meta_data import SampleMetaData
class SampleUpdateRequest(BaseModel):
    """
    SampleUpdateRequest

    OpenAPI-generated pydantic model for a partial sample update: every field
    is optional, so only the supplied fields are changed. Auto-generated —
    do not edit manually.
    """
    file_name: Optional[StrictStr] = Field(None, alias="fileName")
    thumb_name: Optional[StrictStr] = Field(None, alias="thumbName")
    exif: Optional[Dict[str, Any]] = None
    meta_data: Optional[SampleMetaData] = Field(None, alias="metaData")
    # Nullable: an explicit None is preserved in to_dict() output (see below).
    custom_meta_data: Optional[Dict[str, Any]] = Field(None, alias="customMetaData")
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["fileName", "thumbName", "exif", "metaData", "customMetaData"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SampleUpdateRequest:
        """Create an instance of SampleUpdateRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of meta_data
        if self.meta_data:
            _dict['metaData' if by_alias else 'meta_data'] = self.meta_data.to_dict(by_alias=by_alias)
        # set to None if custom_meta_data (nullable) is None
        # and __fields_set__ contains the field
        if self.custom_meta_data is None and "custom_meta_data" in self.__fields_set__:
            _dict['customMetaData' if by_alias else 'custom_meta_data'] = None
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SampleUpdateRequest:
        """Create an instance of SampleUpdateRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SampleUpdateRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SampleUpdateRequest) in the input: " + str(obj))
        # Map camelCase wire keys to snake_case fields; nested metaData recurses via from_dict().
        _obj = SampleUpdateRequest.parse_obj({
            "file_name": obj.get("fileName"),
            "thumb_name": obj.get("thumbName"),
            "exif": obj.get("exif"),
            "meta_data": SampleMetaData.from_dict(obj.get("metaData")) if obj.get("metaData") is not None else None,
            "custom_meta_data": obj.get("customMetaData")
        })
        return _obj
| 3,694 | 37.489583 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sample_write_urls.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr
class SampleWriteUrls(BaseModel):
    """
    SampleWriteUrls

    OpenAPI-generated pydantic model bundling the two required upload URLs for
    a sample: ``full`` (full-size image) and ``thumb`` (thumbnail).
    Auto-generated — do not edit manually.
    """
    full: StrictStr = Field(...)
    thumb: StrictStr = Field(...)
    # Wire-format property names; from_dict() uses this list to reject unknown keys.
    __properties = ["full", "thumb"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SampleWriteUrls:
        """Create an instance of SampleWriteUrls from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SampleWriteUrls:
        """Create an instance of SampleWriteUrls from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SampleWriteUrls.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SampleWriteUrls) in the input: " + str(obj))
        _obj = SampleWriteUrls.parse_obj({
            "full": obj.get("full"),
            "thumb": obj.get("thumb")
        })
        return _obj
| 2,488 | 29.728395 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sampling_config.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field
from lightly.openapi_generated.swagger_client.models.sampling_config_stopping_condition import SamplingConfigStoppingCondition
class SamplingConfig(BaseModel):
    """
    SamplingConfig

    OpenAPI-generated pydantic model wrapping the optional stopping condition
    of a sampling run. Auto-generated — do not edit manually.
    """
    stopping_condition: Optional[SamplingConfigStoppingCondition] = Field(None, alias="stoppingCondition")
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["stoppingCondition"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SamplingConfig:
        """Create an instance of SamplingConfig from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of stopping_condition
        if self.stopping_condition:
            _dict['stoppingCondition' if by_alias else 'stopping_condition'] = self.stopping_condition.to_dict(by_alias=by_alias)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SamplingConfig:
        """Create an instance of SamplingConfig from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SamplingConfig.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SamplingConfig) in the input: " + str(obj))
        _obj = SamplingConfig.parse_obj({
            "stopping_condition": SamplingConfigStoppingCondition.from_dict(obj.get("stoppingCondition")) if obj.get("stoppingCondition") is not None else None
        })
        return _obj
| 3,014 | 35.325301 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sampling_config_stopping_condition.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional, Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt
class SamplingConfigStoppingCondition(BaseModel):
    """
    SamplingConfigStoppingCondition

    OpenAPI-generated pydantic model describing when a sampling run stops:
    either after a target number/fraction of samples or once the minimum
    inter-sample distance would be violated. Auto-generated — do not edit
    manually.
    """
    # Values in (0, 1] are fractions of the dataset; values > 1 are absolute counts (per API description).
    n_samples: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="nSamples", description="How many samples/images should be used for the sampling. 0-1 represents a percentage of all. 1-N are absolute numbers")
    min_distance: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="minDistance", description="The minimum distance sampled images should have. Before the distance would fall below, the sampling is stopped.")
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["nSamples", "minDistance"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SamplingConfigStoppingCondition:
        """Create an instance of SamplingConfigStoppingCondition from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SamplingConfigStoppingCondition:
        """Create an instance of SamplingConfigStoppingCondition from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SamplingConfigStoppingCondition.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SamplingConfigStoppingCondition) in the input: " + str(obj))
        _obj = SamplingConfigStoppingCondition.parse_obj({
            "n_samples": obj.get("nSamples"),
            "min_distance": obj.get("minDistance")
        })
        return _obj
| 3,084 | 37.08642 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sampling_create_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional, Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt, constr, validator
from lightly.openapi_generated.swagger_client.models.sampling_config import SamplingConfig
from lightly.openapi_generated.swagger_client.models.sampling_method import SamplingMethod
class SamplingCreateRequest(BaseModel):
    """
    SamplingCreateRequest

    OpenAPI-generated pydantic model for the request body that starts a
    sampling run: target tag name, sampling method and configuration, plus
    optional preselected/query tags and active-learning score type.
    Auto-generated — do not edit manually.
    """
    # Tag names must be >= 3 chars and match the pattern enforced by the validator below.
    new_tag_name: constr(strict=True, min_length=3) = Field(..., alias="newTagName", description="The name of the tag")
    method: SamplingMethod = Field(...)
    config: SamplingConfig = Field(...)
    # Optional 24-char hex MongoDB ObjectIds (patterns enforced by validators below).
    preselected_tag_id: Optional[constr(strict=True)] = Field(None, alias="preselectedTagId", description="MongoDB ObjectId")
    query_tag_id: Optional[constr(strict=True)] = Field(None, alias="queryTagId", description="MongoDB ObjectId")
    score_type: Optional[constr(strict=True, min_length=1)] = Field(None, alias="scoreType", description="Type of active learning score")
    row_count: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="rowCount", description="temporary rowCount until the API/DB is aware how many they are..")
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["newTagName", "method", "config", "preselectedTagId", "queryTagId", "scoreType", "rowCount"]

    @validator('new_tag_name')
    def new_tag_name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value

    @validator('preselected_tag_id')
    def preselected_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: skip the pattern check when unset.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('query_tag_id')
    def query_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: skip the pattern check when unset.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('score_type')
    def score_type_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: skip the pattern check when unset.
        if value is None:
            return value
        if not re.match(r"^[a-zA-Z0-9_+=,.@:\/-]*$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9_+=,.@:\/-]*$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SamplingCreateRequest:
        """Create an instance of SamplingCreateRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of config
        if self.config:
            _dict['config' if by_alias else 'config'] = self.config.to_dict(by_alias=by_alias)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SamplingCreateRequest:
        """Create an instance of SamplingCreateRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SamplingCreateRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SamplingCreateRequest) in the input: " + str(obj))
        # Map camelCase wire keys to snake_case fields; nested config recurses via from_dict().
        _obj = SamplingCreateRequest.parse_obj({
            "new_tag_name": obj.get("newTagName"),
            "method": obj.get("method"),
            "config": SamplingConfig.from_dict(obj.get("config")) if obj.get("config") is not None else None,
            "preselected_tag_id": obj.get("preselectedTagId"),
            "query_tag_id": obj.get("queryTagId"),
            "score_type": obj.get("scoreType"),
            "row_count": obj.get("rowCount")
        })
        return _obj
| 5,504 | 40.390977 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sampling_method.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SamplingMethod(str, Enum):
    """Selection strategy used for a sampling run."""

    ACTIVE_LEARNING = 'ACTIVE_LEARNING'
    CORAL = 'CORAL'
    CORESET = 'CORESET'
    RANDOM = 'RANDOM'

    @classmethod
    def from_json(cls, json_str: str) -> 'SamplingMethod':
        """Deserialize a JSON string (e.g. ``'"CORESET"'``) into a SamplingMethod."""
        return cls(json.loads(json_str))
| 984 | 20.888889 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/sector.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class Sector(str, Enum):
    """Industry sector a user/organisation can belong to."""

    ADVERTISING = 'ADVERTISING'
    AGRICULTURE = 'AGRICULTURE'
    AUTOMOTIVE = 'AUTOMOTIVE'
    EDUCATION = 'EDUCATION'
    ENERGY = 'ENERGY'
    ENTERTAINMENT = 'ENTERTAINMENT'
    ENVIRONMENTAL = 'ENVIRONMENTAL'
    FINANCE = 'FINANCE'
    FOOD = 'FOOD'
    HEALTHCARE = 'HEALTHCARE'
    INTERNET_OF_THINGS = 'INTERNET_OF_THINGS'
    LOGISTICS = 'LOGISTICS'
    MACHINE_LEARNING = 'MACHINE_LEARNING'
    MANUFACTURING = 'MANUFACTURING'
    MEDICINE = 'MEDICINE'
    RECYCLING = 'RECYCLING'
    RETAIL = 'RETAIL'
    ROBOTICS = 'ROBOTICS'
    SECURITY = 'SECURITY'
    SOFTWARE_DEVELOPMENT = 'SOFTWARE_DEVELOPMENT'
    SPORTS = 'SPORTS'
    SURVEILLANCE = 'SURVEILLANCE'
    TRANSPORTATION = 'TRANSPORTATION'
    OTHER = 'OTHER'

    @classmethod
    def from_json(cls, json_str: str) -> 'Sector':
        """Deserialize a JSON string (e.g. ``'"RETAIL"'``) into a Sector."""
        return cls(json.loads(json_str))
| 1,570 | 23.169231 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_config.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional, Union
from pydantic import Extra, BaseModel, Field, confloat, conint, conlist
from lightly.openapi_generated.swagger_client.models.selection_config_entry import SelectionConfigEntry
class SelectionConfig(BaseModel):
    """
    SelectionConfig

    OpenAPI-generated pydantic model for a selection run: a target size
    (absolute ``nSamples`` or relative ``proportionSamples``) and at least one
    selection strategy. Auto-generated — do not edit manually.
    """
    # Absolute target count; -1 is allowed as a sentinel (ge=-1).
    n_samples: Optional[conint(strict=True, ge=-1)] = Field(None, alias="nSamples")
    # Relative target size in [0, 1].
    proportion_samples: Optional[Union[confloat(le=1.0, ge=0.0, strict=True), conint(le=1, ge=0, strict=True)]] = Field(None, alias="proportionSamples")
    # Required non-empty list of strategies.
    strategies: conlist(SelectionConfigEntry, min_items=1) = Field(...)
    # Wire-format (camelCase) property names; from_dict() uses this list to reject unknown keys.
    __properties = ["nSamples", "proportionSamples", "strategies"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        # Reject unknown fields at construction time (mirrored manually in from_dict()).
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SelectionConfig:
        """Create an instance of SelectionConfig from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of each item in strategies (list)
        _items = []
        if self.strategies:
            for _item in self.strategies:
                if _item:
                    _items.append(_item.to_dict(by_alias=by_alias))
            _dict['strategies' if by_alias else 'strategies'] = _items
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SelectionConfig:
        """Create an instance of SelectionConfig from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            # Non-dict input is handed straight to pydantic for coercion/validation.
            return SelectionConfig.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SelectionConfig) in the input: " + str(obj))
        # Map camelCase wire keys to snake_case fields; each strategy recurses via from_dict().
        _obj = SelectionConfig.parse_obj({
            "n_samples": obj.get("nSamples"),
            "proportion_samples": obj.get("proportionSamples"),
            "strategies": [SelectionConfigEntry.from_dict(_item) for _item in obj.get("strategies")] if obj.get("strategies") is not None else None
        })
        return _obj
| 3,467 | 37.10989 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_config_entry.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field
from lightly.openapi_generated.swagger_client.models.selection_config_entry_input import SelectionConfigEntryInput
from lightly.openapi_generated.swagger_client.models.selection_config_entry_strategy import SelectionConfigEntryStrategy
class SelectionConfigEntry(BaseModel):
    """
    SelectionConfigEntry

    One entry of a selection configuration: pairs an input source (``input``)
    with the strategy (``strategy``) applied to it. Both fields are required.
    """
    input: SelectionConfigEntryInput = Field(...)
    strategy: SelectionConfigEntryStrategy = Field(...)
    # Names of all declared properties; read as cls.__properties (name-mangled)
    # by from_dict() to reject unknown keys.
    __properties = ["input", "strategy"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SelectionConfigEntry:
        """Create an instance of SelectionConfigEntry from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of input
        # NOTE(review): the ternaries below are no-ops ('input'/'strategy' have no
        # aliases); they are generator boilerplate.
        if self.input:
            _dict['input' if by_alias else 'input'] = self.input.to_dict(by_alias=by_alias)
        # override the default output from pydantic by calling `to_dict()` of strategy
        if self.strategy:
            _dict['strategy' if by_alias else 'strategy'] = self.strategy.to_dict(by_alias=by_alias)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SelectionConfigEntry:
        """Create an instance of SelectionConfigEntry from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SelectionConfigEntry.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SelectionConfigEntry) in the input: " + str(obj))
        _obj = SelectionConfigEntry.parse_obj({
            "input": SelectionConfigEntryInput.from_dict(obj.get("input")) if obj.get("input") is not None else None,
            "strategy": SelectionConfigEntryStrategy.from_dict(obj.get("strategy")) if obj.get("strategy") is not None else None
        })
        return _obj
| 3,386 | 37.05618 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_config_entry_input.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional
from pydantic import Extra, BaseModel, Field, StrictInt, conlist, constr, validator
from lightly.openapi_generated.swagger_client.models.selection_input_predictions_name import SelectionInputPredictionsName
from lightly.openapi_generated.swagger_client.models.selection_input_type import SelectionInputType
class SelectionConfigEntryInput(BaseModel):
    """
    SelectionConfigEntryInput

    The input half of a selection-config entry. Only ``type`` is required;
    the optional fields qualify the input (task/score names, dataset/tag
    references, random seed, category list) and are regex-validated below.
    """
    type: SelectionInputType = Field(...)
    task: Optional[constr(strict=True)] = Field(None, description="Since we sometimes stitch together SelectionInputTask+ActiveLearningScoreType, they need to follow the same specs of ActiveLearningScoreType. However, this can be an empty string due to internal logic. ")
    score: Optional[constr(strict=True, min_length=1)] = Field(None, description="Type of active learning score")
    key: Optional[constr(strict=True, min_length=1)] = None
    name: Optional[SelectionInputPredictionsName] = None
    dataset_id: Optional[constr(strict=True)] = Field(None, alias="datasetId", description="MongoDB ObjectId")
    tag_name: Optional[constr(strict=True, min_length=3)] = Field(None, alias="tagName", description="The name of the tag")
    random_seed: Optional[StrictInt] = Field(None, alias="randomSeed")
    categories: Optional[conlist(constr(strict=True, min_length=1), min_items=1, unique_items=True)] = None
    # Names of all declared properties (camelCase aliases where defined); used
    # as cls.__properties (name-mangled) by from_dict() to reject unknown keys.
    __properties = ["type", "task", "score", "key", "name", "datasetId", "tagName", "randomSeed", "categories"]

    # task may be empty ('*' quantifier) — see the field description above.
    @validator('task')
    def task_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-zA-Z0-9_+=,.@:\/-]*$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9_+=,.@:\/-]*$/")
        return value

    @validator('score')
    def score_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-zA-Z0-9_+=,.@:\/-]*$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9_+=,.@:\/-]*$/")
        return value

    # 24 lowercase hex chars — a MongoDB ObjectId.
    @validator('dataset_id')
    def dataset_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('tag_name')
    def tag_name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SelectionConfigEntryInput:
        """Create an instance of SelectionConfigEntryInput from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SelectionConfigEntryInput:
        """Create an instance of SelectionConfigEntryInput from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SelectionConfigEntryInput.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SelectionConfigEntryInput) in the input: " + str(obj))
        # Map camelCase API keys to snake_case field names.
        _obj = SelectionConfigEntryInput.parse_obj({
            "type": obj.get("type"),
            "task": obj.get("task"),
            "score": obj.get("score"),
            "key": obj.get("key"),
            "name": obj.get("name"),
            "dataset_id": obj.get("datasetId"),
            "tag_name": obj.get("tagName"),
            "random_seed": obj.get("randomSeed"),
            "categories": obj.get("categories")
        })
        return _obj
| 5,573 | 39.686131 | 271 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_config_entry_strategy.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Optional, Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt
from lightly.openapi_generated.swagger_client.models.selection_strategy_threshold_operation import SelectionStrategyThresholdOperation
from lightly.openapi_generated.swagger_client.models.selection_strategy_type import SelectionStrategyType
class SelectionConfigEntryStrategy(BaseModel):
    """
    SelectionConfigEntryStrategy

    The strategy half of a selection-config entry. Only ``type`` is required;
    the remaining fields parameterize specific strategy types. Unlike the
    sibling models, these fields define no camelCase aliases, so the dict/JSON
    keys are the snake_case names themselves.
    """
    type: SelectionStrategyType = Field(...)
    stopping_condition_minimum_distance: Optional[Union[StrictFloat, StrictInt]] = None
    threshold: Optional[Union[StrictFloat, StrictInt]] = None
    operation: Optional[SelectionStrategyThresholdOperation] = None
    # Free-form mapping; schema not constrained here.
    target: Optional[Dict[str, Any]] = None
    # Names of all declared properties; used as cls.__properties (name-mangled)
    # by from_dict() to reject unknown keys.
    __properties = ["type", "stopping_condition_minimum_distance", "threshold", "operation", "target"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SelectionConfigEntryStrategy:
        """Create an instance of SelectionConfigEntryStrategy from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SelectionConfigEntryStrategy:
        """Create an instance of SelectionConfigEntryStrategy from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SelectionConfigEntryStrategy.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SelectionConfigEntryStrategy) in the input: " + str(obj))
        _obj = SelectionConfigEntryStrategy.parse_obj({
            "type": obj.get("type"),
            "stopping_condition_minimum_distance": obj.get("stopping_condition_minimum_distance"),
            "threshold": obj.get("threshold"),
            "operation": obj.get("operation"),
            "target": obj.get("target")
        })
        return _obj
| 3,405 | 37.269663 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_input_predictions_name.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SelectionInputPredictionsName(str, Enum):
    """Enumeration of SelectionInputPredictionsName values (str-valued)."""

    # Allowed enum values.
    CLASS_DISTRIBUTION = "CLASS_DISTRIBUTION"
    CATEGORY_COUNT = "CATEGORY_COUNT"

    @classmethod
    def from_json(cls, json_str: str) -> 'SelectionInputPredictionsName':
        """Create an instance of SelectionInputPredictionsName from a JSON string"""
        return cls(json.loads(json_str))
| 1,037 | 23.139535 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_input_type.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SelectionInputType(str, Enum):
    """Enumeration of SelectionInputType values (str-valued)."""

    # Allowed enum values.
    EMBEDDINGS = "EMBEDDINGS"
    SCORES = "SCORES"
    METADATA = "METADATA"
    PREDICTIONS = "PREDICTIONS"
    RANDOM = "RANDOM"

    @classmethod
    def from_json(cls, json_str: str) -> 'SelectionInputType':
        """Create an instance of SelectionInputType from a JSON string"""
        return cls(json.loads(json_str))
| 1,030 | 21.413043 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_strategy_threshold_operation.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SelectionStrategyThresholdOperation(str, Enum):
    """Enumeration of SelectionStrategyThresholdOperation values (str-valued)."""

    # Allowed enum values.
    SMALLER = "SMALLER"
    SMALLER_EQUAL = "SMALLER_EQUAL"
    BIGGER = "BIGGER"
    BIGGER_EQUAL = "BIGGER_EQUAL"

    @classmethod
    def from_json(cls, json_str: str) -> 'SelectionStrategyThresholdOperation':
        """Create an instance of SelectionStrategyThresholdOperation from a JSON string"""
        return cls(json.loads(json_str))
| 1,099 | 23.444444 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/selection_strategy_type.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SelectionStrategyType(str, Enum):
    """Enumeration of SelectionStrategyType values (str-valued)."""

    # Allowed enum values.
    DIVERSITY = "DIVERSITY"
    WEIGHTS = "WEIGHTS"
    THRESHOLD = "THRESHOLD"
    BALANCE = "BALANCE"
    SIMILARITY = "SIMILARITY"

    @classmethod
    def from_json(cls, json_str: str) -> 'SelectionStrategyType':
        """Create an instance of SelectionStrategyType from a JSON string"""
        return cls(json.loads(json_str))
| 1,047 | 21.782609 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/service_account_basic_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr, conint, constr
class ServiceAccountBasicData(BaseModel):
    """
    ServiceAccountBasicData

    Basic service-account record: id, display name, the authentication token
    (min length 5), and a creation timestamp. All fields are required.
    """
    id: StrictStr = Field(...)
    name: StrictStr = Field(...)
    token: constr(strict=True, min_length=5) = Field(..., description="The user's token to be used for authentication via token querystring")
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    # Names of all declared properties (camelCase aliases where defined); used
    # as cls.__properties (name-mangled) by from_dict() to reject unknown keys.
    __properties = ["id", "name", "token", "createdAt"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> ServiceAccountBasicData:
        """Create an instance of ServiceAccountBasicData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> ServiceAccountBasicData:
        """Create an instance of ServiceAccountBasicData from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return ServiceAccountBasicData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in ServiceAccountBasicData) in the input: " + str(obj))
        _obj = ServiceAccountBasicData.parse_obj({
            "id": obj.get("id"),
            "name": obj.get("name"),
            "token": obj.get("token"),
            "created_at": obj.get("createdAt")
        })
        return _obj
| 2,935 | 33.541176 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/set_embeddings_is_processed_flag_by_id_body_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt
class SetEmbeddingsIsProcessedFlagByIdBodyRequest(BaseModel):
    """
    SetEmbeddingsIsProcessedFlagByIdBodyRequest

    Request body carrying the single required field ``rowCount``.
    """
    row_count: Union[StrictFloat, StrictInt] = Field(..., alias="rowCount", description="Number of rows in the embeddings file")
    # Names of all declared properties (camelCase alias); used as
    # cls.__properties (name-mangled) by from_dict() to reject unknown keys.
    __properties = ["rowCount"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SetEmbeddingsIsProcessedFlagByIdBodyRequest:
        """Create an instance of SetEmbeddingsIsProcessedFlagByIdBodyRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SetEmbeddingsIsProcessedFlagByIdBodyRequest:
        """Create an instance of SetEmbeddingsIsProcessedFlagByIdBodyRequest from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SetEmbeddingsIsProcessedFlagByIdBodyRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SetEmbeddingsIsProcessedFlagByIdBodyRequest) in the input: " + str(obj))
        _obj = SetEmbeddingsIsProcessedFlagByIdBodyRequest.parse_obj({
            "row_count": obj.get("rowCount")
        })
        return _obj
| 2,804 | 34.506329 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/shared_access_config_create_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional
from pydantic import Extra, BaseModel, Field, StrictStr, conlist
from lightly.openapi_generated.swagger_client.models.creator import Creator
from lightly.openapi_generated.swagger_client.models.shared_access_type import SharedAccessType
class SharedAccessConfigCreateRequest(BaseModel):
    """
    SharedAccessConfigCreateRequest

    Request body for creating a shared-access config: a required access type
    plus optional user/team lists and an optional creator.
    """
    access_type: SharedAccessType = Field(..., alias="accessType")
    users: Optional[conlist(StrictStr)] = Field(None, description="List of users with access to the dataset.")
    teams: Optional[conlist(StrictStr)] = Field(None, description="List of teams with access to the dataset.")
    creator: Optional[Creator] = None
    # Names of all declared properties (camelCase aliases where defined); used
    # as cls.__properties (name-mangled) by from_dict() to reject unknown keys.
    __properties = ["accessType", "users", "teams", "creator"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SharedAccessConfigCreateRequest:
        """Create an instance of SharedAccessConfigCreateRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SharedAccessConfigCreateRequest:
        """Create an instance of SharedAccessConfigCreateRequest from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SharedAccessConfigCreateRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SharedAccessConfigCreateRequest) in the input: " + str(obj))
        _obj = SharedAccessConfigCreateRequest.parse_obj({
            "access_type": obj.get("accessType"),
            "users": obj.get("users"),
            "teams": obj.get("teams"),
            "creator": obj.get("creator")
        })
        return _obj
| 3,227 | 36.103448 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/shared_access_config_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List
from pydantic import Extra, BaseModel, Field, StrictStr, conint, conlist, constr, validator
from lightly.openapi_generated.swagger_client.models.shared_access_type import SharedAccessType
class SharedAccessConfigData(BaseModel):
    """
    SharedAccessConfigData

    Stored shared-access config: ObjectId, owner, access type, user/team
    lists, and creation/modification timestamps. All fields are required.
    """
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    owner: StrictStr = Field(..., description="Id of the user who owns the dataset")
    access_type: SharedAccessType = Field(..., alias="accessType")
    users: conlist(StrictStr) = Field(..., description="List of user mails with access to the dataset")
    teams: conlist(StrictStr) = Field(..., description="List of teams with access to the dataset")
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    last_modified_at: conint(strict=True, ge=0) = Field(..., alias="lastModifiedAt", description="unix timestamp in milliseconds")
    # Names of all declared properties (camelCase aliases where defined); used
    # as cls.__properties (name-mangled) by from_dict() to reject unknown keys.
    __properties = ["id", "owner", "accessType", "users", "teams", "createdAt", "lastModifiedAt"]

    # 24 lowercase hex chars — a MongoDB ObjectId.
    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> SharedAccessConfigData:
        """Create an instance of SharedAccessConfigData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> SharedAccessConfigData:
        """Create an instance of SharedAccessConfigData from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return SharedAccessConfigData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in SharedAccessConfigData) in the input: " + str(obj))
        _obj = SharedAccessConfigData.parse_obj({
            "id": obj.get("id"),
            "owner": obj.get("owner"),
            "access_type": obj.get("accessType"),
            "users": obj.get("users"),
            "teams": obj.get("teams"),
            "created_at": obj.get("createdAt"),
            "last_modified_at": obj.get("lastModifiedAt")
        })
        return _obj
| 3,890 | 38.30303 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/shared_access_type.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class SharedAccessType(str, Enum):
    """Enumeration of SharedAccessType values (str-valued)."""

    # Allowed enum values.
    WRITE = "WRITE"

    @classmethod
    def from_json(cls, json_str: str) -> 'SharedAccessType':
        """Create an instance of SharedAccessType from a JSON string"""
        return cls(json.loads(json_str))
| 908 | 20.642857 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_active_learning_scores_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, conint, constr, validator
class TagActiveLearningScoresData(BaseModel):
    """
    Array of scores belonging to tag

    Record linking an active-learning score entry (by score type) to a tag.
    All fields are required; ids are regex-validated ObjectIds.
    """
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    tag_id: constr(strict=True) = Field(..., alias="tagId", description="MongoDB ObjectId")
    score_type: constr(strict=True, min_length=1) = Field(..., alias="scoreType", description="Type of active learning score")
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    # Names of all declared properties (camelCase aliases where defined); used
    # as cls.__properties (name-mangled) by from_dict() to reject unknown keys.
    __properties = ["id", "tagId", "scoreType", "createdAt"]

    # 24 lowercase hex chars — a MongoDB ObjectId.
    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('tag_id')
    def tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('score_type')
    def score_type_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9_+=,.@:\/-]*$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9_+=,.@:\/-]*$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagActiveLearningScoresData:
        """Create an instance of TagActiveLearningScoresData from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False) -> dict:
        """Returns the dictionary representation of the model"""
        # exclude_none=True drops unset optional fields from the output.
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> TagActiveLearningScoresData:
        """Create an instance of TagActiveLearningScoresData from a dict.

        Returns None for None input; non-dict input goes straight to parse_obj;
        unknown keys raise ValueError.
        """
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagActiveLearningScoresData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagActiveLearningScoresData) in the input: " + str(obj))
        _obj = TagActiveLearningScoresData.parse_obj({
            "id": obj.get("id"),
            "tag_id": obj.get("tagId"),
            "score_type": obj.get("scoreType"),
            "created_at": obj.get("createdAt")
        })
        return _obj
| 3,963 | 36.396226 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_arithmetics_operation.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class TagArithmeticsOperation(str, Enum):
    """The possible arithmetic operations that can be done between multiple tags."""

    # Allowed enum values.
    UNION = "UNION"
    INTERSECTION = "INTERSECTION"
    DIFFERENCE = "DIFFERENCE"

    @classmethod
    def from_json(cls, json_str: str) -> 'TagArithmeticsOperation':
        """Create an instance of TagArithmeticsOperation from a JSON string"""
        return cls(json.loads(json_str))
| 1,058 | 23.068182 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_arithmetics_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, constr, validator
from lightly.openapi_generated.swagger_client.models.tag_arithmetics_operation import TagArithmeticsOperation
from lightly.openapi_generated.swagger_client.models.tag_creator import TagCreator
class TagArithmeticsRequest(BaseModel):
    """Request payload describing an arithmetic operation between two tags."""
    tag_id1: constr(strict=True) = Field(..., alias="tagId1", description="MongoDB ObjectId")
    tag_id2: constr(strict=True) = Field(..., alias="tagId2", description="MongoDB ObjectId")
    operation: TagArithmeticsOperation = Field(...)
    new_tag_name: Optional[constr(strict=True, min_length=3)] = Field(None, alias="newTagName", description="The name of the tag")
    creator: Optional[TagCreator] = None
    run_id: Optional[constr(strict=True)] = Field(None, alias="runId", description="MongoDB ObjectId")
    __properties = ["tagId1", "tagId2", "operation", "newTagName", "creator", "runId"]

    @validator('tag_id1')
    def tag_id1_validate_regular_expression(cls, value):
        """Ensure ``tagId1`` is a 24-character lowercase hex ObjectId."""
        if re.match(r"^[a-f0-9]{24}$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('tag_id2')
    def tag_id2_validate_regular_expression(cls, value):
        """Ensure ``tagId2`` is a 24-character lowercase hex ObjectId."""
        if re.match(r"^[a-f0-9]{24}$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    @validator('new_tag_name')
    def new_tag_name_validate_regular_expression(cls, value):
        """Ensure ``newTagName``, when given, only uses the allowed characters."""
        if value is not None and re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value

    @validator('run_id')
    def run_id_validate_regular_expression(cls, value):
        """Ensure ``runId``, when given, is a 24-character lowercase hex ObjectId."""
        if value is not None and re.match(r"^[a-f0-9]{24}$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagArithmeticsRequest:
        """Build a TagArithmeticsRequest from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagArithmeticsRequest:
        """Build a TagArithmeticsRequest from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagArithmeticsRequest.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagArithmeticsRequest) in the input: " + str(obj))
        return TagArithmeticsRequest.parse_obj({
            "tag_id1": obj.get("tagId1"),
            "tag_id2": obj.get("tagId2"),
            "operation": obj.get("operation"),
            "new_tag_name": obj.get("newTagName"),
            "creator": obj.get("creator"),
            "run_id": obj.get("runId")
        })
| 4,767 | 37.144 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_arithmetics_response.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
from inspect import getfullargspec
import json
import pprint
import re # noqa: F401
from typing import Any, List, Optional
from pydantic import BaseModel, Field, StrictStr, ValidationError, validator
from lightly.openapi_generated.swagger_client.models.create_entity_response import CreateEntityResponse
from lightly.openapi_generated.swagger_client.models.tag_bit_mask_response import TagBitMaskResponse
from typing import Any, List
from pydantic import StrictStr, Field, Extra
TAGARITHMETICSRESPONSE_ONE_OF_SCHEMAS = ["CreateEntityResponse", "TagBitMaskResponse"]
class TagArithmeticsResponse(BaseModel):
    """oneOf wrapper for the response of a tag-arithmetics call.

    Holds exactly one of the schemas listed in ``one_of_schemas``
    (CreateEntityResponse or TagBitMaskResponse) in ``actual_instance``.
    """
    # data type: CreateEntityResponse
    oneof_schema_1_validator: Optional[CreateEntityResponse] = None
    # data type: TagBitMaskResponse
    oneof_schema_2_validator: Optional[TagBitMaskResponse] = None
    # the wrapped value; checked by the validator below to match exactly one schema
    actual_instance: Any
    # names of the allowed schemas; const=True pins the default list
    one_of_schemas: List[str] = Field(TAGARITHMETICSRESPONSE_ONE_OF_SCHEMAS, const=True)
    class Config:
        """Pydantic model configuration."""
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def __init__(self, *args, **kwargs):
        """Accept either one positional value (becomes ``actual_instance``) or keyword args."""
        if args:
            if len(args) > 1:
                raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`")
            if kwargs:
                raise ValueError("If a position argument is used, keyword arguments cannot be used.")
            super().__init__(actual_instance=args[0])
        else:
            super().__init__(**kwargs)
    @validator('actual_instance')
    def actual_instance_must_validate_oneof(cls, v):
        """Require ``actual_instance`` to be an instance of exactly one allowed schema."""
        # NOTE(review): `instance` is created but never used in this validator
        instance = TagArithmeticsResponse.construct()
        error_messages = []
        match = 0
        # validate data type: CreateEntityResponse
        if not isinstance(v, CreateEntityResponse):
            error_messages.append(f"Error! Input type `{type(v)}` is not `CreateEntityResponse`")
        else:
            match += 1
        # validate data type: TagBitMaskResponse
        if not isinstance(v, TagBitMaskResponse):
            error_messages.append(f"Error! Input type `{type(v)}` is not `TagBitMaskResponse`")
        else:
            match += 1
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when setting `actual_instance` in TagArithmeticsResponse with oneOf schemas: CreateEntityResponse, TagBitMaskResponse. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when setting `actual_instance` in TagArithmeticsResponse with oneOf schemas: CreateEntityResponse, TagBitMaskResponse. Details: " + ", ".join(error_messages))
        else:
            return v
    @classmethod
    def from_dict(cls, obj: dict) -> TagArithmeticsResponse:
        """Create an instance from a dict by round-tripping through JSON."""
        return cls.from_json(json.dumps(obj))
    @classmethod
    def from_json(cls, json_str: str) -> TagArithmeticsResponse:
        """Returns the object represented by the json string"""
        instance = TagArithmeticsResponse.construct()
        error_messages = []
        match = 0
        # deserialize data into CreateEntityResponse
        try:
            instance.actual_instance = CreateEntityResponse.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        # deserialize data into TagBitMaskResponse
        # NOTE: if both parses succeed, match becomes 2 and the error below fires;
        # otherwise actual_instance keeps the single successful parse
        try:
            instance.actual_instance = TagBitMaskResponse.from_json(json_str)
            match += 1
        except (ValidationError, ValueError) as e:
            error_messages.append(str(e))
        if match > 1:
            # more than 1 match
            raise ValueError("Multiple matches found when deserializing the JSON string into TagArithmeticsResponse with oneOf schemas: CreateEntityResponse, TagBitMaskResponse. Details: " + ", ".join(error_messages))
        elif match == 0:
            # no match
            raise ValueError("No match found when deserializing the JSON string into TagArithmeticsResponse with oneOf schemas: CreateEntityResponse, TagBitMaskResponse. Details: " + ", ".join(error_messages))
        else:
            return instance
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the actual instance"""
        if self.actual_instance is None:
            return "null"
        to_json = getattr(self.actual_instance, "to_json", None)
        if callable(to_json):
            return self.actual_instance.to_json(by_alias=by_alias)
        else:
            # primitive type without its own serializer
            return json.dumps(self.actual_instance)
    def to_dict(self, by_alias: bool = False) -> Optional[dict]:
        """Returns the dict representation of the actual instance (None if unset)"""
        if self.actual_instance is None:
            return None
        to_dict = getattr(self.actual_instance, "to_dict", None)
        if callable(to_dict):
            return self.actual_instance.to_dict(by_alias=by_alias)
        else:
            # primitive type
            return self.actual_instance
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the actual instance"""
        return pprint.pformat(self.dict(by_alias=by_alias))
| 5,726 | 39.617021 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_bit_mask_response.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, constr, validator
class TagBitMaskResponse(BaseModel):
    """Response wrapping a tag bit mask encoded as a base16 (hex) string."""
    bit_mask_data: constr(strict=True) = Field(..., alias="bitMaskData", description="BitMask as a base16 (hex) string")
    __properties = ["bitMaskData"]

    @validator('bit_mask_data')
    def bit_mask_data_validate_regular_expression(cls, value):
        """Ensure the value is a lowercase hex string prefixed with ``0x``."""
        if re.match(r"^0x[a-f0-9]+$", value) is None:
            raise ValueError(r"must validate the regular expression /^0x[a-f0-9]+$/")
        return value

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagBitMaskResponse:
        """Build a TagBitMaskResponse from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagBitMaskResponse:
        """Build a TagBitMaskResponse from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagBitMaskResponse.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagBitMaskResponse) in the input: " + str(obj))
        return TagBitMaskResponse.parse_obj({
            "bit_mask_data": obj.get("bitMaskData")
        })
| 2,852 | 32.174419 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re  # noqa: F401
import json
from typing import Any, Dict, Optional
from pydantic import Extra, BaseModel
from lightly.openapi_generated.swagger_client.models.tag_change_data_arithmetics import TagChangeDataArithmetics
from lightly.openapi_generated.swagger_client.models.tag_change_data_initial import TagChangeDataInitial
from lightly.openapi_generated.swagger_client.models.tag_change_data_metadata import TagChangeDataMetadata
from lightly.openapi_generated.swagger_client.models.tag_change_data_rename import TagChangeDataRename
from lightly.openapi_generated.swagger_client.models.tag_change_data_sampler import TagChangeDataSampler
from lightly.openapi_generated.swagger_client.models.tag_change_data_samples import TagChangeDataSamples
from lightly.openapi_generated.swagger_client.models.tag_change_data_scatterplot import TagChangeDataScatterplot
from lightly.openapi_generated.swagger_client.models.tag_change_data_upsize import TagChangeDataUpsize
class TagChangeData(BaseModel):
    """Per-operation details attached to a tag change entry.

    Exactly which optional sub-object is populated depends on the kind of
    change; unknown payload keys are preserved in ``additional_properties``.
    """
    initial: Optional[TagChangeDataInitial] = None
    rename: Optional[TagChangeDataRename] = None
    upsize: Optional[TagChangeDataUpsize] = None
    arithmetics: Optional[TagChangeDataArithmetics] = None
    metadata: Optional[TagChangeDataMetadata] = None
    samples: Optional[TagChangeDataSamples] = None
    scatterplot: Optional[TagChangeDataScatterplot] = None
    sampler: Optional[TagChangeDataSampler] = None
    additional_properties: Dict[str, Any] = {}
    __properties = ["initial", "rename", "upsize", "arithmetics", "metadata", "samples", "scatterplot", "sampler"]

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeData:
        """Build a TagChangeData instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary."""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                              "additional_properties"
                          },
                          exclude_none=True)
        # serialize nested models through their own to_dict() so their
        # overrides are honoured instead of pydantic's default dump
        for field_name in ("initial", "rename", "upsize", "arithmetics",
                           "metadata", "samples", "scatterplot", "sampler"):
            nested = getattr(self, field_name)
            if nested:
                _dict[field_name] = nested.to_dict(by_alias=by_alias)
        # surface collected extra key-value pairs at the top level
        if self.additional_properties is not None:
            for extra_key, extra_value in self.additional_properties.items():
                _dict[extra_key] = extra_value
        return _dict

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeData:
        """Build a TagChangeData instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeData.parse_obj(obj)
        nested_types = {
            "initial": TagChangeDataInitial,
            "rename": TagChangeDataRename,
            "upsize": TagChangeDataUpsize,
            "arithmetics": TagChangeDataArithmetics,
            "metadata": TagChangeDataMetadata,
            "samples": TagChangeDataSamples,
            "scatterplot": TagChangeDataScatterplot,
            "sampler": TagChangeDataSampler,
        }
        _obj = TagChangeData.parse_obj({
            field_name: model.from_dict(obj.get(field_name)) if obj.get(field_name) is not None else None
            for field_name, model in nested_types.items()
        })
        # keep unknown payload keys instead of rejecting them
        for extra_key in obj.keys():
            if extra_key not in cls.__properties:
                _obj.additional_properties[extra_key] = obj.get(extra_key)
        return _obj
| 6,648 | 49.371212 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_arithmetics.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr
class TagChangeDataArithmetics(BaseModel):
    """Details stored for the ``arithmetics`` part of a tag change."""
    operation: StrictStr = Field(...)
    tag1: StrictStr = Field(...)
    tag2: StrictStr = Field(...)
    __properties = ["operation", "tag1", "tag2"]

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataArithmetics:
        """Build a TagChangeDataArithmetics instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataArithmetics:
        """Build a TagChangeDataArithmetics instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataArithmetics.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagChangeDataArithmetics) in the input: " + str(obj))
        return TagChangeDataArithmetics.parse_obj({
            "operation": obj.get("operation"),
            "tag1": obj.get("tag1"),
            "tag2": obj.get("tag2")
        })
| 2,663 | 31.096386 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_initial.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, constr, validator
class TagChangeDataInitial(BaseModel):
    """Details stored for the ``initial`` part of a tag change."""
    run_id: Optional[constr(strict=True)] = Field(None, alias="runId", description="MongoDB ObjectId")
    __properties = ["runId"]

    @validator('run_id')
    def run_id_validate_regular_expression(cls, value):
        """Ensure ``runId``, when given, is a 24-character lowercase hex ObjectId."""
        if value is not None and re.match(r"^[a-f0-9]{24}$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataInitial:
        """Build a TagChangeDataInitial instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataInitial:
        """Build a TagChangeDataInitial instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataInitial.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagChangeDataInitial) in the input: " + str(obj))
        return TagChangeDataInitial.parse_obj({
            "run_id": obj.get("runId")
        })
| 2,900 | 31.595506 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_metadata.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Any, Dict, Union
from pydantic import Extra, BaseModel, Field, confloat, conint
from lightly.openapi_generated.swagger_client.models.tag_change_data_operation_method import TagChangeDataOperationMethod
class TagChangeDataMetadata(BaseModel):
    """Details stored for the ``metadata`` part of a tag change."""
    method: TagChangeDataOperationMethod = Field(...)
    count: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    added: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    removed: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    changes: Dict[str, Any] = Field(...)
    __properties = ["method", "count", "added", "removed", "changes"]

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataMetadata:
        """Build a TagChangeDataMetadata instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataMetadata:
        """Build a TagChangeDataMetadata instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataMetadata.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagChangeDataMetadata) in the input: " + str(obj))
        return TagChangeDataMetadata.parse_obj({
            "method": obj.get("method"),
            "count": obj.get("count"),
            "added": obj.get("added"),
            "removed": obj.get("removed"),
            "changes": obj.get("changes")
        })
| 3,156 | 34.875 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_operation_method.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class TagChangeDataOperationMethod(str, Enum):
    """Method of a tag change operation (selected / added / removed)."""

    SELECTED = 'selected'
    ADDED = 'added'
    REMOVED = 'removed'

    @classmethod
    def from_json(cls, json_str: str) -> 'TagChangeDataOperationMethod':
        """Build a TagChangeDataOperationMethod member from a JSON string."""
        return cls(json.loads(json_str))
| 1,018 | 22.159091 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_rename.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr
class TagChangeDataRename(BaseModel):
    """Details stored for the ``rename`` part of a tag change."""
    # `from` is a Python keyword, so the attribute is named var_from and aliased
    var_from: StrictStr = Field(..., alias="from")
    to: StrictStr = Field(...)
    __properties = ["from", "to"]

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataRename:
        """Build a TagChangeDataRename instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataRename:
        """Build a TagChangeDataRename instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataRename.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagChangeDataRename) in the input: " + str(obj))
        return TagChangeDataRename.parse_obj({
            "var_from": obj.get("from"),
            "to": obj.get("to")
        })
| 2,534 | 30.296296 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_sampler.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr
class TagChangeDataSampler(BaseModel):
    """Details stored for the ``sampler`` part of a tag change."""
    method: StrictStr = Field(...)
    __properties = ["method"]

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataSampler:
        """Build a TagChangeDataSampler instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataSampler:
        """Build a TagChangeDataSampler instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataSampler.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagChangeDataSampler) in the input: " + str(obj))
        return TagChangeDataSampler.parse_obj({
            "method": obj.get("method")
        })
| 2,459 | 30.139241 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_samples.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Union
from pydantic import Extra, BaseModel, Field, confloat, conint
from lightly.openapi_generated.swagger_client.models.tag_change_data_operation_method import TagChangeDataOperationMethod
class TagChangeDataSamples(BaseModel):
    """Details stored for the ``samples`` part of a tag change."""
    method: TagChangeDataOperationMethod = Field(...)
    count: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    added: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    removed: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    __properties = ["method", "count", "added", "removed"]

    class Config:
        """Pydantic model configuration."""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Render the model as a pretty-printed string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataSamples:
        """Build a TagChangeDataSamples instance from a JSON string."""
        parsed = json.loads(json_str)
        return cls.from_dict(parsed)

    def to_dict(self, by_alias: bool = False):
        """Serialize the model to a plain dictionary, dropping None-valued fields."""
        return self.dict(by_alias=by_alias, exclude={}, exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataSamples:
        """Build a TagChangeDataSamples instance from a plain dictionary."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataSamples.parse_obj(obj)
        # reject payloads that carry keys this model does not define
        unknown_keys = [key for key in obj.keys() if key not in cls.__properties]
        if unknown_keys:
            raise ValueError("Error due to additional fields (not defined in TagChangeDataSamples) in the input: " + str(obj))
        return TagChangeDataSamples.parse_obj({
            "method": obj.get("method"),
            "count": obj.get("count"),
            "added": obj.get("added"),
            "removed": obj.get("removed")
        })
| 3,041 | 34.372093 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_scatterplot.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional, Union
from pydantic import Extra, BaseModel, Field, StrictStr, confloat, conint
from lightly.openapi_generated.swagger_client.models.tag_change_data_operation_method import TagChangeDataOperationMethod
class TagChangeDataScatterplot(BaseModel):
    """
    TagChangeDataScatterplot
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    method: TagChangeDataOperationMethod = Field(...)
    view: Optional[StrictStr] = None
    count: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    added: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    removed: Union[confloat(ge=0, strict=True), conint(ge=0, strict=True)] = Field(...)
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["method", "view", "count", "added", "removed"]
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataScatterplot:
        """Create an instance of TagChangeDataScatterplot from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        # None-valued (unset optional) fields are omitted so they serialize as "absent".
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataScatterplot:
        """Create an instance of TagChangeDataScatterplot from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataScatterplot.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagChangeDataScatterplot) in the input: " + str(obj))
        _obj = TagChangeDataScatterplot.parse_obj({
            "method": obj.get("method"),
            "view": obj.get("view"),
            "count": obj.get("count"),
            "added": obj.get("added"),
            "removed": obj.get("removed")
        })
        return _obj
| 3,180 | 35.147727 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_data_upsize.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional, Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt, constr, validator
class TagChangeDataUpsize(BaseModel):
    """
    TagChangeDataUpsize
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    run_id: Optional[constr(strict=True)] = Field(None, alias="runId", description="MongoDB ObjectId")
    # "from" is a reserved keyword in Python, so the field is named var_from
    # and mapped to the JSON property "from" via its alias.
    var_from: Union[StrictFloat, StrictInt] = Field(..., alias="from")
    to: Union[StrictFloat, StrictInt] = Field(...)
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["runId", "from", "to"]
    @validator('run_id')
    def run_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagChangeDataUpsize:
        """Create an instance of TagChangeDataUpsize from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        # None-valued (unset optional) fields are omitted so they serialize as "absent".
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeDataUpsize:
        """Create an instance of TagChangeDataUpsize from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeDataUpsize.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagChangeDataUpsize) in the input: " + str(obj))
        _obj = TagChangeDataUpsize.parse_obj({
            "run_id": obj.get("runId"),
            "var_from": obj.get("from"),
            "to": obj.get("to")
        })
        return _obj
| 3,132 | 32.688172 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_change_entry.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr, conint
from lightly.openapi_generated.swagger_client.models.tag_change_data import TagChangeData
from lightly.openapi_generated.swagger_client.models.tag_creator import TagCreator
class TagChangeEntry(BaseModel):
    """
    TagChangeEntry
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    user_id: StrictStr = Field(..., alias="userId")
    creator: TagCreator = Field(...)
    ts: conint(strict=True, ge=0) = Field(..., description="unix timestamp in milliseconds")
    changes: TagChangeData = Field(...)
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["userId", "creator", "ts", "changes"]
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagChangeEntry:
        """Create an instance of TagChangeEntry from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of changes
        if self.changes:
            # Field name and alias are both "changes", hence the identical ternary branches.
            _dict['changes' if by_alias else 'changes'] = self.changes.to_dict(by_alias=by_alias)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagChangeEntry:
        """Create an instance of TagChangeEntry from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagChangeEntry.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagChangeEntry) in the input: " + str(obj))
        _obj = TagChangeEntry.parse_obj({
            "user_id": obj.get("userId"),
            "creator": obj.get("creator"),
            "ts": obj.get("ts"),
            "changes": TagChangeData.from_dict(obj.get("changes")) if obj.get("changes") is not None else None
        })
        return _obj
| 3,199 | 34.555556 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_create_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictInt, constr, validator
from lightly.openapi_generated.swagger_client.models.tag_change_data import TagChangeData
from lightly.openapi_generated.swagger_client.models.tag_creator import TagCreator
class TagCreateRequest(BaseModel):
    """
    TagCreateRequest
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    name: constr(strict=True, min_length=3) = Field(..., description="The name of the tag")
    prev_tag_id: constr(strict=True) = Field(..., alias="prevTagId", description="MongoDB ObjectId")
    query_tag_id: Optional[constr(strict=True)] = Field(None, alias="queryTagId", description="MongoDB ObjectId")
    preselected_tag_id: Optional[constr(strict=True)] = Field(None, alias="preselectedTagId", description="MongoDB ObjectId")
    bit_mask_data: constr(strict=True) = Field(..., alias="bitMaskData", description="BitMask as a base16 (hex) string")
    tot_size: StrictInt = Field(..., alias="totSize")
    creator: Optional[TagCreator] = None
    changes: Optional[TagChangeData] = None
    run_id: Optional[constr(strict=True)] = Field(None, alias="runId", description="MongoDB ObjectId")
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["name", "prevTagId", "queryTagId", "preselectedTagId", "bitMaskData", "totSize", "creator", "changes", "runId"]
    @validator('name')
    def name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value
    @validator('prev_tag_id')
    def prev_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('query_tag_id')
    def query_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: None is allowed and skips the pattern check.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('preselected_tag_id')
    def preselected_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: None is allowed and skips the pattern check.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('bit_mask_data')
    def bit_mask_data_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^0x[a-f0-9]+$", value):
            raise ValueError(r"must validate the regular expression /^0x[a-f0-9]+$/")
        return value
    @validator('run_id')
    def run_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: None is allowed and skips the pattern check.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagCreateRequest:
        """Create an instance of TagCreateRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        # None-valued (unset optional) fields are omitted so they serialize as "absent".
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of changes
        if self.changes:
            # Field name and alias are both "changes", hence the identical ternary branches.
            _dict['changes' if by_alias else 'changes'] = self.changes.to_dict(by_alias=by_alias)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagCreateRequest:
        """Create an instance of TagCreateRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagCreateRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagCreateRequest) in the input: " + str(obj))
        _obj = TagCreateRequest.parse_obj({
            "name": obj.get("name"),
            "prev_tag_id": obj.get("prevTagId"),
            "query_tag_id": obj.get("queryTagId"),
            "preselected_tag_id": obj.get("preselectedTagId"),
            "bit_mask_data": obj.get("bitMaskData"),
            "tot_size": obj.get("totSize"),
            "creator": obj.get("creator"),
            "changes": TagChangeData.from_dict(obj.get("changes")) if obj.get("changes") is not None else None,
            "run_id": obj.get("runId")
        })
        return _obj
| 6,141 | 39.675497 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_creator.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class TagCreator(str, Enum):
    """Enumeration of the possible creators of a tag (user frontends,
    pip/worker clients, and the built-in samplers)."""

    # Allowed enum values; each member's value equals its name.
    UNKNOWN = 'UNKNOWN'
    USER_WEBAPP = 'USER_WEBAPP'
    USER_PIP = 'USER_PIP'
    USER_PIP_LIGHTLY_MAGIC = 'USER_PIP_LIGHTLY_MAGIC'
    USER_WORKER = 'USER_WORKER'
    SAMPLER_ACTIVE_LEARNING = 'SAMPLER_ACTIVE_LEARNING'
    SAMPLER_CORAL = 'SAMPLER_CORAL'
    SAMPLER_CORESET = 'SAMPLER_CORESET'
    SAMPLER_RANDOM = 'SAMPLER_RANDOM'

    @classmethod
    def from_json(cls, json_str: str) -> 'TagCreator':
        """Deserialize a JSON-encoded string into a TagCreator member."""
        return cls(json.loads(json_str))
| 1,196 | 22.94 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import List, Optional
from pydantic import Extra, BaseModel, Field, StrictInt, conint, conlist, constr, validator
from lightly.openapi_generated.swagger_client.models.tag_change_entry import TagChangeEntry
class TagData(BaseModel):
    """
    TagData
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    dataset_id: constr(strict=True) = Field(..., alias="datasetId", description="MongoDB ObjectId")
    # Required but nullable: Field(...) makes the key mandatory while Optional allows null.
    prev_tag_id: Optional[constr(strict=True)] = Field(..., alias="prevTagId", description="MongoObjectID or null. Generally: The prevTagId is this tag's parent, i.e. it is a superset of this tag. Sampler: The prevTagId is the initial-tag if there was no preselectedTagId, otherwise, it's the preselectedTagId. ")
    query_tag_id: Optional[constr(strict=True)] = Field(None, alias="queryTagId", description="MongoDB ObjectId")
    preselected_tag_id: Optional[constr(strict=True)] = Field(None, alias="preselectedTagId", description="MongoDB ObjectId")
    name: constr(strict=True, min_length=3) = Field(..., description="The name of the tag")
    bit_mask_data: constr(strict=True) = Field(..., alias="bitMaskData", description="BitMask as a base16 (hex) string")
    tot_size: StrictInt = Field(..., alias="totSize")
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    last_modified_at: Optional[conint(strict=True, ge=0)] = Field(None, alias="lastModifiedAt", description="unix timestamp in milliseconds")
    changes: Optional[conlist(TagChangeEntry)] = None
    run_id: Optional[constr(strict=True)] = Field(None, alias="runId", description="MongoDB ObjectId")
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["id", "datasetId", "prevTagId", "queryTagId", "preselectedTagId", "name", "bitMaskData", "totSize", "createdAt", "lastModifiedAt", "changes", "runId"]
    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('dataset_id')
    def dataset_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('prev_tag_id')
    def prev_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Nullable field: None is allowed and skips the pattern check.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('query_tag_id')
    def query_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('preselected_tag_id')
    def preselected_tag_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    @validator('name')
    def name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value
    @validator('bit_mask_data')
    def bit_mask_data_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^0x[a-f0-9]+$", value):
            raise ValueError(r"must validate the regular expression /^0x[a-f0-9]+$/")
        return value
    @validator('run_id')
    def run_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagData:
        """Create an instance of TagData from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of each item in changes (list)
        _items = []
        if self.changes:
            for _item in self.changes:
                if _item:
                    _items.append(_item.to_dict(by_alias=by_alias))
            _dict['changes' if by_alias else 'changes'] = _items
        # set to None if prev_tag_id (nullable) is None
        # and __fields_set__ contains the field
        # (exclude_none above stripped it, but an explicitly-set null must be re-emitted)
        if self.prev_tag_id is None and "prev_tag_id" in self.__fields_set__:
            _dict['prevTagId' if by_alias else 'prev_tag_id'] = None
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagData:
        """Create an instance of TagData from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagData.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagData) in the input: " + str(obj))
        _obj = TagData.parse_obj({
            "id": obj.get("id"),
            "dataset_id": obj.get("datasetId"),
            "prev_tag_id": obj.get("prevTagId"),
            "query_tag_id": obj.get("queryTagId"),
            "preselected_tag_id": obj.get("preselectedTagId"),
            "name": obj.get("name"),
            "bit_mask_data": obj.get("bitMaskData"),
            "tot_size": obj.get("totSize"),
            "created_at": obj.get("createdAt"),
            "last_modified_at": obj.get("lastModifiedAt"),
            "changes": [TagChangeEntry.from_dict(_item) for _item in obj.get("changes")] if obj.get("changes") is not None else None,
            "run_id": obj.get("runId")
        })
        return _obj
| 7,846 | 42.115385 | 314 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_update_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, constr, validator
from lightly.openapi_generated.swagger_client.models.tag_change_data import TagChangeData
from lightly.openapi_generated.swagger_client.models.tag_creator import TagCreator
class TagUpdateRequest(BaseModel):
    """
    TagUpdateRequest
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    update_creator: Optional[TagCreator] = Field(None, alias="updateCreator")
    name: constr(strict=True, min_length=3) = Field(..., description="The name of the tag")
    bit_mask_data: Optional[constr(strict=True)] = Field(None, alias="bitMaskData", description="BitMask as a base16 (hex) string")
    changes: Optional[TagChangeData] = None
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["updateCreator", "name", "bitMaskData", "changes"]
    @validator('name')
    def name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value
    @validator('bit_mask_data')
    def bit_mask_data_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: None is allowed and skips the pattern check.
        if value is None:
            return value
        if not re.match(r"^0x[a-f0-9]+$", value):
            raise ValueError(r"must validate the regular expression /^0x[a-f0-9]+$/")
        return value
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagUpdateRequest:
        """Create an instance of TagUpdateRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        # None-valued (unset optional) fields are omitted so they serialize as "absent".
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        # override the default output from pydantic by calling `to_dict()` of changes
        if self.changes:
            # Field name and alias are both "changes", hence the identical ternary branches.
            _dict['changes' if by_alias else 'changes'] = self.changes.to_dict(by_alias=by_alias)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagUpdateRequest:
        """Create an instance of TagUpdateRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagUpdateRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagUpdateRequest) in the input: " + str(obj))
        _obj = TagUpdateRequest.parse_obj({
            "update_creator": obj.get("updateCreator"),
            "name": obj.get("name"),
            "bit_mask_data": obj.get("bitMaskData"),
            "changes": TagChangeData.from_dict(obj.get("changes")) if obj.get("changes") is not None else None
        })
        return _obj
| 4,083 | 37.168224 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/tag_upsize_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, constr, validator
from lightly.openapi_generated.swagger_client.models.tag_creator import TagCreator
class TagUpsizeRequest(BaseModel):
    """
    TagUpsizeRequest
    """
    # Auto-generated from the Lightly OpenAPI spec — regenerate rather than edit by hand.
    upsize_tag_name: constr(strict=True, min_length=3) = Field(..., alias="upsizeTagName", description="The name of the tag")
    upsize_tag_creator: TagCreator = Field(..., alias="upsizeTagCreator")
    run_id: Optional[constr(strict=True)] = Field(None, alias="runId", description="MongoDB ObjectId")
    # JSON property names (aliases) declared for this model; from_dict rejects any other key.
    __properties = ["upsizeTagName", "upsizeTagCreator", "runId"]
    @validator('upsize_tag_name')
    def upsize_tag_name_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        if not re.match(r"^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$", value):
            raise ValueError(r"must validate the regular expression /^[a-zA-Z0-9][a-zA-Z0-9 .:;=@_-]+$/")
        return value
    @validator('run_id')
    def run_id_validate_regular_expression(cls, value):
        """Validates the regular expression"""
        # Optional field: None is allowed and skips the pattern check.
        if value is None:
            return value
        if not re.match(r"^[a-f0-9]{24}$", value):
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value
    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid
    def to_str(self, by_alias: bool = False) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.dict(by_alias=by_alias))
    def to_json(self, by_alias: bool = False) -> str:
        """Returns the JSON representation of the model"""
        return json.dumps(self.to_dict(by_alias=by_alias))
    @classmethod
    def from_json(cls, json_str: str) -> TagUpsizeRequest:
        """Create an instance of TagUpsizeRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))
    def to_dict(self, by_alias: bool = False):
        """Returns the dictionary representation of the model"""
        # None-valued (unset optional) fields are omitted so they serialize as "absent".
        _dict = self.dict(by_alias=by_alias,
                          exclude={
                          },
                          exclude_none=True)
        return _dict
    @classmethod
    def from_dict(cls, obj: dict) -> TagUpsizeRequest:
        """Create an instance of TagUpsizeRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TagUpsizeRequest.parse_obj(obj)
        # raise errors for additional fields in the input
        for _key in obj.keys():
            if _key not in cls.__properties:
                raise ValueError("Error due to additional fields (not defined in TagUpsizeRequest) in the input: " + str(obj))
        _obj = TagUpsizeRequest.parse_obj({
            "upsize_tag_name": obj.get("upsizeTagName"),
            "upsize_tag_creator": obj.get("upsizeTagCreator"),
            "run_id": obj.get("runId")
        })
        return _obj
| 3,648 | 35.128713 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/task_type.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class TaskType(str, Enum):
    """The type of the prediction or label task."""

    # Allowed enum values; each member's value equals its name.
    CLASSIFICATION = 'CLASSIFICATION'
    OBJECT_DETECTION = 'OBJECT_DETECTION'
    SEMANTIC_SEGMENTATION = 'SEMANTIC_SEGMENTATION'
    INSTANCE_SEGMENTATION = 'INSTANCE_SEGMENTATION'
    KEYPOINT_DETECTION = 'KEYPOINT_DETECTION'

    @classmethod
    def from_json(cls, json_str: str) -> 'TaskType':
        """Deserialize a JSON-encoded string into a TaskType member."""
        return cls(json.loads(json_str))
| 1,110 | 23.152174 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/team_basic_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr, constr, validator
from lightly.openapi_generated.swagger_client.models.team_role import TeamRole
class TeamBasicData(BaseModel):
    """Basic team record: id, display name, and the caller's role in the team."""
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    name: StrictStr = Field(...)
    role: TeamRole = Field(...)
    __properties = ["id", "name", "role"]

    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Reject any id that is not a 24-character lowercase-hex ObjectId."""
        if re.match(r"^[a-f0-9]{24}$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TeamBasicData:
        """Create an instance of TeamBasicData from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TeamBasicData:
        """Create an instance of TeamBasicData from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TeamBasicData.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in TeamBasicData) in the input: " + str(obj))
        return TeamBasicData.parse_obj({
            "id": obj.get("id"),
            "name": obj.get("name"),
            "role": obj.get("role")
        })
| 2,955 | 31.483516 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/team_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr, conint, constr, validator
class TeamData(BaseModel):
    """Full team record: id, display name, creation and validity timestamps."""
    id: constr(strict=True) = Field(..., description="MongoDB ObjectId")
    name: StrictStr = Field(...)
    created_at: conint(strict=True, ge=0) = Field(..., alias="createdAt", description="unix timestamp in milliseconds")
    valid_until: conint(strict=True, ge=0) = Field(..., alias="validUntil", description="unix timestamp in milliseconds")
    __properties = ["id", "name", "createdAt", "validUntil"]

    @validator('id')
    def id_validate_regular_expression(cls, value):
        """Reject any id that is not a 24-character lowercase-hex ObjectId."""
        if re.match(r"^[a-f0-9]{24}$", value) is None:
            raise ValueError(r"must validate the regular expression /^[a-f0-9]{24}$/")
        return value

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> TeamData:
        """Create an instance of TeamData from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> TeamData:
        """Create an instance of TeamData from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return TeamData.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in TeamData) in the input: " + str(obj))
        return TeamData.parse_obj({
            "id": obj.get("id"),
            "name": obj.get("name"),
            "created_at": obj.get("createdAt"),
            "valid_until": obj.get("validUntil")
        })
| 3,129 | 33.021739 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/team_role.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class TeamRole(str, Enum):
    """Enumeration of roles a user can hold within a team.

    Each member's value is the exact wire string used by the Lightly API.
    """

    OWNER = 'OWNER'
    ADMIN = 'ADMIN'
    MEMBER = 'MEMBER'
    SERVICEACCOUNT = 'SERVICEACCOUNT'

    @classmethod
    def from_json(cls, json_str: str) -> 'TeamRole':
        """Build a TeamRole member from a JSON-encoded string value."""
        return cls(json.loads(json_str))
| 948 | 20.088889 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/trigger2d_embedding_job_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field
from lightly.openapi_generated.swagger_client.models.dimensionality_reduction_method import DimensionalityReductionMethod
class Trigger2dEmbeddingJobRequest(BaseModel):
    """Request payload selecting the dimensionality-reduction method for a 2D embedding job."""
    dimensionality_reduction_method: DimensionalityReductionMethod = Field(..., alias="dimensionalityReductionMethod")
    __properties = ["dimensionalityReductionMethod"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> Trigger2dEmbeddingJobRequest:
        """Create an instance of Trigger2dEmbeddingJobRequest from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> Trigger2dEmbeddingJobRequest:
        """Create an instance of Trigger2dEmbeddingJobRequest from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return Trigger2dEmbeddingJobRequest.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in Trigger2dEmbeddingJobRequest) in the input: " + str(obj))
        return Trigger2dEmbeddingJobRequest.parse_obj({
            "dimensionality_reduction_method": obj.get("dimensionalityReductionMethod")
        })
| 2,797 | 33.975 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/update_docker_worker_registry_entry_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional
from pydantic import Extra, BaseModel, Field, StrictStr
from lightly.openapi_generated.swagger_client.models.docker_worker_state import DockerWorkerState
class UpdateDockerWorkerRegistryEntryRequest(BaseModel):
    """Request payload updating a worker registry entry's state and optional docker version."""
    state: DockerWorkerState = Field(...)
    docker_version: Optional[StrictStr] = Field(None, alias="dockerVersion")
    __properties = ["state", "dockerVersion"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> UpdateDockerWorkerRegistryEntryRequest:
        """Create an instance of UpdateDockerWorkerRegistryEntryRequest from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> UpdateDockerWorkerRegistryEntryRequest:
        """Create an instance of UpdateDockerWorkerRegistryEntryRequest from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return UpdateDockerWorkerRegistryEntryRequest.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in UpdateDockerWorkerRegistryEntryRequest) in the input: " + str(obj))
        return UpdateDockerWorkerRegistryEntryRequest.parse_obj({
            "state": obj.get("state"),
            "docker_version": obj.get("dockerVersion")
        })
| 2,900 | 34.378049 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/update_team_membership_request.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field
from lightly.openapi_generated.swagger_client.models.team_role import TeamRole
class UpdateTeamMembershipRequest(BaseModel):
    """Request payload assigning a new role to a team member."""
    role: TeamRole = Field(...)
    __properties = ["role"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> UpdateTeamMembershipRequest:
        """Create an instance of UpdateTeamMembershipRequest from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> UpdateTeamMembershipRequest:
        """Create an instance of UpdateTeamMembershipRequest from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return UpdateTeamMembershipRequest.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in UpdateTeamMembershipRequest) in the input: " + str(obj))
        return UpdateTeamMembershipRequest.parse_obj({
            "role": obj.get("role")
        })
| 2,581 | 31.275 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/user_type.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from enum import Enum
from aenum import no_arg # type: ignore
class UserType(str, Enum):
    """Enumeration of account tiers for a user.

    Each member's value is the exact wire string used by the Lightly API.
    """

    FREE = 'FREE'
    PROFESSIONAL = 'PROFESSIONAL'
    ENTERPRISE = 'ENTERPRISE'
    ADMIN = 'ADMIN'

    @classmethod
    def from_json(cls, json_str: str) -> 'UserType':
        """Build a UserType member from a JSON-encoded string value."""
        return cls(json.loads(json_str))
| 950 | 20.133333 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/video_frame_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from typing import Optional, Union
from pydantic import Extra, BaseModel, Field, StrictFloat, StrictInt, StrictStr
class VideoFrameData(BaseModel):
    """Provenance of a frame extracted from a video: source file, frame index, and timestamp."""
    source_video: Optional[StrictStr] = Field(None, alias="sourceVideo", description="Name of the source video.")
    source_video_frame_index: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="sourceVideoFrameIndex", description="Index of the frame in the source video.")
    source_video_frame_timestamp: Optional[Union[StrictFloat, StrictInt]] = Field(None, alias="sourceVideoFrameTimestamp", description="Timestamp of the frame in the source video.")
    __properties = ["sourceVideo", "sourceVideoFrameIndex", "sourceVideoFrameTimestamp"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> VideoFrameData:
        """Create an instance of VideoFrameData from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> VideoFrameData:
        """Create an instance of VideoFrameData from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return VideoFrameData.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in VideoFrameData) in the input: " + str(obj))
        return VideoFrameData.parse_obj({
            "source_video": obj.get("sourceVideo"),
            "source_video_frame_index": obj.get("sourceVideoFrameIndex"),
            "source_video_frame_timestamp": obj.get("sourceVideoFrameTimestamp")
        })
| 3,120 | 36.60241 | 220 | py |
lightly | lightly-master/lightly/openapi_generated/swagger_client/models/write_csv_url_data.py | # coding: utf-8
"""
Lightly API
Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: support@lightly.ai
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from pydantic import Extra, BaseModel, Field, StrictStr
class WriteCSVUrlData(BaseModel):
    """Pair of a signed upload URL and the embedding id it belongs to."""
    signed_write_url: StrictStr = Field(..., alias="signedWriteUrl")
    embedding_id: StrictStr = Field(..., alias="embeddingId")
    __properties = ["signedWriteUrl", "embeddingId"]

    class Config:
        """Pydantic configuration"""
        allow_population_by_field_name = True
        validate_assignment = True
        use_enum_values = True
        extra = Extra.forbid

    def to_str(self, by_alias: bool = False) -> str:
        """Pretty-print the model as a string."""
        return pprint.pformat(self.dict(by_alias=by_alias))

    def to_json(self, by_alias: bool = False) -> str:
        """Serialize the model to a JSON string."""
        return json.dumps(self.to_dict(by_alias=by_alias))

    @classmethod
    def from_json(cls, json_str: str) -> WriteCSVUrlData:
        """Create an instance of WriteCSVUrlData from a JSON string."""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self, by_alias: bool = False):
        """Convert the model to a plain dictionary, dropping None values."""
        return self.dict(by_alias=by_alias,
                         exclude={
                         },
                         exclude_none=True)

    @classmethod
    def from_dict(cls, obj: dict) -> WriteCSVUrlData:
        """Create an instance of WriteCSVUrlData from a dict, rejecting unknown keys."""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return WriteCSVUrlData.parse_obj(obj)
        # any key outside __properties is an error (extra = forbid semantics)
        if any(key not in cls.__properties for key in obj):
            raise ValueError("Error due to additional fields (not defined in WriteCSVUrlData) in the input: " + str(obj))
        return WriteCSVUrlData.parse_obj({
            "signed_write_url": obj.get("signedWriteUrl"),
            "embedding_id": obj.get("embeddingId")
        })
| 2,603 | 31.148148 | 220 | py |
lightly | lightly-master/lightly/transforms/__init__.py | """The lightly.transforms package transforms for various self-supervised learning
methods.
It also contains some additional transforms that are not part of torchvisions
transforms.
"""
# Copyright (c) 2020. Lightly AG and its affiliates.
# All Rights Reserved
from lightly.transforms.dino_transform import DINOTransform, DINOViewTransform
from lightly.transforms.fast_siam_transform import FastSiamTransform
from lightly.transforms.gaussian_blur import GaussianBlur
from lightly.transforms.jigsaw import Jigsaw
from lightly.transforms.mae_transform import MAETransform
from lightly.transforms.moco_transform import MoCoV1Transform, MoCoV2Transform
from lightly.transforms.msn_transform import MSNTransform, MSNViewTransform
from lightly.transforms.pirl_transform import PIRLTransform
from lightly.transforms.rotation import (
RandomRotate,
RandomRotateDegrees,
random_rotation_transform,
)
from lightly.transforms.simclr_transform import SimCLRTransform, SimCLRViewTransform
from lightly.transforms.simsiam_transform import SimSiamTransform, SimSiamViewTransform
from lightly.transforms.smog_transform import SMoGTransform, SmoGViewTransform
from lightly.transforms.solarize import RandomSolarization
from lightly.transforms.swav_transform import SwaVTransform, SwaVViewTransform
from lightly.transforms.vicreg_transform import VICRegTransform, VICRegViewTransform
from lightly.transforms.vicregl_transform import VICRegLTransform, VICRegLViewTransform
| 1,468 | 46.387097 | 87 | py |
lightly | lightly-master/lightly/transforms/dino_transform.py | from typing import Optional, Tuple, Union
import PIL
import torchvision.transforms as T
from PIL.Image import Image
from torch import Tensor
from lightly.transforms.gaussian_blur import GaussianBlur
from lightly.transforms.multi_view_transform import MultiViewTransform
from lightly.transforms.rotation import random_rotation_transform
from lightly.transforms.solarize import RandomSolarization
from lightly.transforms.utils import IMAGENET_NORMALIZE
class DINOTransform(MultiViewTransform):
    """Implements the global and local view augmentations for DINO [0].

    Input to this transform:
        PIL Image or Tensor.

    Output of this transform:
        List of Tensor of length 2 * global + n_local_views. (8 by default)

    Applies the following augmentations by default:
        - Random resized crop
        - Random horizontal flip
        - Color jitter
        - Random gray scale
        - Gaussian blur
        - Random solarization
        - ImageNet normalization

    This class generates two global and a user defined number of local views
    for each image in a batch. The code is adapted from [1].

    - [0]: DINO, 2021, https://arxiv.org/abs/2104.14294
    - [1]: https://github.com/facebookresearch/dino

    Attributes:
        global_crop_size:
            Crop size of the global views.
        global_crop_scale:
            Tuple of min and max scales relative to global_crop_size.
        local_crop_size:
            Crop size of the local views.
        local_crop_scale:
            Tuple of min and max scales relative to local_crop_size.
        n_local_views:
            Number of generated local views.
        hf_prob:
            Probability that horizontal flip is applied.
        vf_prob:
            Probability that vertical flip is applied.
        rr_prob:
            Probability that random rotation is applied.
        rr_degrees:
            Range of degrees to select from for random rotation. If rr_degrees is None,
            images are rotated by 90 degrees. If rr_degrees is a (min, max) tuple,
            images are rotated by a random angle in [min, max]. If rr_degrees is a
            single number, images are rotated by a random angle in
            [-rr_degrees, +rr_degrees]. All rotations are counter-clockwise.
        cj_prob:
            Probability that color jitter is applied.
        cj_strength:
            Strength of the color jitter. `cj_bright`, `cj_contrast`, `cj_sat`, and
            `cj_hue` are multiplied by this value.
        cj_bright:
            How much to jitter brightness.
        cj_contrast:
            How much to jitter constrast.
        cj_sat:
            How much to jitter saturation.
        cj_hue:
            How much to jitter hue.
        random_gray_scale:
            Probability of conversion to grayscale.
        gaussian_blur:
            Tuple of probabilities to apply gaussian blur on the different
            views. The input is ordered as follows:
            (global_view_0, global_view_1, local_views)
        kernel_size:
            Will be deprecated in favor of `sigmas` argument. If set, the old behavior applies and `sigmas` is ignored.
            Used to calculate sigma of gaussian blur with kernel_size * input_size.
        kernel_scale:
            Old argument. Value is deprecated in favor of sigmas. If set, the old behavior applies and `sigmas` is ignored.
            Used to scale the `kernel_size` of a factor of `kernel_scale`
        sigmas:
            Tuple of min and max value from which the std of the gaussian kernel is sampled.
            Is ignored if `kernel_size` is set.
        solarization_prob:
            Probability to apply solarization on the second global view.
        normalize:
            Dictionary with 'mean' and 'std' for torchvision.transforms.Normalize.
    """

    def __init__(
        self,
        global_crop_size: int = 224,
        global_crop_scale: Tuple[float, float] = (0.4, 1.0),
        local_crop_size: int = 96,
        local_crop_scale: Tuple[float, float] = (0.05, 0.4),
        n_local_views: int = 6,
        hf_prob: float = 0.5,
        vf_prob: float = 0,
        rr_prob: float = 0,
        rr_degrees: Union[None, float, Tuple[float, float]] = None,
        cj_prob: float = 0.8,
        cj_strength: float = 0.5,
        cj_bright: float = 0.8,
        cj_contrast: float = 0.8,
        cj_sat: float = 0.4,
        cj_hue: float = 0.2,
        random_gray_scale: float = 0.2,
        gaussian_blur: Tuple[float, float, float] = (1.0, 0.1, 0.5),
        kernel_size: Optional[float] = None,
        kernel_scale: Optional[float] = None,
        sigmas: Tuple[float, float] = (0.1, 2),
        solarization_prob: float = 0.2,
        normalize: Union[None, dict] = IMAGENET_NORMALIZE,
    ):
        # first global crop: blur probability gaussian_blur[0], no solarization
        global_transform_0 = DINOViewTransform(
            crop_size=global_crop_size,
            crop_scale=global_crop_scale,
            hf_prob=hf_prob,
            vf_prob=vf_prob,
            rr_prob=rr_prob,
            rr_degrees=rr_degrees,
            cj_prob=cj_prob,
            cj_strength=cj_strength,
            cj_bright=cj_bright,
            cj_contrast=cj_contrast,
            cj_hue=cj_hue,
            cj_sat=cj_sat,
            random_gray_scale=random_gray_scale,
            gaussian_blur=gaussian_blur[0],
            kernel_size=kernel_size,
            kernel_scale=kernel_scale,
            sigmas=sigmas,
            solarization_prob=0,
            normalize=normalize,
        )

        # second global crop: blur probability gaussian_blur[1], the only view
        # with solarization enabled
        global_transform_1 = DINOViewTransform(
            crop_size=global_crop_size,
            crop_scale=global_crop_scale,
            hf_prob=hf_prob,
            vf_prob=vf_prob,
            rr_prob=rr_prob,
            rr_degrees=rr_degrees,
            cj_prob=cj_prob,
            # Bug fix: cj_strength was previously not forwarded here, so the
            # second global view silently used the default strength instead of
            # the user-supplied value.
            cj_strength=cj_strength,
            cj_bright=cj_bright,
            cj_contrast=cj_contrast,
            cj_hue=cj_hue,
            cj_sat=cj_sat,
            random_gray_scale=random_gray_scale,
            gaussian_blur=gaussian_blur[1],
            kernel_size=kernel_size,
            kernel_scale=kernel_scale,
            sigmas=sigmas,
            solarization_prob=solarization_prob,
            normalize=normalize,
        )

        # transformation for the local small crops: blur probability
        # gaussian_blur[2], no solarization
        local_transform = DINOViewTransform(
            crop_size=local_crop_size,
            crop_scale=local_crop_scale,
            hf_prob=hf_prob,
            vf_prob=vf_prob,
            rr_prob=rr_prob,
            rr_degrees=rr_degrees,
            cj_prob=cj_prob,
            cj_strength=cj_strength,
            cj_bright=cj_bright,
            cj_contrast=cj_contrast,
            cj_hue=cj_hue,
            cj_sat=cj_sat,
            random_gray_scale=random_gray_scale,
            gaussian_blur=gaussian_blur[2],
            kernel_size=kernel_size,
            kernel_scale=kernel_scale,
            sigmas=sigmas,
            solarization_prob=0,
            normalize=normalize,
        )
        local_transforms = [local_transform] * n_local_views

        transforms = [global_transform_0, global_transform_1]
        transforms.extend(local_transforms)
        super().__init__(transforms)
class DINOViewTransform:
    """Augmentation pipeline producing a single DINO view.

    Applies, in order: random resized crop (bicubic), horizontal and vertical
    flips, optional random rotation, color jitter, random grayscale, Gaussian
    blur, random solarization, conversion to tensor and, if ``normalize`` is
    given, normalization. Parameters mirror those of :class:`DINOTransform`.
    """

    def __init__(
        self,
        crop_size: int = 224,
        crop_scale: Tuple[float, float] = (0.4, 1.0),
        hf_prob: float = 0.5,
        vf_prob: float = 0,
        rr_prob: float = 0,
        rr_degrees: Union[None, float, Tuple[float, float]] = None,
        cj_prob: float = 0.8,
        cj_strength: float = 0.5,
        cj_bright: float = 0.8,
        cj_contrast: float = 0.8,
        cj_sat: float = 0.4,
        cj_hue: float = 0.2,
        random_gray_scale: float = 0.2,
        gaussian_blur: float = 1.0,
        kernel_size: Optional[float] = None,
        kernel_scale: Optional[float] = None,
        sigmas: Tuple[float, float] = (0.1, 2),
        solarization_prob: float = 0.2,
        normalize: Union[None, dict] = IMAGENET_NORMALIZE,
    ):
        # Jitter amounts are scaled by the global cj_strength factor.
        color_jitter = T.ColorJitter(
            brightness=cj_strength * cj_bright,
            contrast=cj_strength * cj_contrast,
            saturation=cj_strength * cj_sat,
            hue=cj_strength * cj_hue,
        )
        augmentations = [
            T.RandomResizedCrop(
                size=crop_size,
                scale=crop_scale,
                interpolation=PIL.Image.BICUBIC,
            ),
            T.RandomHorizontalFlip(p=hf_prob),
            T.RandomVerticalFlip(p=vf_prob),
            random_rotation_transform(rr_prob=rr_prob, rr_degrees=rr_degrees),
            T.RandomApply([color_jitter], p=cj_prob),
            T.RandomGrayscale(p=random_gray_scale),
            GaussianBlur(
                kernel_size=kernel_size,
                scale=kernel_scale,
                sigmas=sigmas,
                prob=gaussian_blur,
            ),
            RandomSolarization(prob=solarization_prob),
            T.ToTensor(),
        ]
        if normalize:
            augmentations.append(
                T.Normalize(mean=normalize["mean"], std=normalize["std"])
            )
        self.transform = T.Compose(augmentations)

    def __call__(self, image: Union[Tensor, Image]) -> Tensor:
        """Apply the view augmentations to ``image``.

        Args:
            image:
                Input PIL image or tensor.

        Returns:
            The augmented image as a tensor.
        """
        return self.transform(image)
| 9,587 | 35.045113 | 123 | py |
lightly | lightly-master/lightly/transforms/fast_siam_transform.py | from typing import Optional, Tuple, Union
from lightly.transforms.multi_view_transform import MultiViewTransform
from lightly.transforms.simsiam_transform import SimSiamViewTransform
from lightly.transforms.utils import IMAGENET_NORMALIZE
class FastSiamTransform(MultiViewTransform):
    """Augmentation pipeline for FastSiam.

    Input to this transform:
        PIL Image or Tensor.

    Output of this transform:
        List of Tensor of length `num_views` (4 by default).

    Each view is produced by an independent :class:`SimSiamViewTransform`,
    which applies random resized crop, random flips, color jitter, random
    grayscale, Gaussian blur, and ImageNet normalization by default.

    Attributes:
        num_views:
            Number of views (num_views = K+1 where K is the number of target views).
        input_size:
            Size of the input image in pixels.
        cj_prob:
            Probability that color jitter is applied.
        cj_strength:
            Strength of the color jitter; `cj_bright`, `cj_contrast`, `cj_sat`,
            and `cj_hue` are multiplied by this value. For datasets with small
            images (e.g. CIFAR) a value of 0.5 is recommended.
        cj_bright:
            How much to jitter brightness.
        cj_contrast:
            How much to jitter contrast.
        cj_sat:
            How much to jitter saturation.
        cj_hue:
            How much to jitter hue.
        min_scale:
            Minimum size of the randomized crop relative to the input_size.
        random_gray_scale:
            Probability of conversion to grayscale.
        gaussian_blur:
            Probability of Gaussian blur.
        kernel_size:
            Will be deprecated in favor of `sigmas`. If set, the old behavior
            applies and `sigmas` is ignored.
        sigmas:
            Tuple of min and max value from which the std of the gaussian
            kernel is sampled. Ignored if `kernel_size` is set.
        vf_prob:
            Probability that vertical flip is applied.
        hf_prob:
            Probability that horizontal flip is applied.
        rr_prob:
            Probability that random rotation is applied.
        rr_degrees:
            Range of degrees for random rotation. None rotates by 90 degrees;
            a (min, max) tuple samples from [min, max]; a single number samples
            from [-rr_degrees, +rr_degrees]. Rotations are counter-clockwise.
        normalize:
            Dictionary with 'mean' and 'std' for torchvision.transforms.Normalize.
    """

    def __init__(
        self,
        num_views: int = 4,
        input_size: int = 224,
        cj_prob: float = 0.8,
        cj_strength: float = 1.0,
        cj_bright: float = 0.4,
        cj_contrast: float = 0.4,
        cj_sat: float = 0.4,
        cj_hue: float = 0.1,
        min_scale: float = 0.2,
        random_gray_scale: float = 0.2,
        gaussian_blur: float = 0.5,
        kernel_size: Optional[float] = None,
        sigmas: Tuple[float, float] = (0.1, 2),
        vf_prob: float = 0.0,
        hf_prob: float = 0.5,
        rr_prob: float = 0.0,
        rr_degrees: Union[None, float, Tuple[float, float]] = None,
        normalize: Union[None, dict] = IMAGENET_NORMALIZE,
    ):
        # Build one independent view transform per requested view.
        view_transforms = []
        for _ in range(num_views):
            view_transforms.append(
                SimSiamViewTransform(
                    input_size=input_size,
                    cj_prob=cj_prob,
                    cj_strength=cj_strength,
                    cj_bright=cj_bright,
                    cj_contrast=cj_contrast,
                    cj_sat=cj_sat,
                    cj_hue=cj_hue,
                    min_scale=min_scale,
                    random_gray_scale=random_gray_scale,
                    gaussian_blur=gaussian_blur,
                    kernel_size=kernel_size,
                    sigmas=sigmas,
                    vf_prob=vf_prob,
                    hf_prob=hf_prob,
                    rr_prob=rr_prob,
                    rr_degrees=rr_degrees,
                    normalize=normalize,
                )
            )
        super().__init__(transforms=view_transforms)
| 4,323 | 35.957265 | 119 | py |
lightly | lightly-master/lightly/transforms/gaussian_blur.py | # Copyright (c) 2020. Lightly AG and its affiliates.
# All Rights Reserved
from typing import Tuple, Union
from warnings import warn
import numpy as np
from PIL import ImageFilter
class GaussianBlur:
    """Implementation of random Gaussian blur.

    Utilizes the built-in ImageFilter method from PIL to apply a Gaussian
    blur to the input image with a certain probability. The blur is further
    randomized by sampling uniformly the values of the standard deviation of
    the Gaussian kernel.

    Attributes:
        kernel_size:
            Deprecated in favor of the `sigmas` argument; the value itself is
            no longer used for anything other than triggering the deprecation
            warning.
        prob:
            Probability with which the blur is applied.
        scale:
            Deprecated in favor of the `sigmas` argument; the value itself is
            no longer used for anything other than triggering the deprecation
            warning.
        sigmas:
            Tuple of min and max value from which the std of the gaussian
            kernel is sampled.
    """

    def __init__(
        self,
        kernel_size: Union[float, None] = None,
        prob: float = 0.5,
        scale: Union[float, None] = None,
        sigmas: Tuple[float, float] = (0.2, 2),
    ):
        # PEP 8: compare to None with `is not None` rather than `!= None`.
        # Both legacy arguments only emit the deprecation warning; they do not
        # influence the blur anymore (sigmas alone controls it).
        if scale is not None or kernel_size is not None:
            warn(
                "The 'kernel_size' and 'scale' arguments of the GaussianBlur augmentation will be deprecated. "
                "Please use the 'sigmas' parameter instead.",
                DeprecationWarning,
            )
        self.prob = prob
        self.sigmas = sigmas
    def __call__(self, sample):
        """Blurs the image with a given probability.

        Args:
            sample:
                PIL image to which blur will be applied.

        Returns:
            Blurred image or original image.
        """
        prob = np.random.random_sample()
        if prob < self.prob:
            # choose randomized std for Gaussian filtering
            sigma = np.random.uniform(self.sigmas[0], self.sigmas[1])
            # PIL GaussianBlur https://github.com/python-pillow/Pillow/blob/76478c6865c78af10bf48868345db2af92f86166/src/PIL/ImageFilter.py#L154 label the
            # sigma parameter of the gaussian filter as radius. Before, the radius of the patch was passed as the argument.
            # The issue was addressed here https://github.com/lightly-ai/lightly/issues/1051 and solved by AurelienGauffre.
            return sample.filter(ImageFilter.GaussianBlur(radius=sigma))
        # return original image
        return sample
| 2,724 | 37.928571 | 154 | py |
lightly | lightly-master/lightly/transforms/ijepa_transform.py | from typing import Tuple, Union
import torchvision.transforms as T
from PIL.Image import Image
from torch import Tensor
from lightly.transforms.utils import IMAGENET_NORMALIZE
class IJEPATransform:
    """Implements the augmentations for I-JEPA [0, 1].

    Experimental: Support for I-JEPA is experimental, there might be breaking changes
    in the future.

    - [0]: Joint-Embedding Predictive Architecture, 2023, https://arxiv.org/abs/2301.08243
    - [1]: https://github.com/facebookresearch/ijepa

    Attributes:
        input_size:
            Size of the input image in pixels.
        min_scale:
            Minimum size of the randomized crop relative to the input_size.
        normalize:
            Dictionary with 'mean' and 'std' for torchvision.transforms.Normalize.
            May be None (or empty) to skip normalization.
    """

    def __init__(
        self,
        input_size: Union[int, Tuple[int, int]] = 224,
        min_scale: float = 0.2,
        # Annotated Union[None, dict] (not bare dict) for consistency with the
        # other transforms in this package and because the guard below
        # explicitly supports a falsy/None value.
        normalize: Union[None, dict] = IMAGENET_NORMALIZE,
    ):
        transforms = [
            T.RandomResizedCrop(
                input_size, scale=(min_scale, 1.0), interpolation=3
            ),  # 3 is bicubic
            T.RandomHorizontalFlip(),
            T.ToTensor(),
        ]
        # Normalization is optional; skipped when `normalize` is None/empty.
        if normalize:
            transforms.append(T.Normalize(mean=normalize["mean"], std=normalize["std"]))
        self.transform = T.Compose(transforms)

    def __call__(self, image: Union[Tensor, Image]) -> Tensor:
        """Applies the transforms to the input image.

        Args:
            image:
                The input image to apply the transforms to.

        Returns:
            The transformed image.
        """
        return self.transform(image)
| 1,674 | 27.389831 | 90 | py |