id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
24,300 | failover_reboot.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/failover_reboot.py | # Copyright (c) - iXsystems Inc.
#
# Licensed under the terms of the TrueNAS Enterprise License Agreement
# See the file LICENSE.IX for complete terms and conditions
from middlewared.api.base import BaseModel, single_argument_result
from .system_reboot import SystemRebootInfoResult
__all__ = ["FailoverRebootInfoArgs", "FailoverRebootInfoResult",
"FailoverRebootOtherNodeArgs", "FailoverRebootOtherNodeResult"]
class FailoverRebootInfoArgs(BaseModel):
pass
@single_argument_result
class FailoverRebootInfoResult(BaseModel):
this_node: SystemRebootInfoResult
other_node: SystemRebootInfoResult | None
class FailoverRebootOtherNodeArgs(BaseModel):
pass
class FailoverRebootOtherNodeResult(BaseModel):
result: None
| 753 | Python | .py | 18 | 38.611111 | 74 | 0.820937 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,301 | smartctl.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/smartctl.py | from middlewared.api.base import BaseModel
__all__ = ["AtaSelfTest", "NvmeSelfTest", "ScsiSelfTest"]
class AtaSelfTest(BaseModel):
num: int
description: str
status: str
status_verbose: str
remaining: float
lifetime: int
power_on_hours_ago: int
lba_of_first_error: int | None = None
class NvmeSelfTest(BaseModel):
num: int
description: str
status: str
status_verbose: str
power_on_hours: int
power_on_hours_ago: int
failing_lba: int | None = None
nsid: int | None = None
seg: int | None = None
sct: int | None = 0x0
code: int | None = 0x0
class ScsiSelfTest(BaseModel):
num: int
description: str
status: str
status_verbose: str
power_on_hours_ago: int
segment_number: int | None = None
lifetime: int | None = None
lba_of_first_error: int | None = None
| 866 | Python | .py | 32 | 22.46875 | 57 | 0.668682 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,302 | cloud_sync.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/cloud_sync.py | from pydantic import Secret
from middlewared.api.base import (BaseModel, Excluded, excluded_field, ForUpdateMetaclass, NonEmptyString,
single_argument_args, single_argument_result)
__all__ = ["CloudCredentialEntry",
"CloudCredentialCreateArgs", "CloudCredentialCreateResult",
"CloudCredentialUpdateArgs", "CloudCredentialUpdateResult",
"CloudCredentialDeleteArgs", "CloudCredentialDeleteResult",
"CloudCredentialVerifyArgs", "CloudCredentialVerifyResult"]
class CloudCredentialEntry(BaseModel):
id: int
name: NonEmptyString
provider: str
attributes: Secret[dict]
class CloudCredentialCreate(CloudCredentialEntry):
id: Excluded = excluded_field()
class CloudCredentialUpdate(CloudCredentialCreate, metaclass=ForUpdateMetaclass):
pass
class CloudCredentialCreateArgs(BaseModel):
cloud_sync_credentials_create: CloudCredentialCreate
class CloudCredentialCreateResult(BaseModel):
result: CloudCredentialEntry
class CloudCredentialUpdateArgs(BaseModel):
id: int
cloud_sync_credentials_update: CloudCredentialUpdate
class CloudCredentialUpdateResult(BaseModel):
result: CloudCredentialEntry
class CloudCredentialDeleteArgs(BaseModel):
id: int
class CloudCredentialDeleteResult(BaseModel):
result: bool
@single_argument_args("cloud_sync_credentials_create")
class CloudCredentialVerifyArgs(BaseModel):
provider: str
attributes: Secret[dict]
@single_argument_result
class CloudCredentialVerifyResult(BaseModel):
valid: bool
error: str | None = None
excerpt: str | None = None
| 1,642 | Python | .py | 39 | 36.641026 | 106 | 0.786574 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,303 | vendor.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/vendor.py | from middlewared.api.base import BaseModel
class VendorNameArgs(BaseModel):
pass
class VendorNameResult(BaseModel):
result: str | None
class UnvendorArgs(BaseModel):
pass
class UnvendorResult(BaseModel):
result: None
class IsVendoredArgs(BaseModel):
pass
class IsVendoredResult(BaseModel):
result: bool
| 339 | Python | .py | 13 | 22.307692 | 42 | 0.802548 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,304 | auth.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/auth.py | from middlewared.api.base import BaseModel, single_argument_result
from middlewared.utils.auth import AuthMech, AuthResp
from pydantic import Field, Secret
from typing import Literal
from .user import UserGetUserObjResult
class AuthMeArgs(BaseModel):
pass
class AuthUserInfo(UserGetUserObjResult.model_fields["result"].annotation):
attributes: dict
two_factor_config: dict
privilege: dict
account_attributes: list[str]
class AuthLegacyUsernamePassword(BaseModel):
username: str
password: Secret[str]
class AuthLegacyTwoFactorArgs(AuthLegacyUsernamePassword):
pass
class AuthLegacyPasswordLoginArgs(AuthLegacyUsernamePassword):
otp_token: Secret[str | None] = None
class AuthLegacyApiKeyLoginArgs(BaseModel):
api_key: Secret[str]
class AuthLegacyTokenLoginArgs(BaseModel):
token: Secret[str]
class AuthLegacyResult(BaseModel):
result: bool
@single_argument_result
class AuthMeResult(AuthUserInfo):
pass
class AuthCommonOptions(BaseModel):
user_info: bool = True # include auth.me in successful result
class AuthApiKeyPlain(BaseModel):
mechanism: Literal[AuthMech.API_KEY_PLAIN]
username: str
api_key: Secret[str]
login_options: AuthCommonOptions = Field(default=AuthCommonOptions())
class AuthPasswordPlain(BaseModel):
mechanism: Literal[AuthMech.PASSWORD_PLAIN]
username: str
password: Secret[str]
login_options: AuthCommonOptions = Field(default=AuthCommonOptions())
class AuthTokenPlain(BaseModel):
mechanism: Literal[AuthMech.TOKEN_PLAIN]
token: Secret[str]
login_options: AuthCommonOptions = Field(default=AuthCommonOptions())
class AuthOTPToken(BaseModel):
mechanism: Literal[AuthMech.OTP_TOKEN]
otp_token: Secret[str]
login_options: AuthCommonOptions = Field(default=AuthCommonOptions())
class AuthRespSuccess(BaseModel):
response_type: Literal[AuthResp.SUCCESS]
user_info: AuthUserInfo | None
class AuthRespAuthErr(BaseModel):
response_type: Literal[AuthResp.AUTH_ERR]
class AuthRespExpired(BaseModel):
response_type: Literal[AuthResp.EXPIRED]
class AuthRespOTPRequired(BaseModel):
response_type: Literal[AuthResp.OTP_REQUIRED]
username: str
class AuthLoginExArgs(BaseModel):
login_data: AuthApiKeyPlain | AuthPasswordPlain | AuthTokenPlain | AuthOTPToken
class AuthLoginExContinueArgs(BaseModel):
login_data: AuthOTPToken
class AuthLoginExResult(BaseModel):
result: AuthRespSuccess | AuthRespAuthErr | AuthRespExpired | AuthRespOTPRequired
class AuthMechChoicesArgs(BaseModel):
pass
class AuthMechChoicesResult(BaseModel):
result: list[str]
| 2,652 | Python | .py | 68 | 35.029412 | 85 | 0.804177 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,305 | keychain.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/keychain.py | from typing import Literal
from pydantic import Field, Secret
from middlewared.api.base import (BaseModel, Excluded, excluded_field, ForUpdateMetaclass, HttpUrl, NonEmptyString,
single_argument_args, single_argument_result)
__all__ = ["KeychainCredentialEntry",
"KeychainCredentialCreateArgs", "KeychainCredentialCreateResult",
"KeychainCredentialUpdateArgs", "KeychainCredentialUpdateResult",
"KeychainCredentialDeleteArgs", "KeychainCredentialDeleteResult",
"KeychainCredentialUsedByArgs", "KeychainCredentialUsedByResult",
"KeychainCredentialGetOfTypeArgs", "KeychainCredentialGetOfTypeResult",
"KeychainCredentialGenerateSSHKeyPairArgs", "KeychainCredentialGenerateSSHKeyPairResult",
"KeychainCredentialRemoteSSHHostKeyScanArgs", "KeychainCredentialRemoteSSHHostKeyScanResult",
"KeychainCredentialRemoteSSHSemiautomaticSetupArgs", "KeychainCredentialRemoteSSHSemiautomaticSetupResult",
"KeychainCredentialSSHPairArgs", "KeychainCredentialSSHPairResult",
"KeychainCredentialSetupSSHConnectionArgs", "KeychainCredentialSetupSSHConnectionResult"]
class KeychainCredentialEntry(BaseModel):
id: int
name: NonEmptyString
type: str
attributes: Secret[dict]
class KeychainCredentialCreate(KeychainCredentialEntry):
id: Excluded = excluded_field()
class KeychainCredentialUpdate(KeychainCredentialCreate, metaclass=ForUpdateMetaclass):
type: Excluded = excluded_field()
class KeychainCredentialCreateArgs(BaseModel):
keychain_credential_create: KeychainCredentialCreate
class KeychainCredentialCreateResult(BaseModel):
result: KeychainCredentialEntry
class KeychainCredentialUpdateArgs(BaseModel):
id: int
keychain_credential_update: KeychainCredentialUpdate
class KeychainCredentialUpdateResult(BaseModel):
result: KeychainCredentialEntry
class KeychainCredentialDeleteOptions(BaseModel):
cascade: bool = False
class KeychainCredentialDeleteArgs(BaseModel):
id: int
options: KeychainCredentialDeleteOptions = Field(default=KeychainCredentialDeleteOptions())
class KeychainCredentialDeleteResult(BaseModel):
result: None
class KeychainCredentialUsedByArgs(BaseModel):
id: int
class UsedKeychainCredential(BaseModel):
title: str
unbind_method: Literal["delete", "disable"]
class KeychainCredentialUsedByResult(BaseModel):
result: list[UsedKeychainCredential]
class KeychainCredentialGetOfTypeArgs(BaseModel):
id: int
type: str
@single_argument_result
class KeychainCredentialGetOfTypeResult(KeychainCredentialEntry):
pass
class KeychainCredentialGenerateSSHKeyPairArgs(BaseModel):
pass
@single_argument_result
class KeychainCredentialGenerateSSHKeyPairResult(BaseModel):
private_key: str
public_key: str
@single_argument_args("keychain_remote_ssh_host_key_scan")
class KeychainCredentialRemoteSSHHostKeyScanArgs(BaseModel):
host: NonEmptyString
port: int = 22
connect_timeout: int = 10
class KeychainCredentialRemoteSSHHostKeyScanResult(BaseModel):
result: str
@single_argument_args("keychain_remote_ssh_semiautomatic_setup")
class KeychainCredentialRemoteSSHSemiautomaticSetupArgs(BaseModel):
name: NonEmptyString
url: HttpUrl
verify_ssl: bool = True
token: Secret[str | None] = None
admin_username: str = "root"
password: Secret[str | None] = None
otp_token: Secret[str | None] = None
username: str = "root"
private_key: Secret[int]
connect_timeout: int = 10
sudo: bool = False
class KeychainCredentialRemoteSSHSemiautomaticSetupResult(BaseModel):
result: KeychainCredentialEntry
@single_argument_args("keychain_ssh_pair")
class KeychainCredentialSSHPairArgs(BaseModel):
remote_hostname: NonEmptyString
username: str = "root"
public_key: NonEmptyString
class KeychainCredentialSSHPairResult(BaseModel):
result: None
class KeychainCredentialSetupSSHConnectionPrivateKey(BaseModel):
generate_key: bool = True
existing_key_id: int | None = None
name: NonEmptyString
class KeychainCredentialSetupSSHConnectionSemiAutomaticSetup(
KeychainCredentialRemoteSSHSemiautomaticSetupArgs.model_fields["keychain_remote_ssh_semiautomatic_setup"].annotation
):
name: Excluded = excluded_field()
private_key: Excluded = excluded_field()
@single_argument_args("setup_ssh_connection")
class KeychainCredentialSetupSSHConnectionArgs(BaseModel):
private_key: KeychainCredentialSetupSSHConnectionPrivateKey | None = None
connection_name: NonEmptyString
setup_type: Literal["SEMI-AUTOMATIC", "MANUAL"] = "MANUAL"
semi_automatic_setup: KeychainCredentialSetupSSHConnectionSemiAutomaticSetup | None = None
manual_setup: dict | None = None
@single_argument_result
class KeychainCredentialSetupSSHConnectionResult(KeychainCredentialEntry):
pass
| 4,943 | Python | .py | 107 | 41.186916 | 120 | 0.804771 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,306 | __init__.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/__init__.py | from .acme_protocol import * # noqa
from .alert import * # noqa
from .alertservice import * # noqa
from .api_key import * # noqa
from .auth import * # noqa
from .cloud_sync import * # noqa
from .common import * # noqa
from .core import * # noqa
from .disk import * # noqa
from .failover_reboot import * # noqa
from .group import * # noqa
from .iscsi_auth import * # noqa
from .keychain import * # noqa
from .privilege import * # noqa
from .rdma import * # noqa
from .smartctl import * # noqa
from .static_route import * # noqa
from .system_lifecycle import * # noqa
from .system_reboot import * # noqa
from .user import * # noqa
from .vendor import * # noqa
from .virt import * # noqa
| 707 | Python | .py | 22 | 31.136364 | 39 | 0.69927 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,307 | api_key.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/api_key.py | from datetime import datetime
from typing import Literal, TypeAlias
from typing_extensions import Annotated
from pydantic import Secret, StringConstraints
from middlewared.api.base import (
BaseModel, Excluded, excluded_field, ForUpdateMetaclass, NonEmptyString,
LocalUsername, RemoteUsername
)
HttpVerb: TypeAlias = Literal["GET", "POST", "PUT", "DELETE", "CALL", "SUBSCRIBE", "*"]
class AllowListItem(BaseModel):
method: HttpVerb
resource: NonEmptyString
class ApiKeyEntry(BaseModel):
id: int
name: Annotated[NonEmptyString, StringConstraints(max_length=200)]
username: LocalUsername | RemoteUsername
user_identifier: int | str
keyhash: Secret[str]
created_at: datetime
expires_at: datetime | None = None
local: bool
revoked: bool
class ApiKeyEntryWithKey(ApiKeyEntry):
key: Secret[str]
class ApiKeyCreate(ApiKeyEntry):
id: Excluded = excluded_field()
user_identifier: Excluded = excluded_field()
keyhash: Excluded = excluded_field()
created_at: Excluded = excluded_field()
local: Excluded = excluded_field()
revoked: Excluded = excluded_field()
class ApiKeyCreateArgs(BaseModel):
api_key_create: ApiKeyCreate
class ApiKeyCreateResult(BaseModel):
result: ApiKeyEntryWithKey
class ApiKeyUpdate(ApiKeyCreate, metaclass=ForUpdateMetaclass):
username: Excluded = excluded_field()
reset: bool
class ApiKeyUpdateArgs(BaseModel):
id: int
api_key_update: ApiKeyUpdate
class ApiKeyUpdateResult(BaseModel):
result: ApiKeyEntryWithKey
class ApiKeyDeleteArgs(BaseModel):
id: int
class ApiKeyDeleteResult(BaseModel):
result: Literal[True]
class ApiKeyMyKeysArgs(BaseModel):
pass
class ApiKeyMyKeysResult(BaseModel):
result: list[ApiKeyEntry]
| 1,785 | Python | .py | 51 | 30.980392 | 87 | 0.774648 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,308 | virt.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/virt.py | from typing import Literal, List, Union, Optional, TypeAlias
from typing_extensions import Annotated
from pydantic import Field, StringConstraints
from middlewared.api.base import (
BaseModel, ForUpdateMetaclass, NonEmptyString,
LocalGID, LocalUID,
single_argument_args, single_argument_result,
)
class VirtGlobalEntry(BaseModel):
id: int
pool: str | None = None
dataset: str | None = None
bridge: str | None = None
v4_network: str | None = None
v6_network: str | None = None
state: Literal['INITIALIZING', 'INITIALIZED', 'NO_POOL', 'ERROR', 'LOCKED'] | None = None
@single_argument_args('virt_global_update')
class VirtGlobalUpdateArgs(BaseModel, metaclass=ForUpdateMetaclass):
pool: NonEmptyString | None = None
bridge: NonEmptyString | None = None
v4_network: str | None = None
v6_network: str | None = None
class VirtGlobalUpdateResult(BaseModel):
result: VirtGlobalEntry
class VirtGlobalBridgeChoicesArgs(BaseModel):
pass
class VirtGlobalBridgeChoicesResult(BaseModel):
result: dict
class VirtGlobalPoolChoicesArgs(BaseModel):
pass
class VirtGlobalPoolChoicesResult(BaseModel):
result: dict
class VirtGlobalGetNetworkArgs(BaseModel):
name: NonEmptyString
@single_argument_result
class VirtGlobalGetNetworkResult(BaseModel):
type: Literal['BRIDGE']
managed: bool
ipv4_address: NonEmptyString
ipv4_nat: bool
ipv6_address: NonEmptyString
ipv6_nat: bool
REMOTE_CHOICES: TypeAlias = Literal['LINUX_CONTAINERS']
@single_argument_args('virt_instances_image_choices')
class VirtInstanceImageChoicesArgs(BaseModel):
remote: REMOTE_CHOICES = 'LINUX_CONTAINERS'
class ImageChoiceItem(BaseModel):
label: str
os: str
release: str
arch: str
variant: str
class VirtInstanceImageChoicesResult(BaseModel):
result: dict[str, ImageChoiceItem]
class Device(BaseModel):
name: Optional[NonEmptyString] = None
readonly: bool = False
class Disk(Device):
dev_type: Literal['DISK']
source: Optional[str] = None
destination: Optional[str] = None
class NIC(Device):
dev_type: Literal['NIC']
network: NonEmptyString
class USB(Device):
dev_type: Literal['USB']
bus: Optional[int] = None
dev: Optional[int] = None
product_id: Optional[str] = None
vendor_id: Optional[str] = None
Proto: TypeAlias = Literal['UDP', 'TCP']
class Proxy(Device):
dev_type: Literal['PROXY']
source_proto: Proto
source_port: int
dest_proto: Proto
dest_port: int
class TPM(Device):
dev_type: Literal['TPM']
path: Optional[str] = None
pathrm: Optional[str] = None
GPUType: TypeAlias = Literal['PHYSICAL', 'MDEV', 'MIG', 'SRIOV']
class GPU(Device):
dev_type: Literal['GPU']
gpu_type: GPUType
id: str | None = None
gid: LocalGID | None = None
uid: LocalUID | None = None
mode: Optional[str] = None
mdev: Optional[NonEmptyString] = None
mig_uuid: Optional[NonEmptyString] = None
pci: Optional[NonEmptyString] = None
productid: Optional[NonEmptyString] = None
vendorid: Optional[NonEmptyString] = None
DeviceType: TypeAlias = Annotated[
Union[Disk, GPU, Proxy, TPM, USB, NIC],
Field(discriminator='dev_type')
]
class VirtInstanceAlias(BaseModel):
type: Literal['INET', 'INET6']
address: NonEmptyString
netmask: int
InstanceType: TypeAlias = Literal['CONTAINER', 'VM']
class VirtInstanceEntry(BaseModel):
id: str
name: Annotated[NonEmptyString, StringConstraints(max_length=200)]
type: InstanceType = 'CONTAINER'
status: Literal['RUNNING', 'STOPPED']
cpu: str | None
memory: int
autostart: bool
environment: dict[str, str]
aliases: List[VirtInstanceAlias]
raw: dict
@single_argument_args('virt_instance_create')
class VirtInstanceCreateArgs(BaseModel):
name: Annotated[NonEmptyString, StringConstraints(max_length=200)]
image: Annotated[NonEmptyString, StringConstraints(max_length=200)]
remote: REMOTE_CHOICES = 'LINUX_CONTAINERS'
instance_type: InstanceType = 'CONTAINER'
environment: dict | None = None
autostart: bool | None = None
cpu: str | None = None
memory: int | None = None
devices: List[DeviceType] = None
class VirtInstanceCreateResult(BaseModel):
result: dict
class VirtInstanceUpdate(BaseModel, metaclass=ForUpdateMetaclass):
environment: dict | None = None
autostart: bool | None = None
cpu: str | None = None
memory: int | None = None
class VirtInstanceUpdateArgs(BaseModel):
id: str
virt_instance_update: VirtInstanceUpdate
class VirtInstanceUpdateResult(BaseModel):
result: VirtInstanceEntry
class VirtInstanceDeleteArgs(BaseModel):
id: str
class VirtInstanceDeleteResult(BaseModel):
result: Literal[True]
class VirtInstanceDeviceListArgs(BaseModel):
id: str
class VirtInstanceDeviceListResult(BaseModel):
result: List[DeviceType]
class VirtInstanceDeviceAddArgs(BaseModel):
id: str
device: DeviceType
class VirtInstanceDeviceAddResult(BaseModel):
result: dict
class VirtInstanceDeviceDeleteArgs(BaseModel):
id: str
name: str
class VirtInstanceDeviceDeleteResult(BaseModel):
result: dict
class VirtInstanceStartArgs(BaseModel):
id: str
class VirtInstanceStartResult(BaseModel):
result: bool
class StopArgs(BaseModel):
timeout: int = -1
force: bool = False
class VirtInstanceStopArgs(BaseModel):
id: str
stop_args: StopArgs
class VirtInstanceStopResult(BaseModel):
result: bool
class VirtInstanceRestartArgs(BaseModel):
id: str
stop_args: StopArgs
class VirtInstanceRestartResult(BaseModel):
result: bool
class VirtDeviceUSBChoicesArgs(BaseModel):
pass
class USBChoice(BaseModel):
vendor_id: str
product_id: str
bus: int
dev: int
product: str
manufacturer: str
class VirtDeviceUSBChoicesResult(BaseModel):
result: dict[str, USBChoice]
class VirtDeviceGPUChoicesArgs(BaseModel):
instance_type: InstanceType
gpu_type: GPUType
class GPUChoice(BaseModel):
bus: int
slot: int
description: str
vendor: Optional[str] = None
class VirtDeviceGPUChoicesResult(BaseModel):
result: dict[str, GPUChoice]
class VirtDeviceDiskChoicesArgs(BaseModel):
pass
class VirtDeviceDiskChoicesResult(BaseModel):
result: dict[str, str]
class VirtImageUploadArgs(BaseModel):
pass
@single_argument_result
class VirtImageUploadResult(BaseModel):
fingerprint: NonEmptyString
size: int
| 6,573 | Python | .py | 203 | 28.137931 | 93 | 0.750639 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,309 | core.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/core.py | from middlewared.api.base import BaseModel, ForUpdateMetaclass, single_argument_result
__all__ = ["CoreSetOptionsArgs", "CoreSetOptionsResult", "CoreSubscribeArgs", "CoreSubscribeResult",
"CoreUnsubscribeArgs", "CoreUnsubscribeResult"]
class CoreSetOptionsOptions(BaseModel, metaclass=ForUpdateMetaclass):
py_exceptions: bool
class CoreSetOptionsArgs(BaseModel):
options: CoreSetOptionsOptions
CoreSetOptionsResult = single_argument_result(None, "CoreSetOptionsResult")
class CoreSubscribeArgs(BaseModel):
event: str
CoreSubscribeResult = single_argument_result(str, "CoreSubscribeResult")
class CoreUnsubscribeArgs(BaseModel):
id_: str
CoreUnsubscribeResult = single_argument_result(None, "CoreUnsubscribeResult")
| 757 | Python | .py | 14 | 50.071429 | 100 | 0.821429 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,310 | system_lifecycle.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/system_lifecycle.py | from pydantic import Field
from middlewared.api.base import BaseModel, NonEmptyString
__all__ = ["SystemRebootArgs", "SystemRebootResult",
"SystemShutdownArgs", "SystemShutdownResult"]
class SystemRebootOptions(BaseModel):
delay: int | None = None
class SystemRebootArgs(BaseModel):
reason: NonEmptyString
options: SystemRebootOptions = Field(default=SystemRebootOptions())
class SystemRebootResult(BaseModel):
result: None
class SystemShutdownOptions(BaseModel):
delay: int | None = None
class SystemShutdownArgs(BaseModel):
reason: NonEmptyString
options: SystemShutdownOptions = Field(default=SystemShutdownOptions())
class SystemShutdownResult(BaseModel):
result: None
| 730 | Python | .py | 18 | 36.388889 | 75 | 0.795129 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,311 | privilege.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/privilege.py | from middlewared.api.base import BaseModel, Excluded, excluded_field, ForUpdateMetaclass, NonEmptyString, SID
from .api_key import AllowListItem
from .group import GroupEntry
__all__ = ["PrivilegeEntry",
"PrivilegeCreateArgs", "PrivilegeCreateResult",
"PrivilegeUpdateArgs", "PrivilegeUpdateResult",
"PrivilegeDeleteArgs", "PrivilegeDeleteResult"]
class PrivilegeEntry(BaseModel):
id: int
builtin_name: str | None
name: NonEmptyString
local_groups: list[GroupEntry]
ds_groups: list[GroupEntry]
allowlist: list[AllowListItem] = []
roles: list[str] = []
web_shell: bool
class PrivilegeCreate(PrivilegeEntry):
id: Excluded = excluded_field()
builtin_name: Excluded = excluded_field()
local_groups: list[int] = []
ds_groups: list[int | SID] = []
class PrivilegeCreateArgs(BaseModel):
privilege_create: PrivilegeCreate
class PrivilegeCreateResult(BaseModel):
result: PrivilegeEntry
class PrivilegeUpdate(PrivilegeCreate, metaclass=ForUpdateMetaclass):
pass
class PrivilegeUpdateArgs(BaseModel):
id: int
privilege_update: PrivilegeUpdate
class PrivilegeUpdateResult(BaseModel):
result: PrivilegeEntry
class PrivilegeDeleteArgs(BaseModel):
id: int
class PrivilegeDeleteResult(BaseModel):
result: bool
| 1,326 | Python | .py | 36 | 32.166667 | 109 | 0.759245 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,312 | disk.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/disk.py | from middlewared.api.base import BaseModel
from .alert import Alert
class DiskTemperatureAlertsArgs(BaseModel):
names: list[str]
class DiskTemperatureAlertsResult(BaseModel):
result: list[Alert]
| 207 | Python | .py | 6 | 31.5 | 45 | 0.832487 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,313 | static_route.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/static_route.py | from middlewared.api.base import BaseModel, Excluded, excluded_field, ForUpdateMetaclass
from pydantic import IPvAnyAddress, IPvAnyNetwork
__all__ = [
"StaticRouteEntry",
"StaticRouteUpdateArgs",
"StaticRouteUpdateResult",
"StaticRouteCreateArgs",
"StaticRouteCreateResult",
"StaticRouteDeleteArgs",
"StaticRouteDeleteResult",
]
class StaticRouteEntry(BaseModel):
destination: IPvAnyNetwork
gateway: IPvAnyAddress
description: str = ""
id: int
class StaticRouteCreate(StaticRouteEntry):
id: Excluded = excluded_field()
class StaticRouteCreateArgs(BaseModel):
data: StaticRouteCreate
class StaticRouteCreateResult(BaseModel):
result: StaticRouteEntry
class StaticRouteUpdate(StaticRouteCreate, metaclass=ForUpdateMetaclass):
pass
class StaticRouteUpdateArgs(BaseModel):
id: int
data: StaticRouteUpdate
class StaticRouteUpdateResult(BaseModel):
result: StaticRouteEntry
class StaticRouteDeleteArgs(BaseModel):
id: int
class StaticRouteDeleteResult(BaseModel):
result: bool
| 1,072 | Python | .py | 33 | 28.424242 | 88 | 0.802554 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,314 | common.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/common.py | from typing_extensions import Annotated, Self
from middlewared.api.base import BaseModel
from middlewared.utils import filters
from pydantic import AfterValidator, model_validator
__all__ = ["QueryFilters", "QueryOptions", "QueryArgs"]
filter_obj = filters()
def validate_query_filters(qf: list) -> list:
filter_obj.validate_filters(qf)
return qf
QueryFilters = Annotated[list, AfterValidator(validate_query_filters)]
class QueryOptions(BaseModel):
relationships: bool = True
extend: str | None = None
extend_context: str | None = None
prefix: str | None = None
extra: dict = {}
order_by: list[str] = []
select: list[str | list] = []
count: bool = False
get: bool = False
offset: int = 0
limit: int = 0
force_sql_filters: bool = False
@model_validator(mode='after')
def validate_query_options(self) -> Self:
filter_obj.validate_options(self.dict())
return self
class QueryArgs(BaseModel):
filters: QueryFilters = []
options: QueryOptions = QueryOptions()
| 1,056 | Python | .py | 30 | 30.833333 | 70 | 0.706811 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,315 | alert.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/alert.py | from datetime import datetime
from typing import Any
from pydantic import Field
from middlewared.api.base import BaseModel, LongString
__all__ = [
'AlertDismissArgs', 'AlertListArgs', 'AlertDismissResult', 'AlertListResult', 'AlertListCategoriesArgs',
'AlertListCategoriesResult', 'AlertListPoliciesArgs', 'AlertListPoliciesResult', 'AlertRestoreArgs',
'AlertRestoreResult', 'AlertOneshotCreateArgs', 'AlertOneshotCreateResult', 'AlertOneshotDeleteArgs',
'AlertOneshotDeleteResult', 'AlertClassesEntry', 'AlertClassesUpdateArgs', 'AlertClassesUpdateResult', 'Alert',
]
class Alert(BaseModel):
uuid: str
source: str
klass: str
args: Any
node: str
key: LongString
datetime_: datetime = Field(..., alias='datetime')
last_occurrence: datetime
dismissed: bool
mail: Any
text: LongString
id: str
level: str
formatted: LongString | None
one_shot: bool
class AlertCategoryClass(BaseModel):
id: str
title: str
level: str
proactive_support: bool
class AlertCategory(BaseModel):
id: str
title: str
classes: list[AlertCategoryClass]
class AlertClassesUpdate(BaseModel):
classes: dict = {}
class AlertClassesEntry(AlertClassesUpdate):
id: int
class AlertDismissArgs(BaseModel):
uuid: str
class AlertDismissResult(BaseModel):
result: None
class AlertListArgs(BaseModel):
pass
class AlertListResult(BaseModel):
result: list[Alert]
class AlertListCategoriesArgs(BaseModel):
pass
class AlertListCategoriesResult(BaseModel):
result: list[AlertCategory]
class AlertListPoliciesArgs(BaseModel):
pass
class AlertListPoliciesResult(BaseModel):
result: list[str]
class AlertOneshotCreateArgs(BaseModel):
klass: str
args: Any
class AlertOneshotCreateResult(BaseModel):
result: None
class AlertOneshotDeleteArgs(BaseModel):
klass: str | list[str]
query: Any = None
class AlertOneshotDeleteResult(BaseModel):
result: None
class AlertRestoreArgs(BaseModel):
uuid: str
class AlertRestoreResult(BaseModel):
result: None
class AlertClassesUpdateArgs(BaseModel):
alertclasses_update: AlertClassesUpdate = Field(default=AlertClassesUpdate())
class AlertClassesUpdateResult(BaseModel):
result: AlertClassesEntry
| 2,310 | Python | .py | 73 | 27.479452 | 115 | 0.776712 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,316 | group.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/group.py | from typing import Literal
from pydantic import Field
from middlewared.api.base import (BaseModel, Excluded, excluded_field, ForUpdateMetaclass, LocalUID, NonEmptyString,
single_argument_args, single_argument_result)
__all__ = ["GroupEntry",
"GroupCreateArgs", "GroupCreateResult",
"GroupUpdateArgs", "GroupUpdateResult",
"GroupDeleteArgs", "GroupDeleteResult",
"GroupGetNextGidArgs", "GroupGetNextGidResult",
"GroupGetGroupObjArgs", "GroupGetGroupObjResult",
"GroupHasPasswordEnabledUserArgs", "GroupHasPasswordEnabledUserResult"]
class GroupEntry(BaseModel):
id: int
gid: int
name: NonEmptyString
builtin: bool
sudo_commands: list[NonEmptyString] = []
sudo_commands_nopasswd: list[NonEmptyString] = []
smb: bool = True
"Specifies whether the group should be mapped into an NT group."
group: NonEmptyString
id_type_both: bool
local: bool
sid: str | None
roles: list[str]
users: list[int] = []
"A list of user ids (`id` attribute from `user.query`)."
class GroupCreate(GroupEntry):
id: Excluded = excluded_field()
builtin: Excluded = excluded_field()
group: Excluded = excluded_field()
id_type_both: Excluded = excluded_field()
local: Excluded = excluded_field()
sid: Excluded = excluded_field()
roles: Excluded = excluded_field()
gid: LocalUID | None = None
"If `null`, it is automatically filled with the next one available."
class GroupCreateArgs(BaseModel):
group_create: GroupCreate
class GroupCreateResult(BaseModel):
result: int
class GroupUpdate(GroupCreate, metaclass=ForUpdateMetaclass):
gid: Excluded = excluded_field()
class GroupUpdateArgs(BaseModel):
id: int
group_update: GroupUpdate
class GroupUpdateResult(BaseModel):
result: int
class GroupDeleteOptions(BaseModel):
delete_users: bool = False
"Deletes all users that have this group as their primary group."
class GroupDeleteArgs(BaseModel):
id: int
options: GroupDeleteOptions = Field(default=GroupDeleteOptions())
class GroupDeleteResult(BaseModel):
result: int
class GroupGetNextGidArgs(BaseModel):
pass
class GroupGetNextGidResult(BaseModel):
result: int
@single_argument_args("get_group_obj")
class GroupGetGroupObjArgs(BaseModel):
groupname: str | None = None
gid: int | None = None
sid_info: bool = False
@single_argument_result
class GroupGetGroupObjResult(BaseModel):
    """Group information in the style of the `struct group` returned by `getgrnam(3)`."""
    gr_name: str
    "name of the group"
    gr_gid: int
    "group id of the group"
    gr_mem: list[int]
    "list of gids that are members of the group"
    # NOTE(review): typed as ints while the docstring says "gids" — confirm whether
    # these are member uids or gids.
    sid: str | None = None
    "optional SID value for the account that is present if `sid_info` is specified in payload."
    source: Literal['LOCAL', 'ACTIVEDIRECTORY', 'LDAP']
    """
    the source from which the group account originated. Options are:
    LOCAL - group is defined locally on the NAS,
    ACTIVEDIRECTORY - group is provided by the Active Directory directory service,
    LDAP - group is provided by an LDAP directory service.
    """
    local: bool
    "boolean indicating whether this group is local to the NAS or provided by a directory service."
class GroupHasPasswordEnabledUserArgs(BaseModel):
    """Arguments model for `group.has_password_enabled_user`."""
    gids: list[int]
    exclude_user_ids: list[int] = []
class GroupHasPasswordEnabledUserResult(BaseModel):
    """Result model for `group.has_password_enabled_user` (boolean result)."""
    result: bool
| 3,390 | Python | .py | 89 | 32.730337 | 116 | 0.718836 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,317 | system_reboot.py | truenas_middleware/src/middlewared/middlewared/api/v25_04_0/system_reboot.py | from middlewared.api.base import BaseModel, single_argument_result
__all__ = ["SystemRebootInfoArgs", "SystemRebootInfoResult"]
class SystemRebootInfoArgs(BaseModel):
    """Empty arguments model (the call takes no parameters)."""
    pass
class RebootRequiredReason(BaseModel):
    """A single reason why a reboot is required."""
    code: str
    reason: str
@single_argument_result
class SystemRebootInfoResult(BaseModel):
    """Current boot identifier plus any pending reasons requiring a reboot."""
    boot_id: str
    reboot_required_reasons: list[RebootRequiredReason]
| 390 | Python | .py | 11 | 32 | 66 | 0.806452 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,318 | decorator.py | truenas_middleware/src/middlewared/middlewared/api/base/decorator.py | import asyncio
import functools
from typing import Callable
from .handler.accept import accept_params
from ..base.model import BaseModel
from middlewared.schema.processor import calculate_args_index
__all__ = ["api_method"]
def api_method(
accepts: type[BaseModel],
returns: type[BaseModel],
*,
audit: str | None = None,
audit_callback: bool = False,
audit_extended: Callable[..., str] | None = None,
rate_limit=True,
roles: list[str] | None = None,
private: bool = False,
):
"""
Mark a `Service` class method as an API method.
`accepts` and `returns` are classes derived from `BaseModel` that correspond to the method's call arguments and
return value.
`audit` is the message that will be logged to the audit log when the decorated function is called.
If `audit_callback` is `True` then an additional `audit_callback` argument will be prepended to the function
arguments list. This callback must be called with a single string argument that will be appended to the audit
message to be logged.
`audit_extended` is the function that takes the same arguments as the decorated function and returns the string
that will be appended to the audit message to be logged.
`rate_limit` specifies whether the method calls should be rate limited when calling without authentication.
`roles` is a list of user roles that will gain access to this method.
`private` is `True` when the method should not be exposed in the public API. By default, the method is public.
"""
if list(returns.model_fields.keys()) != ["result"]:
raise TypeError("`returns` model must only have one field called `result`")
def wrapper(func):
args_index = calculate_args_index(func, audit_callback)
if asyncio.iscoroutinefunction(func):
@functools.wraps(func)
async def wrapped(*args):
args = list(args[:args_index]) + accept_params(accepts, args[args_index:])
result = await func(*args)
return result
else:
@functools.wraps(func)
def wrapped(*args):
args = list(args[:args_index]) + accept_params(accepts, args[args_index:])
result = func(*args)
return result
wrapped.audit = audit
wrapped.audit_callback = audit_callback
wrapped.audit_extended = audit_extended
wrapped.rate_limit = rate_limit
wrapped.roles = roles or []
wrapped._private = private
# FIXME: This is only here for backwards compatibility and should be removed eventually
wrapped.accepts = []
wrapped.returns = []
wrapped.new_style_accepts = accepts
wrapped.new_style_returns = returns
return wrapped
return wrapper
| 2,839 | Python | .py | 61 | 39.032787 | 115 | 0.679362 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,319 | jsonschema.py | truenas_middleware/src/middlewared/middlewared/api/base/jsonschema.py | __all__ = ["get_json_schema"]
def get_json_schema(model):
    """Return the JSON-schema fragments for `model`'s fields, in field declaration order."""
    raw = model.model_json_schema()
    resolved = add_attrs(replace_refs(raw, raw.get("$defs", {})))
    return [resolved["properties"][name] for name in model.model_fields]
def replace_refs(data, defs=None):
    """Recursively inline `#/$defs/...` references of a JSON schema into the schema itself."""
    if isinstance(data, dict):
        if set(data.keys()) == {"$ref"}:
            # A pure reference node: substitute the referenced definition, then keep
            # recursing in case the definition itself contains references.
            data = defs[data["$ref"].removeprefix("#/$defs/")]
        return {key: replace_refs(child, defs) for key, child in data.items()}
    if isinstance(data, list):
        return [replace_refs(child, defs) for child in data]
    return data
def add_attrs(schema):
    """
    Recursively decorate a JSON schema with legacy middleware attributes
    (`_attrs_order_`, `_name_`, `_required_`, `title`) and wrap array `items`
    in a list.
    """
    # FIXME: This is only here for backwards compatibility and should be removed eventually
    if isinstance(schema, dict):
        if isinstance(schema.get("properties"), dict):
            schema = {
                **schema,
                "_attrs_order_": list(schema["properties"].keys()),
                "properties": {
                    k: {
                        **v,
                        "title": k,
                        "_name_": k,
                        "_required_": k in schema.get("required", [])
                    }
                    for k, v in schema["properties"].items()},
            }
        if schema.get("type") == "array" and "items" in schema and not isinstance(schema["items"], list):
            # Copy instead of assigning in place: when the "properties" branch above did
            # not run, `schema` is still the caller's dict and must not be mutated.
            schema = {**schema, "items": [schema["items"]]}  # FIXME: Non-standard compliant
        return {k: add_attrs(v) for k, v in schema.items()}
    elif isinstance(schema, list):
        return [add_attrs(s) for s in schema]
    else:
        return schema
| 1,631 | Python | .py | 38 | 31.973684 | 105 | 0.542009 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,320 | __init__.py | truenas_middleware/src/middlewared/middlewared/api/base/__init__.py | from .excluded import * # noqa
from .model import * # noqa
from .types import * # noqa
| 90 | Python | .py | 3 | 29 | 31 | 0.689655 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,321 | model.py | truenas_middleware/src/middlewared/middlewared/api/base/model.py | import copy
import functools
import inspect
from types import NoneType
import typing
from pydantic import BaseModel as PydanticBaseModel, ConfigDict, create_model, Field, model_serializer, Secret
from pydantic._internal._model_construction import ModelMetaclass
from pydantic.main import IncEx
from typing_extensions import Annotated
from middlewared.api.base.types.base import SECRET_VALUE
from middlewared.utils.lang import undefined
__all__ = ["BaseModel", "ForUpdateMetaclass", "single_argument_args", "single_argument_result"]
class BaseModel(PydanticBaseModel):
    """
    Base class for all API models: strict validation, unknown fields rejected,
    strings limited to 1024 characters (see `LongString` for longer ones).
    """
    model_config = ConfigDict(
        extra="forbid",
        strict=True,
        str_max_length=1024,
    )

    @classmethod
    def __pydantic_init_subclass__(cls, **kwargs: typing.Any) -> None:
        # Forbid `Secret` as a member of an Optional/Union at subclass definition
        # time: the whole field must be made Private instead (see the TypeError below).
        for k, v in cls.model_fields.items():
            if typing.get_origin(v.annotation) is typing.Union:
                for option in typing.get_args(v.annotation):
                    if typing.get_origin(option) is Secret:
                        def dump(t):
                            return str(t).replace("typing.", "").replace("middlewared.api.base.types.base.", "")

                        raise TypeError(
                            f"Model {cls.__name__} has field {k} defined as {dump(v.annotation)}. {dump(option)} "
                            "cannot be a member of an Optional or a Union, please make the whole field Private."
                        )

    def model_dump(
        self,
        *,
        mode: typing.Literal['json', 'python'] | str = 'python',
        include: IncEx = None,
        exclude: IncEx = None,
        context: dict[str, typing.Any] | None = None,
        by_alias: bool = False,
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        round_trip: bool = False,
        warnings: bool | typing.Literal['none', 'warn', 'error'] = True,
        serialize_as_any: bool = False,
    ) -> dict[str, typing.Any]:
        # Same as pydantic's `model_dump`, but supplies a `fallback` that knows how to
        # serialize `Secret` values according to the `expose_secrets` context flag.
        return self.__pydantic_serializer__.to_python(
            self,
            mode=mode,
            by_alias=by_alias,
            include=include,
            exclude=exclude,
            context=context,
            exclude_unset=exclude_unset,
            exclude_defaults=exclude_defaults,
            exclude_none=exclude_none,
            round_trip=round_trip,
            warnings=warnings,
            serialize_as_any=serialize_as_any,
            fallback=functools.partial(self._model_dump_fallback, context),
        )

    def _model_dump_fallback(self, context, value):
        # Reveal secrets only when the serialization context explicitly asks for it;
        # otherwise substitute the fixed placeholder (SECRET_VALUE).
        if isinstance(value, Secret):
            if context["expose_secrets"]:
                return value.get_secret_value()
            else:
                return SECRET_VALUE

        return value

    @classmethod
    def from_previous(cls, value):
        """
        Converts model value from a preceding API version to this API version. `value` can be modified in-place.

        :param value: value of the same model in the preceding API version.
        :return: value in this API version.
        """
        return value

    @classmethod
    def to_previous(cls, value):
        """
        Converts model value from this API version to a preceding API version. `value` can be modified in-place.

        :param value: value in this API version.
        :return: value of the same model in the preceding API version.
        """
        return value
class ForUpdateMetaclass(ModelMetaclass):
    """
    Using this metaclass on a model will change all of its fields default values to `undefined`.
    Such a model might be instantiated with any subset of its fields, which can be useful to validate request bodies
    for requests with PATCH semantics.
    """

    def __new__(mcls, name, bases, namespaces, **kwargs):
        # `create_model` below re-enters this metaclass; the flag stops that second
        # pass from patching the already-patched model again (avoiding recursion).
        skip_patching = kwargs.pop("__ForUpdateMetaclass_skip_patching", False)
        cls = super().__new__(mcls, name, bases, namespaces, **kwargs)
        if skip_patching:
            return cls

        # Rebuild the model with every field defaulting to the `undefined` sentinel;
        # `_ForUpdateSerializerMixin` then omits fields that were never set.
        return create_model(
            cls.__name__,
            __base__=(cls, _ForUpdateSerializerMixin),
            __module__=cls.__module__,
            __cls_kwargs__={"__ForUpdateMetaclass_skip_patching": True},
            **{
                k: _field_for_update(v)
                for k, v in cls.model_fields.items()
            },
        )
class _ForUpdateSerializerMixin(PydanticBaseModel):
    """Serializer mixin used by `ForUpdateMetaclass`-patched models."""
    @model_serializer(mode="wrap")
    def serialize_model(self, serializer):
        # Drop fields still carrying the `undefined` sentinel, i.e. fields the caller
        # never supplied.
        return {k: v for k, v in serializer(self).items() if v != undefined}
def _field_for_update(field):
    """Clone a pydantic field, replacing its default with the `undefined` sentinel."""
    patched = copy.deepcopy(field)
    patched.default = undefined
    patched.default_factory = None
    # `create_model` expects an (annotation, FieldInfo) pair per field.
    return patched.annotation, patched
def single_argument_args(name: str):
    """
    Model class decorator used to define an arguments model for a method that accepts a single dictionary argument.

    :param name: name for that single argument.
    :return: a model class consisting of a unique `name` field whose type is the decorated
        class. The decorated class' name is preserved.
    """
    def decorate(klass):
        wrapper_model = create_model(
            klass.__name__,
            __base__=(BaseModel,),
            __module__=klass.__module__,
            **{name: Annotated[klass, Field()]},
        )
        # Propagate the version-conversion hooks so API version adapters keep working.
        wrapper_model.from_previous = classmethod(klass.from_previous)
        wrapper_model.to_previous = classmethod(klass.to_previous)
        return wrapper_model

    return decorate
def single_argument_result(klass, klass_name=None):
    """
    Can be used as:
    * Decorator for a class. In that case, it will create a class that represents a return value for a function that
      returns a single dictionary, represented by the decorated class.
    * Standalone model generator. Will return a model class named `klass_name` that consists of a single field
      represented by `klass` (in that case, `klass` can be a primitive type).

    :param klass: class or a primitive type to create model from.
    :param klass_name: required, when being called as a standalone model generator. Returned class will have that name.
        (otherwise, the decorated class name will be preserved).
    :return: a model class that consists of unique `result` field that corresponds to `klass`.
    """
    if klass is None:
        klass = NoneType

    # Primitive types carry no usable `__name__` for a model, so the caller must
    # provide one explicitly.
    if klass.__module__ == "builtins":
        if klass_name is None:
            raise TypeError("You must specify class name when using `single_argument_result` for built-in types")
    else:
        klass_name = klass_name or klass.__name__

    model = create_model(
        klass_name,
        __base__=(BaseModel,),
        # `inspect.stack()[1]` is the caller's frame: the generated model is attributed
        # to the module that invoked this function.
        # NOTE(review): `inspect.getmodule` returns a module *object* while pydantic's
        # `create_model` documents `__module__` as a module name string — confirm intended.
        __module__=inspect.getmodule(inspect.stack()[1][0]),
        **{"result": Annotated[klass, Field()]},
    )
    if issubclass(klass, BaseModel):
        # Propagate the version-conversion hooks so adapters can convert the result.
        model.from_previous = classmethod(klass.from_previous)
        model.to_previous = classmethod(klass.to_previous)

    return model
| 6,977 | Python | .py | 160 | 34.7625 | 119 | 0.634247 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,322 | excluded.py | truenas_middleware/src/middlewared/middlewared/api/base/excluded.py | from typing import Any
from pydantic_core import CoreSchema, core_schema, PydanticCustomError
from pydantic import Field, GetCoreSchemaHandler
from pydantic.json_schema import SkipJsonSchema
from middlewared.utils.lang import undefined
__all__ = ["Excluded", "excluded_field"]
class ExcludedField(Any):
    """Field type that rejects any supplied value; used to remove inherited model fields."""
    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        def validate(value, info):
            # Providing any value for an excluded field is an error, mirroring
            # pydantic's own "extra inputs" wording.
            raise PydanticCustomError("", "Extra inputs are not permitted")

        return core_schema.with_info_after_validator_function(validate, handler(Any))
# Hidden from the generated JSON schema and rejected when supplied by the caller.
Excluded = SkipJsonSchema[ExcludedField]
def excluded_field():
    """Field factory for `Excluded` annotations: defaults to `undefined`, never serialized."""
    return Field(default=undefined, exclude=True)
| 760 | Python | .py | 17 | 40.058824 | 85 | 0.763984 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,323 | result.py | truenas_middleware/src/middlewared/middlewared/api/base/handler/result.py | __all__ = ["serialize_result"]
def serialize_result(model, result, expose_secrets):
    """Serialize a method's return value, masking `Secret` fields unless `expose_secrets`."""
    if expose_secrets:
        # Nothing needs masking; hand the raw value back untouched.
        return result

    serialized = model(result=result).model_dump(
        context={"expose_secrets": expose_secrets},
        warnings=False,
        by_alias=True,
    )
    return serialized["result"]
| 291 | Python | .py | 9 | 26.111111 | 52 | 0.65233 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,324 | accept.py | truenas_middleware/src/middlewared/middlewared/api/base/handler/accept.py | from pydantic_core import ValidationError
from middlewared.api.base.model import BaseModel
from middlewared.service_exception import CallError, ValidationErrors
def accept_params(model: type[BaseModel], args: list, *, exclude_unset=False, expose_secrets=True) -> list:
    """
    Accepts a list of `args` for a method call and validates it using `model`.

    Parameters are matched to model fields in declaration order. Raises
    `ValidationErrors` if any parameter is invalid.

    :param model: `BaseModel` that defines method args.
    :param args: a list of method args.
    :param exclude_unset: if true, will not append default parameters to the list.
    :param expose_secrets: if false, will replace `Private` parameters with a placeholder.
    :return: a validated list of method args.
    """
    dump = validate_model(
        model,
        model_dict_from_list(model, args),
        exclude_unset=exclude_unset,
        expose_secrets=expose_secrets,
    )
    field_names = list(model.model_fields)
    if exclude_unset:
        # Only return values for the arguments the caller actually supplied.
        field_names = field_names[:len(args)]
    return [dump[name] for name in field_names]
def model_dict_from_list(model: type[BaseModel], args: list) -> dict:
    """
    Zip a positional argument list into a dict keyed by `model` field names.

    Fields are consumed in declaration order: given a model with fields `b` and `a`
    and `args` equal to `[1, 2]`, the result is `{"b": 1, "a": 2}`.

    :param model: `BaseModel` that defines method args.
    :param args: a list of method args.
    :return: a dictionary of method args.
    :raises CallError: if more arguments are supplied than the model has fields.
    """
    if len(args) > len(model.model_fields):
        raise CallError(f"Too many arguments (expected {len(model.model_fields)}, found {len(args)})")

    # `zip` stops at the shorter sequence, so missing trailing arguments are omitted.
    return dict(zip(model.model_fields.keys(), args))
def validate_model(model: type[BaseModel], data: dict, *, exclude_unset=False, expose_secrets=True) -> dict:
    """
    Validates `data` against the `model`, sanitizes values, sets defaults.

    :param model: `BaseModel` subclass.
    :param data: provided data.
    :param exclude_unset: if true, will not add default values.
    :param expose_secrets: if false, will replace `Private` fields with a placeholder.
    :return: validated data.
    :raises ValidationErrors: if any validation errors occur.
    """
    try:
        instance = model(**data)
    except ValidationError as exc:
        # Re-raise pydantic's errors in the middleware's own format, one entry per
        # offending field path.
        verrors = ValidationErrors()
        for error in exc.errors():
            field_path = ".".join(str(part) for part in error["loc"])
            verrors.add(field_path, error["msg"])
        raise verrors from None

    return instance.model_dump(
        context={"expose_secrets": expose_secrets},
        exclude_unset=exclude_unset,
        warnings=False,
        by_alias=True,
    )
| 2,846 | Python | .py | 59 | 42.338983 | 108 | 0.691612 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,325 | dump_params.py | truenas_middleware/src/middlewared/middlewared/api/base/handler/dump_params.py | import itertools
import typing
from pydantic import Secret
from middlewared.api.base import BaseModel, SECRET_VALUE
from middlewared.service_exception import ValidationErrors
from .accept import accept_params
from .inspect import model_field_is_model, model_field_is_list_of_models
__all__ = ["dump_params"]
def dump_params(model: type[BaseModel], args: list, expose_secrets: bool) -> list:
    """
    Prepare a method call's `args` for printing/logging.

    :param model: `BaseModel` that defines method args.
    :param args: a list of method args.
    :param expose_secrets: if false, will replace `Private` parameters with a placeholder.
    :return: a list of method call arguments ready to be printed.
    """
    try:
        return accept_params(model, args, exclude_unset=True, expose_secrets=expose_secrets)
    except ValidationErrors:
        # The arguments did not validate, so fall back to redacting secrets
        # field-by-field (extra args without a matching field pass through as-is).
        fields = model.model_fields.values()
        return [
            arg if field is None else remove_secrets(field.annotation, arg)
            for field, arg in itertools.zip_longest(fields, args, fillvalue=None)
        ]
def remove_secrets(model: type[BaseModel], value):
    """
    Removes `Private` values from a model value.

    :param model: `BaseModel` that corresponds to `value`.
    :param value: value that potentially contains `Private` data.
    :return: `value` with `Private` parameters replaced with a placeholder.
    """
    # The branch order is load-bearing: a dict/list whose annotation is NOT a nested
    # model falls through to the `Secret` check below.
    if isinstance(value, dict) and (nested_model := model_field_is_model(model)):
        # Recurse into the nested model's fields; keys absent from `value` are skipped.
        return {
            k: remove_secrets(v.annotation, value[k])
            for k, v in nested_model.model_fields.items()
            if k in value
        }
    elif isinstance(value, list) and (nested_model := model_field_is_list_of_models(model)):
        return [remove_secrets(nested_model, v) for v in value]
    elif typing.get_origin(model) is Secret:
        return SECRET_VALUE
    else:
        return value
| 1,994 | Python | .py | 43 | 40.348837 | 102 | 0.701493 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,326 | inspect.py | truenas_middleware/src/middlewared/middlewared/api/base/handler/inspect.py | import typing
from middlewared.api.base import BaseModel
def model_field_is_model(model) -> type[BaseModel] | None:
    """
    Return `model` itself if it is an API model class; otherwise return `None`.

    :param model: potentially, API model.
    :return: `model` or `None`
    """
    if not isinstance(model, type):
        return None
    if not issubclass(model, BaseModel):
        return None
    return model
def model_field_is_list_of_models(model) -> type[BaseModel] | None:
    """
    If `model` annotates `list[X]` for exactly one `X`, return `X`; otherwise `None`.

    :param model: potentially, a model that represents a list of API models.
    :return: nested API model or `None`
    """
    if typing.get_origin(model) is not list:
        return None
    type_args = typing.get_args(model)
    if len(type_args) == 1:
        return type_args[0]
    return None
| 787 | Python | .py | 18 | 38.888889 | 110 | 0.684555 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,327 | version.py | truenas_middleware/src/middlewared/middlewared/api/base/handler/version.py | import enum
from types import ModuleType
from middlewared.api.base import BaseModel, ForUpdateMetaclass
from .accept import validate_model
from .inspect import model_field_is_model, model_field_is_list_of_models
class Direction(enum.StrEnum):
    """Conversion direction used by `APIVersionsAdapter` when stepping between versions."""
    DOWNGRADE = "DOWNGRADE"
    UPGRADE = "UPGRADE"
class APIVersionDoesNotExistException(Exception):
    """Raised when a requested API version is not registered with the adapter."""

    def __init__(self, version: str):
        self.version = version
        message = f"API Version {version!r} does not exist"
        super().__init__(message)
class APIVersionDoesNotContainModelException(Exception):
    """Raised when a model name cannot be found in a given API version."""

    def __init__(self, version: str, model_name: str):
        self.version = version
        self.model_name = model_name
        message = f"API version {version!r} does not contain model {model_name!r}"
        super().__init__(message)
class APIVersion:
    """A named API version together with the models it exposes."""

    def __init__(self, version: str, models: dict[str, type[BaseModel]]):
        """
        :param version: API version name
        :param models: a dictionary which keys are model names and values are models used in the API version
        """
        self.version: str = version
        self.models: dict[str, type[BaseModel]] = models

    @classmethod
    def from_module(cls, version: str, module: ModuleType) -> "APIVersion":
        """
        Create `APIVersion` from a module (e.g. `middlewared.api.v25_04_0`),
        collecting every `BaseModel` subclass the module exposes.

        :param version: API version name
        :param module: module object
        :return: `APIVersion` instance
        """
        models = {}
        for attribute_name in dir(module):
            attribute = getattr(module, attribute_name)
            if isinstance(attribute, type) and issubclass(attribute, BaseModel):
                models[attribute_name] = attribute

        return cls(version, models)

    def __repr__(self):
        return f"<APIVersion {self.version}>"
class APIVersionsAdapter:
    """
    Converts method parameters and return results between different API versions.
    """

    def __init__(self, versions: list[APIVersion]):
        """
        :param versions: A chronologically sorted list of API versions.
        """
        self.versions: dict[str, APIVersion] = {version.version: version for version in versions}
        self.versions_history: list[str] = list(self.versions.keys())
        # The last (newest) version is the one the code base speaks natively.
        self.current_version: str = self.versions_history[-1]

    def adapt(self, value: dict, model_name: str, version1: str, version2: str) -> dict:
        """
        Adapts `value` (that matches a model identified by `model_name`) from API `version1` to API `version2`).

        :param value: a value to convert
        :param model_name: a name of the model. Must exist in all API versions, including intermediate ones, or
            `APIVersionDoesNotContainModelException` will be raised.
        :param version1: original API version from which the `value` comes from
        :param version2: target API version that needs `value`
        :return: converted value
        """
        try:
            version1_index = self.versions_history.index(version1)
        except ValueError:
            raise APIVersionDoesNotExistException(version1) from None
        try:
            version2_index = self.versions_history.index(version2)
        except ValueError:
            raise APIVersionDoesNotExistException(version2) from None

        current_version = self.versions[version1]
        try:
            current_version_model = current_version.models[model_name]
        except KeyError:
            raise APIVersionDoesNotContainModelException(current_version.version, model_name)
        # Normalize `value` (validate, coerce, fill defaults) against the source
        # version's model before stepping through intermediate versions.
        value = validate_model(current_version_model, value)

        if version1_index < version2_index:
            step = 1
            direction = Direction.UPGRADE
        else:
            step = -1
            direction = Direction.DOWNGRADE

        # Walk one version at a time so every intermediate version's conversion
        # hooks (`from_previous` / `to_previous`) are applied in order.
        for version_index in range(version1_index + step, version2_index + step, step):
            new_version = self.versions[self.versions_history[version_index]]

            value = self._adapt_model(value, model_name, current_version, new_version, direction)

            current_version = new_version

        return value

    def _adapt_model(self, value: dict, model_name: str, current_version: APIVersion, new_version: APIVersion,
                     direction: Direction):
        # Convert `value` a single step, from `current_version`'s model to
        # `new_version`'s model of the same name.
        try:
            current_model = current_version.models[model_name]
        except KeyError:
            raise APIVersionDoesNotContainModelException(current_version.version, model_name) from None

        try:
            new_model = new_version.models[model_name]
        except KeyError:
            raise APIVersionDoesNotContainModelException(new_version.version, model_name) from None

        return self._adapt_value(value, current_model, new_model, direction)

    def _adapt_value(self, value: dict, current_model: type[BaseModel], new_model: type[BaseModel],
                     direction: Direction):
        # First, recursively adapt nested model values (single models and lists of
        # models) for fields that exist in both versions.
        # NOTE(review): `__class__.__name__` on a model class yields the *metaclass*
        # name (e.g. `ModelMetaclass`), not the model's own name; if the intent is to
        # compare model names this should probably be `__name__` — confirm.
        for k in value:
            if k in current_model.model_fields and k in new_model.model_fields:
                current_model_field = current_model.model_fields[k].annotation
                new_model_field = new_model.model_fields[k].annotation
                if (
                    isinstance(value[k], dict) and
                    (current_nested_model := model_field_is_model(current_model_field)) and
                    (new_nested_model := model_field_is_model(new_model_field)) and
                    current_nested_model.__class__.__name__ == new_nested_model.__class__.__name__
                ):
                    value[k] = self._adapt_value(value[k], current_nested_model, new_nested_model, direction)
                elif (
                    isinstance(value[k], list) and
                    (current_nested_model := model_field_is_list_of_models(current_model_field)) and
                    (current_nested_model := model_field_is_model(current_nested_model)) and
                    (new_nested_model := model_field_is_list_of_models(new_model_field)) and
                    (new_nested_model := model_field_is_model(new_nested_model)) and
                    current_nested_model.__class__.__name__ == new_nested_model.__class__.__name__
                ):
                    value[k] = [
                        self._adapt_value(v, current_nested_model, new_nested_model, direction)
                        for v in value[k]
                    ]

        # For-update models keep absent fields absent (PATCH semantics); any other
        # model gets the target version's defaults filled in.
        if new_model.__class__ is not ForUpdateMetaclass:
            for k, field in new_model.model_fields.items():
                if k not in value and not field.is_required():
                    value[k] = field.get_default()

        # Let the models' own conversion hooks transform the value for this step.
        match direction:
            case Direction.DOWNGRADE:
                value = current_model.to_previous(value)
            case Direction.UPGRADE:
                value = new_model.from_previous(value)

        # Drop fields that exist in the source model but not in the target model.
        for k in list(value):
            if k in current_model.model_fields and k not in new_model.model_fields:
                value.pop(k)

        return value
| 7,068 | Python | .py | 141 | 38.595745 | 112 | 0.618184 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,328 | user.py | truenas_middleware/src/middlewared/middlewared/api/base/types/user.py | import string
from annotated_types import Ge, Le
from pydantic.functional_validators import AfterValidator
from typing_extensions import Annotated
from middlewared.utils.sid import sid_is_valid
__all__ = ["LocalUsername", "RemoteUsername", "LocalUID", "LocalGID", "SID"]
# TRUENAS_IDMAP_MAX + 1
INCUS_IDMAP_MIN = 2147000001
# Each unpriviliged container with isolated idmap will require at least 65536.
# Lets reserve enough so we can run at least 7 of these.
# Increasing this would go above signed 32 bits (>= 2147483648) which might
# cause problems for programs that do not expect it (e.g. filesystems like
# devpts and some syscalls like setfsuid())
INCUS_MAX_ISOLATED_CONTAINER = 7
INCUS_IDMAP_COUNT = 65536 * INCUS_MAX_ISOLATED_CONTAINER
INCUS_IDMAP_MAX = INCUS_IDMAP_MIN + INCUS_IDMAP_COUNT
TRUENAS_IDMAP_DEFAULT_LOW = 90000001
DEFAULT_VALID_CHARS = string.ascii_letters + string.digits + '_' + '-' + '$' + '.'
DEFAULT_VALID_START = string.ascii_letters + '_'
DEFAULT_MAX_LENGTH = 32
def validate_username(
val: str,
valid_chars: str = DEFAULT_VALID_CHARS,
valid_start_chars : str | None = DEFAULT_VALID_START,
max_length: int | None = DEFAULT_MAX_LENGTH
) -> str:
val_len = len(val)
assert val_len > 0, 'Username must be at least 1 character in length'
if max_length is not None:
assert val_len <= max_length, f'Username cannot exceed {max_length} charaters in length'
if valid_start_chars is not None:
assert val[0] in valid_start_chars, 'Username must start with a letter or an underscore'
assert '$' not in val or val[-1] == '$', 'Username must end with a dollar sign character'
assert all(char in valid_chars for char in val), f'Valid characters for a username are: {", ".join(valid_chars)!r}'
return val
def validate_local_username(val: str) -> str:
    """Validate a username for a local account using the default (useradd-like) rules."""
    # see man 8 useradd, specifically the CAVEATS section
    # NOTE: we are ignoring the man page's recommendation for insistence
    # upon the starting character of a username be a lower-case letter.
    # We aren't enforcing this for maximum backwards compatibility
    return validate_username(val)
def validate_remote_username(val: str) -> str:
    """Validate a username provided by a directory service (laxer than local rules)."""
    # Names returned by nss_winbind may embed a domain, with '\\' used as the
    # separator between domain and username; they are not subject to the local
    # start-character or length restrictions.
    return validate_username(
        val,
        valid_chars=DEFAULT_VALID_CHARS + '\\',
        valid_start_chars=None,
        max_length=None,
    )
def validate_sid(value: str) -> str:
    """Normalize a SID string (strip whitespace, uppercase) and assert that it is well-formed."""
    normalized = value.strip().upper()
    assert sid_is_valid(normalized), ('SID is malformed. See MS-DTYP Section 2.4 for SID type specifications. Typically '
                                      'SIDs refer to existing objects on the local or remote server and so an appropriate '
                                      'value should be queried prior to submitting to API endpoints.')
    return normalized
# Username validated with the local (useradd-like) rules.
LocalUsername = Annotated[str, AfterValidator(validate_local_username)]
# Username validated with the laxer directory-service rules (may include a domain separator).
RemoteUsername = Annotated[str, AfterValidator(validate_remote_username)]
# UIDs/GIDs for local accounts: non-negative and below TRUENAS_IDMAP_DEFAULT_LOW.
LocalUID = Annotated[int, Ge(0), Le(TRUENAS_IDMAP_DEFAULT_LOW - 1)]
LocalGID = Annotated[int, Ge(0), Le(TRUENAS_IDMAP_DEFAULT_LOW - 1)]
# Windows security identifier, normalized and validated by `validate_sid`.
SID = Annotated[str, AfterValidator(validate_sid)]
| 3,202 | Python | .py | 57 | 51.754386 | 119 | 0.726633 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,329 | iscsi.py | truenas_middleware/src/middlewared/middlewared/api/base/types/iscsi.py | from typing import Literal, TypeAlias
__all__ = ["IscsiAuthType"]
# iSCSI authentication schemes: no auth, CHAP, or mutual CHAP.
IscsiAuthType: TypeAlias = Literal['NONE', 'CHAP', 'CHAP_MUTUAL']
| 134 | Python | .py | 3 | 43 | 65 | 0.736434 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,330 | __init__.py | truenas_middleware/src/middlewared/middlewared/api/base/types/__init__.py | from .base import * # noqa
from .iscsi import * # noqa
from .user import * # noqa
| 85 | Python | .py | 3 | 27.333333 | 28 | 0.670732 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,331 | base.py | truenas_middleware/src/middlewared/middlewared/api/base/types/base.py | from typing import Any
from pydantic import AfterValidator, BeforeValidator, Field, GetCoreSchemaHandler, HttpUrl as _HttpUrl, PlainSerializer
from pydantic_core import CoreSchema, core_schema, PydanticKnownError
from typing_extensions import Annotated
from middlewared.utils.lang import undefined
__all__ = ["HttpUrl", "LongString", "NonEmptyString", "SECRET_VALUE"]
# Validate as pydantic's HttpUrl but carry the value as a plain string afterwards.
HttpUrl = Annotated[_HttpUrl, AfterValidator(str)]
class LongStringWrapper:
    """
    We have to box our long strings in this class to bypass the global limit for string length.
    """
    # 2**31 - 1: the SQLite TEXT length limit (see `LongString` below).
    max_length = 2 ** 31 - 1

    def __init__(self, value):
        # Unwrap when given an already-wrapped value.
        if isinstance(value, LongStringWrapper):
            value = value.value

        if not isinstance(value, str):
            raise PydanticKnownError("string_type")

        if len(value) > self.max_length:
            raise PydanticKnownError("string_too_long", {"max_length": self.max_length})

        self.value = value

    @classmethod
    def __get_pydantic_core_schema__(
        cls, source_type: Any, handler: GetCoreSchemaHandler
    ) -> CoreSchema:
        # JSON input is accepted as a plain string; python input must already be a
        # LongStringWrapper instance (the `BeforeValidator` on `LongString` wraps it).
        return core_schema.json_or_python_schema(
            json_schema=core_schema.str_schema(),
            python_schema=core_schema.no_info_after_validator_function(
                cls,
                core_schema.is_instance_schema(LongStringWrapper),
            ),
        )
# By default, our strings are no more than 1024 characters long. This string is 2**31-1 characters long (SQLite limit).
LongString = Annotated[
    LongStringWrapper,
    BeforeValidator(LongStringWrapper),
    # Serialize back to the raw `str`; the `undefined` sentinel passes through untouched.
    PlainSerializer(lambda x: undefined if x == undefined else x.value),
]

# A string with at least one character.
NonEmptyString = Annotated[str, Field(min_length=1)]

# Placeholder substituted for secret values when secrets are not exposed.
SECRET_VALUE = "********"
| 1,744 | Python | .py | 39 | 38.076923 | 119 | 0.69627 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,332 | legacy_api_method.py | truenas_middleware/src/middlewared/middlewared/api/base/server/legacy_api_method.py | from typing import TYPE_CHECKING
from middlewared.api.base.handler.accept import model_dict_from_list
from middlewared.api.base.handler.dump_params import dump_params
from middlewared.api.base.handler.version import APIVersionsAdapter
from middlewared.api.base.server.method import Method
from middlewared.utils.service.call import MethodNotFoundError
from middlewared.utils.service.crud import real_crud_method
if TYPE_CHECKING:
from middlewared.api.base.server.ws_handler.rpc import RpcWebSocketApp
from middlewared.main import Middleware
class LegacyAPIMethod(Method):
    """
    Represents a middleware legacy API method used in JSON-RPC server. Converts method parameters and return value
    between most recent API version (used in the code) and predetermined legacy API version.
    """

    def __init__(self, middleware: "Middleware", name: str, api_version: str, adapter: APIVersionsAdapter):
        """
        :param middleware: `Middleware` instance
        :param name: method name
        :param api_version: API version name used to convert parameters and return value
        :param adapter: `APIVersionsAdapter` instance
        """
        super().__init__(middleware, name)
        self.api_version = api_version
        self.adapter = adapter

        # CRUD service methods are wrapped; unwrap to reach the real method object
        # that carries the accepts/returns models.
        methodobj = self.methodobj
        if crud_methodobj := real_crud_method(methodobj):
            methodobj = crud_methodobj
        if hasattr(methodobj, "new_style_accepts"):
            self.accepts_model = methodobj.new_style_accepts
            self.returns_model = methodobj.new_style_returns
        else:
            # Old-style method without new-style models: calls pass through unadapted.
            self.accepts_model = None
            self.returns_model = None

    async def call(self, app: "RpcWebSocketApp", params):
        # Upgrade legacy params to the current version, call, then downgrade the result.
        if self.accepts_model:
            return self._adapt_result(await super().call(app, self._adapt_params(params)))

        return await super().call(app, params)

    def _adapt_params(self, params):
        try:
            legacy_accepts_model = self.adapter.versions[self.api_version].models[self.accepts_model.__name__]
        except KeyError:
            # The legacy API does not contain signature definition for this method, which means it didn't exist
            # when that API was released.
            raise MethodNotFoundError(*self.name.rsplit(".", 1))

        params_dict = model_dict_from_list(legacy_accepts_model, params)
        adapted_params_dict = self.adapter.adapt(
            params_dict,
            legacy_accepts_model.__name__,
            self.api_version,
            self.adapter.current_version,
        )
        # Back to a positional list, ordered by the current model's fields.
        return [adapted_params_dict[field] for field in self.accepts_model.model_fields]

    def _adapt_result(self, result):
        # The adapter works on whole models, so wrap the value in the single
        # `result` field of the returns model before converting.
        return self.adapter.adapt(
            {"result": result},
            self.returns_model.__name__,
            self.adapter.current_version,
            self.api_version,
        )["result"]

    def dump_args(self, params):
        # NOTE(review): `params` arrive in the legacy API shape but are dumped with the
        # *current* accepts model here; `dump_params` falls back to per-field redaction
        # when validation fails — confirm this is intended.
        if self.accepts_model:
            return dump_params(self.accepts_model, params, False)

        return super().dump_args(params)
| 3,087 | Python | .py | 64 | 39.625 | 114 | 0.680186 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,333 | app.py | truenas_middleware/src/middlewared/middlewared/api/base/server/app.py | import logging
import uuid
from middlewared.auth import SessionManagerCredentials, AuthenticationContext
from middlewared.utils.origin import ConnectionOrigin
logger = logging.getLogger(__name__)
class App:
    """Base per-connection session state shared by all API transports."""

    def __init__(self, origin: ConnectionOrigin):
        # Transport flags; subclasses set the one that applies to True.
        self.websocket = False
        self.rest = False

        self.origin = origin
        self.session_id = str(uuid.uuid4())

        # Authentication state, populated once the client logs in.
        self.authenticated = False
        self.authentication_context: AuthenticationContext = AuthenticationContext()
        self.authenticated_credentials: SessionManagerCredentials | None = None

        # Whether pickled Python exceptions may be included in error responses.
        self.py_exceptions = False
| 626 | Python | .py | 15 | 35.933333 | 84 | 0.744646 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,334 | method.py | truenas_middleware/src/middlewared/middlewared/api/base/server/method.py | import types
from typing import TYPE_CHECKING
from middlewared.job import Job
if TYPE_CHECKING:
from middlewared.api.base.server.ws_handler.rpc import RpcWebSocketApp
from middlewared.main import Middleware
class Method:
    """
    Represents a middleware API method used in JSON-RPC server.
    """

    def __init__(self, middleware: "Middleware", name: str):
        """
        :param middleware: `Middleware` instance
        :param name: method name
        """
        self.middleware = middleware
        self.name = name
        self.serviceobj, self.methodobj = self.middleware.get_method(self.name)

    async def call(self, app: "RpcWebSocketApp", params: list):
        """
        Calls the method in the context of a given `app`.

        :param app: `RpcWebSocketApp` instance.
        :param params: method arguments.
        :return: method return value.
        """
        methodobj = self.methodobj

        # Authorization is checked against the real method object before any mock
        # is substituted below.
        await self.middleware.authorize_method_call(app, self.name, methodobj, params)

        if mock := self.middleware._mock_method(self.name, params):
            methodobj = mock

        result = await self.middleware.call_with_audit(self.name, self.serviceobj, methodobj, params, app)
        # Normalize results that are not directly JSON-serializable: jobs become
        # their id, (async) generators are drained into lists.
        if isinstance(result, Job):
            result = result.id
        elif isinstance(result, types.GeneratorType):
            result = list(result)
        elif isinstance(result, types.AsyncGeneratorType):
            result = [i async for i in result]
        return result

    def dump_args(self, params: list) -> list:
        """
        Dumps the method call params (i.e., removes secrets).

        :param params: method call arguments.
        :return: dumped method call arguments.
        """
        return self.middleware.dump_args(params, method_name=self.name)
| 1,811 | Python | .py | 44 | 33.136364 | 106 | 0.656784 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,335 | rpc_factory.py | truenas_middleware/src/middlewared/middlewared/api/base/server/ws_handler/rpc_factory.py | from typing import Callable, TYPE_CHECKING
from ..method import Method
from .rpc import RpcWebSocketHandler
if TYPE_CHECKING:
from middlewared.main import Middleware
def create_rpc_ws_handler(middleware: "Middleware", method_factory: Callable[["Middleware", str], Method]):
    """
    Creates a `RpcWebSocketHandler` instance.

    :param middleware: `Middleware` instance.
    :param method_factory: a callable that creates `Method` instance. Will be called for each discovered middleware
        method.
    :return: `RpcWebSocketHandler` instance.
    """
    methods = {}
    for service_name, service in middleware.get_services().items():
        for attr_name in dir(service):
            # Skip private attributes; anything non-callable is not a method.
            if attr_name.startswith("_") or not callable(getattr(service, attr_name)):
                continue

            full_name = f"{service_name}.{attr_name}"
            methods[full_name] = method_factory(middleware, full_name)

    return RpcWebSocketHandler(middleware, methods)
| 1,028 | Python | .py | 23 | 37.434783 | 115 | 0.696088 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,336 | __init__.py | truenas_middleware/src/middlewared/middlewared/api/base/server/ws_handler/__init__.py | # -*- coding=utf-8 -*-
import logging
logger = logging.getLogger(__name__)
__all__ = []
| 90 | Python | .py | 4 | 21 | 36 | 0.619048 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,337 | rpc.py | truenas_middleware/src/middlewared/middlewared/api/base/server/ws_handler/rpc.py | import asyncio
import binascii
from collections import defaultdict
import enum
import errno
import pickle
import sys
import traceback
from typing import Any, Callable
from aiohttp.http_websocket import WSCloseCode, WSMessage
from aiohttp.web import WebSocketResponse, WSMsgType
import jsonschema
from truenas_api_client import json
from truenas_api_client.jsonrpc import JSONRPCError
from middlewared.schema import Error
from middlewared.service_exception import (CallException, CallError, ValidationError, ValidationErrors, adapt_exception,
get_errname)
from middlewared.utils.debug import get_frame_details
from middlewared.utils.limits import MsgSizeError, MsgSizeLimit, parse_message
from middlewared.utils.lock import SoftHardSemaphore, SoftHardSemaphoreLimit
from middlewared.utils.origin import ConnectionOrigin
from .base import BaseWebSocketHandler
from ..app import App
from ..method import Method
# JSON Schema validating every incoming JSON-RPC 2.0 request envelope.
# "params" and "id" are optional; an absent "id" denotes a notification.
REQUEST_SCHEMA = {
    "type": "object",
    "additionalProperties": False,
    "required": ["jsonrpc", "method"],
    "properties": {
        "jsonrpc": {"enum": ["2.0"]},
        "method": {"type": "string"},
        "params": {"type": "array"},
        "id": {"type": ["null", "number", "string"]},
    }
}
class RpcWebSocketAppEvent(enum.Enum):
    """Connection lifecycle events that `RpcWebSocketApp` fires callbacks for."""
    MESSAGE = 1
    CLOSE = 2
class RpcWebSocketApp(App):
    """Per-connection application state for a JSON-RPC 2.0 websocket client.

    Wraps the raw `WebSocketResponse` with helpers to send results, TrueNAS-format
    errors, notifications, and to track event subscriptions and lifecycle callbacks.
    """

    def __init__(self, middleware: "Middleware", origin: ConnectionOrigin, ws: WebSocketResponse):
        super().__init__(origin)

        self.websocket = True

        self.middleware = middleware
        self.ws = ws
        # Per-connection concurrency limiter (soft limit 10, hard limit 20 — see
        # `SoftHardSemaphore`).
        self.softhardsemaphore = SoftHardSemaphore(10, 20)
        # RpcWebSocketAppEvent value -> list of registered callbacks.
        self.callbacks = defaultdict(list)
        # Subscription ident -> event name, for non-event-source subscriptions.
        self.subscriptions = {}

    def send(self, data):
        """Serialize `data` as JSON and send it on the websocket.

        Safe to call from any thread: the send is scheduled onto the middleware
        event loop and deliberately not awaited (fire-and-forget).
        """
        # Fix: the returned future was previously bound to an unused local variable.
        asyncio.run_coroutine_threadsafe(self.ws.send_str(json.dumps(data)), self.middleware.loop)

    def send_error(self, id_: Any, code: int, message: str, data: Any = None):
        """Send a JSON-RPC error response, attaching `data` when provided."""
        error = {
            "jsonrpc": "2.0",
            "error": {
                "code": code,
                "message": message,
            },
            "id": id_,
        }
        if data is not None:
            error["error"]["data"] = data

        self.send(error)

    def send_truenas_error(self, id_: Any, code: int, message: str, errno_: int, reason: str,
                           exc_info=None, extra: list | None = None):
        """Send a JSON-RPC error whose `data` member uses the TrueNAS error format."""
        self.send_error(id_, code, message, self.format_truenas_error(errno_, reason, exc_info, extra))

    def format_truenas_error(self, errno_: int, reason: str, exc_info=None, extra: list | None = None):
        """Build the TrueNAS error payload (errno, errname, reason, trace, extra).

        When the client enabled `py_exceptions`, the pickled exception object is
        included as well.
        """
        return {
            "error": errno_,
            "errname": get_errname(errno_),
            "reason": reason,
            "trace": self.truenas_error_traceback(exc_info) if exc_info else None,
            "extra": extra,
            **({"py_exception": binascii.b2a_base64(pickle.dumps(exc_info[1])).decode()}
               if self.py_exceptions and exc_info else {}),
        }

    def truenas_error_traceback(self, exc_info):
        """Convert `exc_info` into a JSON-serializable traceback description."""
        etype, value, tb = exc_info

        frames = []
        cur_tb = tb
        while cur_tb:
            tb_frame = cur_tb.tb_frame
            cur_tb = cur_tb.tb_next

            cur_frame = get_frame_details(tb_frame, self.middleware.logger)
            if cur_frame:
                frames.append(cur_frame)

        return {
            "class": etype.__name__,
            "frames": frames,
            "formatted": "".join(traceback.format_exception(*exc_info)),
            "repr": repr(value),
        }

    def send_truenas_validation_error(self, id_: Any, exc_info, errors: list):
        """Send an "Invalid params" JSON-RPC error carrying validation `errors`."""
        self.send_error(id_, JSONRPCError.INVALID_PARAMS.value, "Invalid params",
                        self.format_truenas_validation_error(exc_info[1], exc_info, errors))

    def format_truenas_validation_error(self, exception, exc_info=None, errors: list | None = None):
        """Format a validation failure as an EINVAL TrueNAS error payload."""
        return self.format_truenas_error(errno.EINVAL, str(exception), exc_info, errors)

    def register_callback(self, event: RpcWebSocketAppEvent, callback: Callable):
        """Register `callback` to run whenever `event` fires on this connection."""
        self.callbacks[event.value].append(callback)

    async def run_callback(self, event, *args, **kwargs):
        """Run all callbacks registered for `event`; exceptions are logged, not raised."""
        for callback in self.callbacks[event.value]:
            try:
                if asyncio.iscoroutinefunction(callback):
                    await callback(self, *args, **kwargs)
                else:
                    # Sync callbacks run in a worker thread to keep the loop responsive.
                    await self.middleware.run_in_thread(callback, self, *args, **kwargs)
            except Exception:
                self.middleware.logger.error(f"Failed to run {event} callback", exc_info=True)

    async def subscribe(self, ident: str, name: str):
        """Subscribe this connection to event `name` under client-chosen `ident`."""
        shortname, arg = self.middleware.event_source_manager.short_name_arg(name)
        if shortname in self.middleware.event_source_manager.event_sources:
            # Event-source subscriptions are tracked by the event source manager.
            await self.middleware.event_source_manager.subscribe_app(self, self.__esm_ident(ident), shortname, arg)
        else:
            # Plain events are tracked locally on the connection.
            self.subscriptions[ident] = name

    async def unsubscribe(self, ident: str):
        """Drop the subscription registered under `ident` (local or event-source)."""
        if ident in self.subscriptions:
            self.subscriptions.pop(ident)
        elif self.__esm_ident(ident) in self.middleware.event_source_manager.idents:
            await self.middleware.event_source_manager.unsubscribe(self.__esm_ident(ident))

    def __esm_ident(self, ident):
        # Namespace the client-chosen ident with the session id so identical idents
        # from different sessions do not collide in the event source manager.
        return self.session_id + ident

    def send_event(self, name: str, event_type: str, **kwargs):
        """Send a `collection_update` notification for event `name` if subscribed."""
        if (
            not any(i in [name, "*"] for i in self.subscriptions.values()) and
            (
                self.middleware.event_source_manager.short_name_arg(name)[0] not in
                self.middleware.event_source_manager.event_sources
            )
        ):
            # Not subscribed directly, via wildcard, or through an event source.
            return

        event = {
            "msg": event_type.lower(),
            "collection": name,
        }
        kwargs = kwargs.copy()
        if "id" in kwargs:
            event["id"] = kwargs.pop("id")
        if event_type in ("ADDED", "CHANGED"):
            if "fields" in kwargs:
                event["fields"] = kwargs.pop("fields")
        if kwargs:
            event["extra"] = kwargs
        self.send_notification("collection_update", event)

    def notify_unsubscribed(self, collection: str, error: Exception | None):
        """Tell the client it was unsubscribed from `collection`, with optional error."""
        params = {"collection": collection, "error": None}

        if error:
            if isinstance(error, ValidationErrors):
                params["error"] = self.format_truenas_validation_error(error, errors=list(error))
            elif isinstance(error, CallError):
                params["error"] = self.format_truenas_error(error.errno, str(error), extra=error.extra)
            else:
                params["error"] = self.format_truenas_error(errno.EINVAL, str(error))

        self.send_notification("notify_unsubscribed", params)

    def send_notification(self, method, params):
        """Send a JSON-RPC notification (no `id`, no response expected)."""
        self.send({
            "jsonrpc": "2.0",
            "method": method,
            "params": params,
        })
class RpcWebSocketHandler(BaseWebSocketHandler):
    """Accepts JSON-RPC 2.0 websocket connections and dispatches method calls."""

    def __init__(self, middleware: "Middleware", methods: dict[str, Method]):
        """
        :param middleware: `Middleware` instance.
        :param methods: mapping of fully-qualified method name -> `Method` to expose.
        """
        super().__init__(middleware)
        self.methods = methods

    async def process(self, origin: ConnectionOrigin, ws: WebSocketResponse):
        """Run the receive loop for a single client connection until it closes."""
        app = RpcWebSocketApp(self.middleware, origin, ws)

        self.middleware.register_wsclient(app)
        try:
            # aiohttp can cancel tasks if a request take too long to finish.
            # It is desired to prevent that in this stage in case we are debugging middlewared via gdb (which makes the
            # program execution a lot slower)
            await asyncio.shield(self.middleware.call_hook("core.on_connect", app))

            msg: WSMessage
            async for msg in ws:
                if msg.type == WSMsgType.ERROR:
                    self.middleware.logger.error("Websocket error: %r", msg.data)
                    break

                if msg.type != WSMsgType.TEXT:
                    await ws.close(
                        code=WSCloseCode.UNSUPPORTED_DATA,
                        message=f"Invalid websocket message type: {msg.type!r}".encode("utf-8"),
                    )
                    break

                try:
                    # Enforce payload size limits (stricter for unauthenticated clients).
                    message = parse_message(app.authenticated, msg.data)
                except MsgSizeError as err:
                    if err.limit is not MsgSizeLimit.UNAUTHENTICATED:
                        creds = app.authenticated_credentials.dump() if app.authenticated_credentials else None
                        origin = app.origin.repr if app.origin else None
                        self.middleware.logger.error(
                            'Client using credentials [%s] at [%s] sent message with payload size [%d bytes] '
                            'exceeding limit of %d for method %s',
                            creds, origin, err.datalen, err.limit, err.method_name
                        )

                    await ws.close(
                        code=err.ws_close_code,
                        message=err.ws_errmsg.encode('utf-8'),
                    )
                    break
                except ValueError as e:
                    # Malformed JSON: report it per JSON-RPC but keep the connection open.
                    app.send_error(None, JSONRPCError.INVALID_JSON.value, str(e))
                    continue

                await app.run_callback(RpcWebSocketAppEvent.MESSAGE, message)

                try:
                    await self.process_message(app, message)
                except Exception as e:
                    self.middleware.logger.error("Unhandled exception in JSON-RPC message handler", exc_info=True)
                    await ws.close(
                        code=WSCloseCode.INTERNAL_ERROR,
                        message=str(e).encode("utf-8"),
                    )
                    break
        finally:
            # Always tear down subscriptions and deregister the client.
            await app.run_callback(RpcWebSocketAppEvent.CLOSE)

            await self.middleware.event_source_manager.unsubscribe_app(app)

            self.middleware.unregister_wsclient(app)

    async def process_message(self, app: RpcWebSocketApp, message: Any):
        """Validate a decoded request envelope and schedule the method call."""
        try:
            jsonschema.validate(message, REQUEST_SCHEMA)
        except jsonschema.ValidationError as e:
            app.send_error(None, JSONRPCError.INVALID_REQUEST.value, str(e))
            return

        id_ = message.get("id")
        method = self.methods.get(message["method"])
        if method is None:
            app.send_error(id_, JSONRPCError.METHOD_NOT_FOUND.value, "Method does not exist")
            return

        # Calls run as independent tasks so a slow call does not block the read loop.
        asyncio.ensure_future(self.process_method_call(app, id_, method, message["params"]))

    async def process_method_call(self, app: RpcWebSocketApp, id_: Any, method: Method, params: dict[str, Any]):
        """Invoke `method` and send a JSON-RPC response (result, or a mapped error)."""
        try:
            async with app.softhardsemaphore:
                result = await method.call(app, params)
        except SoftHardSemaphoreLimit as e:
            app.send_error(id_, JSONRPCError.TRUENAS_TOO_MANY_CONCURRENT_CALLS.value,
                           f"Maximum number of concurrent calls ({e.args[0]}) has exceeded")
        except ValidationError as e:
            app.send_truenas_validation_error(id_, sys.exc_info(), [
                (e.attribute, e.errmsg, e.errno),
            ])
        except ValidationErrors as e:
            app.send_truenas_validation_error(id_, sys.exc_info(), list(e))
        except (CallException, Error) as e:
            # CallException and subclasses are the way to gracefully send errors to the client
            app.send_truenas_error(id_, JSONRPCError.TRUENAS_CALL_ERROR.value, "Method call error", e.errno, str(e),
                                   sys.exc_info(), e.extra)
        except Exception as e:
            # Unexpected exception: adapt known third-party exception types if possible.
            adapted = adapt_exception(e)
            if adapted:
                errno_ = adapted.errno
                error = adapted
                extra = adapted.extra
            else:
                errno_ = errno.EINVAL
                error = e
                extra = None

            app.send_truenas_error(id_, JSONRPCError.TRUENAS_CALL_ERROR.value, "Method call error", errno_,
                                   str(error) or repr(error), sys.exc_info(), extra)

            if not adapted and not app.py_exceptions:
                self.middleware.logger.warning(f"Exception while calling {method.name}(*{method.dump_args(params)!r})",
                                               exc_info=True)
        else:
            app.send({
                "jsonrpc": "2.0",
                "result": result,
                "id": id_,
            })
| 12,605 | Python | .py | 263 | 35.558935 | 120 | 0.590188 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,338 | base.py | truenas_middleware/src/middlewared/middlewared/api/base/server/ws_handler/base.py | from aiohttp.http_websocket import WSCloseCode
from aiohttp.web import Request, WebSocketResponse
from middlewared.utils.origin import ConnectionOrigin
from middlewared.webui_auth import addr_in_allowlist
class BaseWebSocketHandler:
    """Common websocket plumbing: handshake, origin discovery and allowlist checks."""

    def __init__(self, middleware: "Middleware"):
        self.middleware = middleware

    async def __call__(self, request: Request):
        """aiohttp entry point: upgrade `request` to a websocket and process it."""
        ws = WebSocketResponse()
        try:
            await ws.prepare(request)
        except ConnectionResetError:
            # Happens when we're preparing a new session, and during the time we prepare, the server is
            # stopped/killed/restarted etc. Ignore these to prevent log spam.
            return ws

        origin = await self.get_origin(request)
        if origin is None:
            await ws.close()
            return ws
        if not await self.can_access(origin):
            await ws.close(
                code=WSCloseCode.POLICY_VIOLATION,
                message=b"You are not allowed to access this resource",
            )
            return ws

        await self.process(origin, ws)
        return ws

    async def get_origin(self, request: Request) -> ConnectionOrigin | None:
        """Resolve the connection origin (runs in a worker thread)."""
        return await self.middleware.run_in_thread(ConnectionOrigin.create, request)

    async def can_access(self, origin: ConnectionOrigin | None) -> bool:
        """Return True if `origin` may connect.

        Unix-socket and HA-peer connections are always allowed; otherwise the
        remote address must be in the UI allowlist, if one is configured.
        """
        if origin is None:
            return False

        if origin.is_unix_family or origin.is_ha_connection:
            return True

        ui_allowlist = await self.middleware.call("system.general.get_ui_allowlist")
        if not ui_allowlist:
            return True

        elif addr_in_allowlist(origin.rem_addr, ui_allowlist):
            return True

        return False

    async def process(self, origin: ConnectionOrigin, ws: WebSocketResponse):
        """Subclass hook: handle the established websocket connection."""
        raise NotImplementedError
| 1,851 | Python | .py | 42 | 34.595238 | 103 | 0.662771 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,339 | test_service_crud_service.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/test_service_crud_service.py | from pydantic import Secret
import pytest
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.result import serialize_result
from middlewared.service.crud_service import query_result
@pytest.mark.parametrize("result,serialized", [
    ([{"username": "ivan", "password": "pass"}, {"username": "pyotr", "password": "p@ss"}],
     [{"username": "ivan", "password": "********"}, {"username": "pyotr", "password": "********"}]),
    ({"username": "ivan", "password": "pass"}, {"username": "ivan", "password": "********"}),
    (10, 10),
])
def test_query_result(result, serialized):
    """Secret fields are redacted when serializing query results: lists of entries,
    a single entry, and scalar counts are all handled."""
    class Entry(BaseModel):
        username: str
        password: Secret[str]

    assert serialize_result(query_result(Entry), result, False) == serialized
| 765 | Python | .py | 16 | 44 | 100 | 0.660403 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,340 | test_service_cli_descriptions.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/test_service_cli_descriptions.py | import textwrap
import pytest
from middlewared.service import CoreService
@pytest.mark.parametrize("doc,names,descriptions", [
(
textwrap.dedent("""\
Create a new user.
If `uid` is not provided it is automatically filled with the next one available.
`group` is required if `group_create` is false.
Available choices for `shell` can be retrieved with `user.shell_choices`.
`smb` specifies whether the user should be allowed access to SMB shares. User
will also automatically be added to the `builtin_users` group.
"""),
{"uid", "group", "group_create", "shell", "smb"},
{
"uid": "If `uid` is not provided it is automatically filled with the next one available.",
"group": "`group` is required if `group_create` is false.",
"group_create": "`group` is required if `group_create` is false.",
"shell": "Available choices for `shell` can be retrieved with `user.shell_choices`.",
"smb": "`smb` specifies whether the user should be allowed access to SMB shares. User\n"
"will also automatically be added to the `builtin_users` group.",
}
),
(
textwrap.dedent("""\
* `schedule` is a schedule to run replication task. Only `auto` replication tasks without bound periodic
snapshot tasks can have a schedule
* `restrict_schedule` restricts when replication task with bound periodic snapshot tasks runs. For example,
you can have periodic snapshot tasks that run every 15 minutes, but only run replication task every hour.
* Enabling `only_matching_schedule` will only replicate snapshots that match `schedule` or
`restrict_schedule`
* `allow_from_scratch` will destroy all snapshots on target side and replicate everything from scratch if none
of the snapshots on target side matches source snapshots
* `readonly` controls destination datasets readonly property:
* `SET` will set all destination datasets to readonly=on after finishing the replication
* `REQUIRE` will require all existing destination datasets to have readonly=on property
* `IGNORE` will avoid this kind of behavior
* `hold_pending_snapshots` will prevent source snapshots from being deleted by retention of replication fails
for some reason
"""),
{"schedule", "restrict_schedule", "only_matching_schedule", "allow_from_scratch", "readonly", "hold_pending_snapshots", "auto"},
{
"schedule": "* `schedule` is a schedule to run replication task. Only `auto` replication tasks without bound periodic\n"
" snapshot tasks can have a schedule\n"
"* Enabling `only_matching_schedule` will only replicate snapshots that match `schedule` or\n"
" `restrict_schedule`",
"restrict_schedule": "* `restrict_schedule` restricts when replication task with bound periodic snapshot tasks runs. For example,\n"
" you can have periodic snapshot tasks that run every 15 minutes, but only run replication task every hour.\n"
"* Enabling `only_matching_schedule` will only replicate snapshots that match `schedule` or\n"
" `restrict_schedule`",
"only_matching_schedule": "* Enabling `only_matching_schedule` will only replicate snapshots that match `schedule` or\n"
" `restrict_schedule`",
"allow_from_scratch": "* `allow_from_scratch` will destroy all snapshots on target side and replicate everything from scratch if none\n"
" of the snapshots on target side matches source snapshots",
"readonly": "* `readonly` controls destination datasets readonly property:\n"
" * `SET` will set all destination datasets to readonly=on after finishing the replication\n"
" * `REQUIRE` will require all existing destination datasets to have readonly=on property\n"
" * `IGNORE` will avoid this kind of behavior",
"hold_pending_snapshots": "* `hold_pending_snapshots` will prevent source snapshots from being deleted by retention of replication fails\n"
" for some reason",
"auto": "* `schedule` is a schedule to run replication task. Only `auto` replication tasks without bound periodic\n"
" snapshot tasks can have a schedule",
},
)
])
def test_cli_args_descriptions(doc, names, descriptions):
    """Each name in `names` maps to the docstring paragraph(s) that mention it."""
    assert CoreService(None)._cli_args_descriptions(doc, names) == descriptions
| 4,903 | Python | .py | 67 | 58.776119 | 151 | 0.633596 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,341 | test_service_part.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/test_service_part.py | import pytest
from middlewared.service import job, Service, ServicePartBase
from middlewared.schema import accepts, Int
class SumServiceBase(ServicePartBase):
    """Interface used by the tests below; implementations must define `sum`."""

    @accepts(Int("a"), Int("b"))
    def sum(self, a, b):
        """
        Sum two numbers
        """
        pass
def test__method_not_defined():
    """An implementation missing a method declared on the interface is rejected."""
    with pytest.raises(RuntimeError) as e:
        class SumServiceImpl(Service, SumServiceBase):
            def add(self, a, b):
                return a + b

    assert "does not define method 'sum'" in e.value.args[0], e.value.args[0]
def test__signatures_do_not_match():
    """An implementation whose signature differs from the interface is rejected."""
    with pytest.raises(RuntimeError) as e:
        class SumServiceImpl(Service, SumServiceBase):
            def sum(self, a, b, c=0):
                return a + b

    assert "Signature for method" in e.value.args[0], e.value.args[0]
def test__ok():
    """A matching implementation is accepted and inherits the interface docstring."""
    class SumServiceImpl(Service, SumServiceBase):
        def sum(self, a, b):
            return a + b

    assert SumServiceImpl.sum.__doc__ is not None
def test__schema_works():
    """The @accepts schema from the interface is applied to the implementation:
    the string "2" is coerced to an int before the method body runs."""
    class SumServiceImpl(Service, SumServiceBase):
        def sum(self, a, b):
            return a + b

    assert SumServiceImpl(None).sum(1, "2") == 3
def test__job():
    """@job-decorated interface methods keep the injected `job` argument working
    in implementations."""
    class JobServiceBase(ServicePartBase):
        @accepts(Int("arg"))
        @job()
        def process(self, job, arg):
            pass

    class JobServiceImpl(Service, JobServiceBase):
        def process(self, job, arg):
            return arg * 2

    assert JobServiceImpl(None).process(None, 3) == 6
| 1,517 | Python | .py | 42 | 28.666667 | 77 | 0.629808 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,342 | test_schema.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/test_schema.py | import pytest
from unittest.mock import Mock
from middlewared.service import job
from middlewared.service_exception import ValidationErrors
from middlewared.schema import (
accepts, Bool, Cron, Dict, Dir, File, Float, Int, IPAddr, List, Str, URI,
Password, UnixPerm, UUID, LocalUsername, NetbiosName, NetbiosDomain
)
from middlewared.validators import QueryFilters, QueryOptions
def test__nonhidden_after_hidden():
    """Declaring a non-hidden argument after a hidden one fails at decoration time."""
    with pytest.raises(ValueError):
        @accepts(Int('id'), Bool('fake', hidden=True), List('flags'))
        def f(self, id_, fake, flags):
            pass
def test__schema_str_empty():
    """Str(empty=False) must reject an empty string argument."""
    @accepts(Str('data', empty=False))
    def strempty(self, data):
        return data

    with pytest.raises(ValidationErrors):
        strempty(Mock(), '')
def test__schema_str_non_empty():
@accepts(Str('data', empty=True))
def strempty(self, data):
return data
self = Mock()
assert strempty(self, '') == ''
def test__schema_str_null():
@accepts(Str('data', null=True))
def strnull(self, data):
return data
self = Mock()
assert strnull(self, None) is None
def test__schema_str_not_null():
@accepts(Str('data', null=False))
def strnotnull(self, data):
return data
self = Mock()
with pytest.raises(ValidationErrors):
assert strnotnull(self, None) is not None
@pytest.mark.parametrize("value,expected", [
('foo', 'foo'),
(3, '3'),
(False, ValidationErrors),
(3.3, ValidationErrors),
(["foo"], ValidationErrors),
])
def test__schema_str_values(value, expected):
@accepts(Str('data'))
def strv(self, data):
return data
self = Mock()
if expected is ValidationErrors:
with pytest.raises(ValidationErrors) as ei:
strv(self, value)
assert ei.value.errors[0].errmsg == 'Not a string'
else:
assert strv(self, value) == expected
@pytest.mark.parametrize("value,expected", [
('FOO', 'FOO'),
('BAR', 'BAR'),
('FOOBAR', ValidationErrors),
])
def test__schema_str_num(value, expected):
@accepts(Str('data', enum=['FOO', 'BAR']))
def strv(self, data):
return data
self = Mock()
if expected is ValidationErrors:
with pytest.raises(ValidationErrors) as ei:
strv(self, value)
assert ei.value.errors[0].errmsg.startswith('Invalid choice')
else:
assert strv(self, value) == expected
def test__schema_bool_null():
@accepts(Bool('data', null=True))
def boolnull(self, data):
return data
self = Mock()
assert boolnull(self, None) is None
def test__schema_bool_not_null():
@accepts(Bool('data', null=False))
def boolnotnull(self, data):
return data
self = Mock()
with pytest.raises(ValidationErrors):
assert boolnotnull(self, None) is not None
def test__schema_float_null():
@accepts(Float('data', null=True))
def floatnull(self, data):
return data
self = Mock()
assert floatnull(self, None) is None
def test__schema_float_not_null():
@accepts(Float('data', null=False))
def floatnotnull(self, data):
return data
self = Mock()
with pytest.raises(ValidationErrors):
assert floatnotnull(self, None) is not None
@pytest.mark.parametrize("value,expected", [
(5, 5.0),
('5', 5.0),
('5.0', 5.0),
(5.0, 5.0),
('FOO', ValidationErrors),
(False, ValidationErrors),
([4], ValidationErrors),
])
def test__schema_float_values(value, expected):
@accepts(Float('data', null=False))
def floatv(self, data):
return data
self = Mock()
if expected is ValidationErrors:
with pytest.raises(ValidationErrors) as ei:
floatv(self, value)
assert ei.value.errors[0].errmsg == 'Not a floating point number'
else:
assert floatv(self, value) == expected
def test__schema_int_null():
@accepts(Int('data', null=True))
def intnull(self, data):
return data
self = Mock()
assert intnull(self, None) is None
def test__schema_int_not_null():
@accepts(Int('data', null=False))
def intnotnull(self, data):
return data
self = Mock()
with pytest.raises(ValidationErrors):
assert intnotnull(self, None) is not None
@pytest.mark.parametrize("value,expected", [
    (3, 3),
    ('3', 3),
    ('-3', -3),
    (-3, -3),
    (3.0, ValidationErrors),
    ('FOO', ValidationErrors),
    (False, ValidationErrors),
    ([4], ValidationErrors),
])
def test__schema_int_values(value, expected):
    """Int accepts ints and numeric strings, and rejects floats, bools and lists."""
    @accepts(Int('data'))
    def intv(self, data):
        return data

    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors) as ei:
            # Bug fix: this previously called `intv(self, False)`, so the other
            # parametrized invalid values (3.0, 'FOO', [4]) were never exercised.
            intv(self, value)
        assert ei.value.errors[0].errmsg == 'Not an integer'
    else:
        assert intv(self, value) == expected
def test__schema_dict_null():
@accepts(Dict('data', null=True))
def dictnull(self, data):
return data
self = Mock()
assert dictnull(self, None) is None
def test__schema_dict_not_null():
    # NOTE(review): despite the "dict" name, this uses Str('data', null=False) and
    # compares against {} — looks like a copy/paste from the str tests; confirm
    # whether Dict('data', null=False) was intended.
    @accepts(Str('data', null=False))
    def dictnotnull(self, data):
        return data

    self = Mock()
    with pytest.raises(ValidationErrors):
        assert dictnotnull(self, None) != {}
@pytest.mark.parametrize("value,expected", [
({'foo': 'foo'}, {'foo': 'foo'}),
({}, {}),
({'foo': None}, ValidationErrors),
({'bar': None}, ValidationErrors),
])
def test__schema_dict_not_null_args(value, expected):
@accepts(Dict(
'data',
Str('foo'),
Bool('bar'),
))
def dictargs(self, data):
return data
self = Mock()
if expected is ValidationErrors:
with pytest.raises(ValidationErrors) as ei:
dictargs(self, value)
assert ei.value.errors[0].errmsg == 'null not allowed'
else:
assert dictargs(self, value) == expected
@pytest.mark.parametrize("value,expected", [
({'foo': 'foo', 'bar': False, 'list': []}, {'foo': 'foo', 'bar': False, 'list': []}),
({'foo': 'foo'}, ValidationErrors),
({'bar': False}, ValidationErrors),
({'foo': 'foo', 'bar': False}, {'foo': 'foo', 'bar': False, 'list': []}),
])
def test__schema_dict_required_args(value, expected):
@accepts(Dict(
'data',
Str('foo', required=True),
Bool('bar', required=True),
List('list', required=True),
))
def dictargs(self, data):
return data
self = Mock()
if expected is ValidationErrors:
with pytest.raises(ValidationErrors) as ei:
dictargs(self, value)
assert ei.value.errors[0].errmsg == 'attribute required'
else:
assert dictargs(self, value) == expected
@pytest.mark.parametrize("value,expected,msg", [
({'foo': 'foo', 'bar': False}, {'foo': 'foo', 'bar': False, 'list': []}, None),
({'foo': 'foo', 'bar': False, 'num': 5}, {'foo': 'foo', 'bar': False, 'num': 5, 'list': []}, None),
({'foo': 'foo'}, {'foo': 'foo', 'list': []}, None),
({'foo': 'foo', 'list': ['listitem']}, {'foo': 'foo', 'list': ['listitem']}, None),
({'foo': 'foo', 'list': 5}, ValidationErrors, 'Not a list'),
({'foo': 'foo', 'bar': False, 'num': None}, ValidationErrors, 'null not allowed'),
({'foo': None}, ValidationErrors, 'null not allowed'),
({'bar': None}, ValidationErrors, 'attribute required'),
])
def test__schema_dict_mixed_args(value, expected, msg):
@accepts(Dict(
'data',
Str('foo', required=True),
Bool('bar', null=True),
Int('num'),
List('list', items=[Str('listitem')]),
))
def dictargs(self, data):
return data
self = Mock()
if expected is ValidationErrors:
with pytest.raises(ValidationErrors) as ei:
dictargs(self, value)
assert ei.value.errors[0].errmsg == msg
else:
assert dictargs(self, value) == expected
@pytest.mark.parametrize("schema,attribute", [
(
Dict(
'create',
Dict(
'image',
Str('repository', required=True),
),
),
'create.image.repository',
),
(
Dict(
'create',
Str('repository', required=True),
),
'create.repository',
),
])
def test__schema_dict_error_handler_attribute_name(schema, attribute):
@accepts(schema)
def meth(self, data):
return data
with pytest.raises(ValidationErrors) as ei:
meth({})
assert ei.value.errors[0].attribute == attribute
def test__schema_dict_error_handler():
    """Validation errors on a Dict schema report a dotted attribute path."""
    @accepts(Dict(
        'create',
        Str('repository', required=True),
    ))
    def meth(self, data):
        return data

    # NOTE(review): `meth({})` binds {} to `self`; presumably `data` is filled in
    # from the schema default before validation — confirm against accepts().
    with pytest.raises(ValidationErrors) as ei:
        meth({})

    assert ei.value.errors[0].attribute == 'create.repository'
@pytest.mark.parametrize('items,value,expected', [
    ([List('b', items=[List('c', private=True)])], [[['a']]], [['********']]),
    ([Dict('b', Str('c', private=True))], [{'c': 'secret'}], [{'c': '********'}]),
    ([Dict('b', Str('c', private=True)), Dict('d', Str('e'))], [{'c': 'secret'}], [{'c': '********'}]),
    ([Dict('b', Str('c')), Dict('d', Str('c', private=True))], [{'c': 'secret'}], ['********']),
    ([Dict('b', Password('c'))], [{'c': 'secret'}], [{'c': '********'}]),
    ([Dict('b', Password('c')), Dict('d', Str('e'))], [{'c': 'secret'}], [{'c': '********'}]),
    ([Dict('b', Str('c')), Dict('d', Password('c'))], [{'c': 'secret'}], ['********']),
])
def test__schema_list_private_items(items, value, expected):
    """Private/Password fields nested inside list item schemas are
    redacted to '********' by dump()."""
    assert List('a', items=items).dump(value) == expected
def test__schema_list_empty():
    """`empty=False` rejects an empty list."""
    @accepts(List('data', empty=False))
    def handler(self, data):
        return data

    mock_self = Mock()
    with pytest.raises(ValidationErrors):
        handler(mock_self, [])
def test__schema_list_non_empty():
    """`empty=True` accepts an empty list and returns it unchanged."""
    @accepts(List('data', empty=True))
    def handler(self, data):
        return data

    assert handler(Mock(), []) == []
def test__schema_list_null():
    """`null=True` with a `None` default passes `None` straight through."""
    @accepts(List('data', null=True, default=None))
    def handler(self, data):
        return data

    assert handler(Mock(), None) is None
def test__schema_list_not_null():
    """`null=False` rejects `None` for a list field.

    The previous version wrapped the call in `assert ... != []`; that
    comparison was dead code because the call raises before it is
    evaluated, so a bare call inside `pytest.raises` is used instead.
    """
    @accepts(List('data', null=False))
    def listnotnull(self, data):
        return data
    self = Mock()
    with pytest.raises(ValidationErrors):
        listnotnull(self, None)
def test__schema_list_noarg_not_null():
    """Rejecting `None` produces the 'null not allowed' error message."""
    @accepts(List('data', null=False))
    def handler(self, data):
        return data

    with pytest.raises(ValidationErrors) as exc_info:
        handler(Mock(), None)
    assert exc_info.value.errors[0].errmsg == 'null not allowed'
@pytest.mark.parametrize("value,expected", [
    (["foo"], ["foo"]),
    ([2], ["2"]),
    ([2, "foo"], ["2", "foo"]),
    ([False], ValidationErrors),
    ("foo", ValidationErrors),
    ({"foo": "bar"}, ValidationErrors),
])
def test__schema_list_items(value, expected):
    """List elements are coerced to the item schema (ints become strings);
    incompatible elements and non-list inputs raise ValidationErrors."""
    @accepts(List('data', items=[Str('foo')]))
    def listnotnull(self, data):
        return data
    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            listnotnull(self, value)
    else:
        assert listnotnull(self, value) == expected
@pytest.mark.parametrize('value,expected', [
    (['foo'], ['foo']),
    ([True, True, 'foo'], [True, True, 'foo']),
    ([2, {'bool': True}], ['2', {'bool': True}]),
    ([2, {'bool': True, 'str': False}], ValidationErrors),
    ({'foo': False}, ValidationErrors),
    ({'unexpected': False}, ValidationErrors),
    ('foo', ValidationErrors),
    ({'foo': 'foo'}, ValidationErrors),
])
def test__schema_list_multiple_items(value, expected):
    """When several item schemas are given, every element must validate
    against at least one of them; otherwise ValidationErrors is raised."""
    @accepts(List('data', items=[Str('foo'), Bool('bool'), Dict('dict', Bool('bool'), Str('str'))]))
    def listnotnull(self, data):
        return data
    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            listnotnull(self, value)
    else:
        assert listnotnull(self, value) == expected
def test__schema_list_null_items():
    """A nullable list with typed items must at least not crash on None."""
    @accepts(List('data', null=True, items=[Str('item')]))
    def handler(self, data):
        return data

    # FIXME: the schema layer appears to coerce None to [] here instead
    # of returning None; until that is resolved we only assert that the
    # call does not raise.
    handler(Mock(), None)
def test__schema_unixperm_null():
    """`UnixPerm(null=True)` passes None through untouched."""
    @accepts(UnixPerm('data', null=True))
    def handler(self, data):
        return data

    assert handler(Mock(), None) is None
def test__schema_dir_null():
    """`Dir(null=True)` passes None through untouched."""
    @accepts(Dir('data', null=True))
    def handler(self, data):
        return data

    assert handler(Mock(), None) is None
def test__schema_file_null():
    """`File(null=True)` passes None through untouched."""
    @accepts(File('data', null=True))
    def handler(self, data):
        return data

    assert handler(Mock(), None) is None
@pytest.mark.parametrize("value,expected", [
    ({'minute': '55'}, {'minute': '55'}),
    ({'dow': '2'}, {'dow': '2'}),
    ({'hour': '*'}, {'hour': '*'}),
    ({'minute': '*/10'}, {'minute': '*/10'}),
    ({'minute': '10-30/10'}, {'minute': '10-30/10'}),
    ({'minute': '0/10'}, ValidationErrors),
    ({'minute': '66'}, ValidationErrors),
    ({'hour': '-25'}, ValidationErrors),
    ({'dom': '33'}, ValidationErrors),
])
def test__schema_cron_values(value, expected):
    """Valid cron field expressions pass through; out-of-range or
    malformed values raise ValidationErrors."""
    @accepts(Cron('data'))
    def cronv(self, data):
        return data
    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            cronv(self, value)
    else:
        # Cron fills in defaults for unspecified fields; compare only
        # the keys we supplied.
        result = {k: v for k, v in cronv(self, value).items() if k in expected}
        assert result == expected
@pytest.mark.parametrize("data_dict,begin_end,result", [
    (
        {"cron_minute": "00", "cron_hour": "01", "cron_daymonth": "02", "cron_month": "03", "cron_dayweek": "04"},
        False,
        {"schedule": {"minute": "00", "hour": "01", "dom": "02", "month": "03", "dow": "04"}},
    ),
    (
        {"cron_minute": "00", "cron_hour": None, "cron_daymonth": "02", "cron_month": "03", "cron_dayweek": "04"},
        False,
        {"schedule": None},
    ),
    (
        {"cron_minute": "00", "cron_hour": "01", "cron_daymonth": "02", "cron_month": "03", "cron_dayweek": "04",
         "cron_begin": "05:00:00", "cron_end": "06:00:00"},
        True,
        {"schedule": {"minute": "00", "hour": "01", "dom": "02", "month": "03", "dow": "04",
                      "begin": "05:00", "end": "06:00"}},
    ),
    (
        {"cron_minute": "00", "cron_hour": None, "cron_daymonth": "02", "cron_month": "03", "cron_dayweek": "04",
         "cron_begin": "05:00:00", "cron_end": "06:00:00"},
        True,
        {"schedule": None},
    ),
    (
        {"cron_minute": "00", "cron_hour": "01", "cron_daymonth": "02", "cron_month": "03", "cron_dayweek": "04",
         "cron_begin": "05:00:00", "cron_end": None},
        True,
        {"schedule": None},
    ),
])
def test__cron__convert_db_format_to_schedule(data_dict, begin_end, result):
    """convert_db_format_to_schedule collapses `cron_*` DB columns into a
    `schedule` dict in place; any None column yields `schedule: None`,
    and begin/end times are truncated to HH:MM."""
    Cron.convert_db_format_to_schedule(data_dict, "schedule", "cron_", begin_end)
    assert data_dict == result
@pytest.mark.parametrize("value,error", [
    ({'hour': '0', 'minute': '0', 'begin': '09:00', 'end': '18:00'}, True),
    ({'hour': '9', 'minute': '0', 'begin': '09:00', 'end': '18:00'}, False),
    ({'hour': '9', 'minute': '0', 'begin': '09:10', 'end': '18:00'}, True),
    ({'hour': '9', 'minute': '15', 'begin': '09:10', 'end': '18:00'}, False),
])
def test__cron__begin_end_validate(value, error):
    """With `begin_end=True`, the scheduled hour/minute must fall inside
    the begin/end time window."""
    @accepts(Cron('data', begin_end=True))
    def cronv(self, data):
        return data
    self = Mock()
    if error:
        with pytest.raises(ValidationErrors):
            cronv(self, value)
    else:
        cronv(self, value)
@pytest.mark.parametrize("value,expected", [
    ('127.0.0.1', '127.0.0.1'),
    ('22::56', '22::56'),
    ('192.', ValidationErrors),
    ('5:5', ValidationErrors),
    ('ff:ff:ee:aa', ValidationErrors),
])
def test__schema_ipaddr(value, expected):
    """IPAddr accepts well-formed IPv4/IPv6 addresses and rejects
    malformed ones."""
    @accepts(IPAddr('data'))
    def ipaddrv(self, data):
        return data
    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            ipaddrv(self, value)
    else:
        assert ipaddrv(self, value) == expected
@pytest.mark.parametrize("value,expected", [
    ('127.0.0.1/32', '127.0.0.1/32'),
    ('22::56/64', '22::56/64'),
    ('192.', ValidationErrors),
    ('5:5', ValidationErrors),
    ('ff:ff:ee:aa', ValidationErrors),
    ('192.168.3.1/33', ValidationErrors),
    ('ff::4/129', ValidationErrors),
])
def test__schema_ipaddr_cidr(value, expected):
    """With `cidr=True` a prefix length is required and must be within
    range (<=32 for IPv4, <=128 for IPv6)."""
    @accepts(IPAddr('data', cidr=True))
    def ipaddrv(self, data):
        return data
    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            ipaddrv(self, value)
    else:
        assert ipaddrv(self, value) == expected
@pytest.mark.parametrize("value,expected", [
    ('192.168.0.0%enp0s3', ValidationErrors),
    ('22::56%enp0s3', '22::56%enp0s3'),
])
def test__schema_ipaddr_cidr_allow_zone_index(value, expected):
    """Zone indexes (`%iface`) are accepted for IPv6 but not IPv4."""
    @accepts(IPAddr('data', allow_zone_index=True))
    def handler(self, data):
        return data

    if expected is not ValidationErrors:
        assert handler(Mock(), value) == expected
    else:
        with pytest.raises(ValidationErrors):
            handler(Mock(), value)
@pytest.mark.parametrize("value,expected", [
    ('192.168.0.0/24', '192.168.0.0/24'),
    ('192.168.0.0/255.255.255.0', '192.168.0.0/24'),
    ('192.168.0.1', '192.168.0.1/32'),
    ('192.168.0.999', ValidationErrors),
    ('BOGUS.NAME', ValidationErrors),
])
def test__schema_ipaddr_network(value, expected):
    """With `network=True` inputs are normalized to CIDR form: netmask
    notation is converted and a bare host gets /32."""
    @accepts(IPAddr('data', network=True))
    def ipaddrv(self, data):
        return data
    self = Mock()
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            ipaddrv(self, value)
    else:
        assert ipaddrv(self, value) == expected
@pytest.mark.parametrize("value,expected", [
    ('192.168.0.0/24', None),
    ('192.168.0.0/255.255.255.0', None),
    ('192.168.0.1', None),
    ('192.168.0.999', ValidationErrors),
    ('BOGUS.NAME', ValidationErrors),
])
def test__schema_ipaddr_validate(value, expected):
    """IPAddr.validate() returns None on success and raises on bad input."""
    # Values containing a slash are treated as networks, bare ones as hosts.
    schema = IPAddr(network='/' in value)
    if expected is ValidationErrors:
        with pytest.raises(ValidationErrors):
            schema.validate(value)
    else:
        assert schema.validate(value) == expected
def test__schema_str_default():
    """An omitted argument is filled in from its schema default."""
    @accepts(Str('foo'), Str('bar', default='BAR'))
    def handler(self, foo, bar):
        return bar

    assert handler(Mock(), 'foo') == 'BAR'
def test__schema_str_job_default():
    """Defaults still apply when @job shifts the positional parameters."""
    @accepts(Str('foo'), Str('bar', default='BAR'))
    @job()
    def handler(self, job, foo, bar):
        return bar

    assert handler(Mock(), Mock(), 'foo') == 'BAR'
@pytest.mark.parametrize("schema,attribute", [
    (
        Dict(
            'create',
            Dict(
                'image',
                Str('repository', required=True),
            ),
        ),
        'create.image.repository',
    ),
    (
        Dict(
            'create',
            Str('repository', required=True),
        ),
        'create.repository',
    ),
])
def test__schema_or_error_handler_attribute_name(schema, attribute):
    """Errors from nested Dict schemas report the dotted attribute path.

    NOTE(review): this looks identical to
    test__schema_dict_error_handler_attribute_name above — confirm
    whether one of the two can be removed.
    """
    @accepts(schema)
    def meth(self, data):
        return data
    with pytest.raises(ValidationErrors) as ei:
        meth({})
    assert ei.value.errors[0].attribute == attribute
@pytest.mark.parametrize('test_value,expected_error', [
    ('https://google.com', False),
    ('https:google.com', True),
    ('https:/google', True),
    ('https://www.google.com/search?q=truenas', False),
    ('', False),
])
def test__uri_schema(test_value, expected_error):
    """URI accepts well-formed URIs (and the empty string); malformed
    ones fail with the 'Not a valid URI' message."""
    @accepts(URI('uri'))
    def strv(self, uri):
        return uri
    self = Mock()
    if expected_error:
        with pytest.raises(ValidationErrors) as ei:
            strv(self, test_value)
        assert ei.value.errors[0].errmsg == 'Not a valid URI'
    else:
        assert strv(self, test_value) == test_value
def validate_simple(fn, value, must_fail, casefold=False):
    """Drive an @accepts-decorated method with a single value.

    When *must_fail* is true the call must raise ValidationErrors;
    otherwise the value must round-trip unchanged (compared
    case-insensitively when *casefold* is set).
    """
    mock_self = Mock()
    if not must_fail:
        result = fn(mock_self, value)
        if casefold:
            assert result.casefold() == value.casefold()
        else:
            assert result == value
    else:
        with pytest.raises(ValidationErrors):
            fn(mock_self, value)
@pytest.mark.parametrize('value,expected_to_fail', [
    ('', True),
    (f'{"a" * 33}', True),
    (' bad', True),
    ('a$a', True),
    ('a!', True),
    ('a$', False),
    ('aaa', False),
    ('aAA', False),
    ('Aaa', False),
    ('A.a', False),
])
def test__localusername_schema(value, expected_to_fail):
    """Local usernames: non-empty, at most 32 characters, no leading
    space; '$' is only allowed as the final character."""
    @accepts(LocalUsername('username', required=True))
    def user(self, data):
        return data
    validate_simple(user, value, expected_to_fail)
@pytest.mark.parametrize('value,expected_to_fail', [
    ('', True),
    ('canary', True),
    (0, True),
    ('c254614c-932f-4a31-888c-6330f5cc77a9', False),
])
def test__uuid_schema(value, expected_to_fail):
    """Only well-formed UUID strings pass the UUID schema."""
    @accepts(UUID('uuid', required=True))
    def handler(self, data):
        return data

    validate_simple(handler, value, expected_to_fail)
@pytest.mark.parametrize('value,expected_to_fail', [
    ('', True),
    ('*canary', True),
    ('aaaaaaaaaaaaaaaa', True),
    ('canary space', True),
    ('canary.space', True),
    ('canary?', True),
    ('<canary', True),
    ('canary>', True),
    ('canary|', True),
    ('1234567', True),
    ('canary', False),
    ('CaNary', False),
    ('can_ary', False),
    ('can-ary', False),
    ('LOCAL', True),
])
def test__netbiosname_schema(value, expected_to_fail):
    """NetBIOS names: <=15 chars, no spaces/dots/special characters, not
    purely numeric, and reserved names like LOCAL are rejected.
    Comparison is case-insensitive (casefold=True)."""
    @accepts(NetbiosName('nbname', required=True))
    def do_netbiosname(self, data):
        return data
    validate_simple(do_netbiosname, value, expected_to_fail, True)
@pytest.mark.parametrize('value,expected_to_fail', [
    ('', True),
    ('*canary', True),
    ('aaaaaaaaaaaaaaaa', True),
    ('canary space', True),
    ('canary.space', False),
    ('canary?', True),
    ('<canary', True),
    ('canary>', True),
    ('canary|', True),
    ('1234567', True),
    ('canary', False),
    ('CaNary', False),
    ('can_ary', False),
    ('can-ary', False),
    ('LOCAL', True),
])
def test__netbiosdomain_schema(value, expected_to_fail):
    """Like NetbiosName but dots are permitted (domain components);
    comparison is case-insensitive (casefold=True)."""
    @accepts(NetbiosDomain('nbname', required=True))
    def do_netbiosdomain(self, data):
        return data
    validate_simple(do_netbiosdomain, value, expected_to_fail, True)
@pytest.mark.parametrize('filters, options, expected_to_fail', [
    ([], {'select': ["a", 1]}, True),
    ([], {'select': ["a", ["b", 1]]}, True),
    ([], {'select': ["a", ["b", "c", "d"]]}, True),
    ([], {'select': ["a"]}, False),
    ([], {'select': ["a", ["b", "c"]]}, False),
    ([], {'order_by': [1]}, True),
    ([], {'order_by': ["a"]}, False),
    ([["a", "canary", "b"]], {}, True),
    ([["a", "=", "b"]], {}, False),
])
def test__filterable(filters, options, expected_to_fail):
    """QueryFilters/QueryOptions validators: select entries must be a
    string or a [source, alias] pair, order_by entries strings, and
    filter operators must be recognized."""
    @accepts(
        List('query-filters', items=[List('query-filter')], validators=[QueryFilters()]),
        Dict('query-options', additional_attrs=True, validators=[QueryOptions()])
    )
    def do_filter_op(self, query_filters, query_options):
        return
    self = Mock()
    if expected_to_fail:
        with pytest.raises(ValidationErrors):
            do_filter_op(self, filters, options)
    else:
        do_filter_op(self, filters, options)
| 23,994 | Python | .py | 703 | 28.083926 | 114 | 0.592167 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,343 | test_validators.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/test_validators.py | import os
import shutil
import pytest
from middlewared.schema import Dict, Int, Str
from middlewared.service_exception import ValidationErrors
from middlewared.validators import check_path_resides_within_volume_sync, validate_schema, Range, Email
@pytest.mark.parametrize("schema,data,result", [
    ([Str("text")], {"text": "XXX"}, {"text": "XXX"}),
    ([Str("text", default="XXX")], {}, {"text": "XXX"}),
    ([Str("text", required=True)], {}, {"text"}),
    ([Int("number")], {"number": "1"}, {"number": 1}),
    ([Int("number")], {"number": "XXX"}, {"number"}),
    ([Int("number", validators=[Range(min_=2)])], {"number": 1}, {"number"}),
    ([Dict("image", Str("repository", required=True))], {}, {"image.repository"}),
])
def test__validate_schema(schema, data, result):
    """validate_schema() mutates `data` in place (defaults, coercion).
    When `result` is a set it lists the attribute names expected in the
    returned ValidationErrors; otherwise it is the expected final data."""
    verrors = validate_schema(schema, data)
    if isinstance(result, set):
        assert result == {e.attribute for e in verrors.errors}
    else:
        assert data == result
@pytest.mark.parametrize("email,should_raise", [
    ("2@2@me.com", False),
    ("2&2@me.com", False),
    ("2@\uD800\uD800ñoñó郵件ñoñó郵件.商務", False),
    (f'{"2" * 250}@me.com', True),
    ("@me.com", True),
    ("2@", True),
    ("@", True),
    ("", True),
])
def test__email_schema(email, should_raise):
    """Email validator is permissive about local-part/unicode content
    but rejects over-long and structurally empty addresses."""
    if not should_raise:
        Email()(email)
    else:
        with pytest.raises(ValueError):
            Email()(email)
@pytest.fixture(scope="function")
def setup_mnt(tmpdir):
    """Create /mnt/pool (with symlinks pointing outside the pool) and
    /mnt/foo; tear everything down afterwards."""
    os.makedirs('/mnt/pool/foo')
    os.mkdir('/mnt/foo')
    try:
        # Both symlinks escape /mnt/pool, to exercise symlink rejection.
        os.symlink(tmpdir, '/mnt/pool/symlink')
        os.symlink('/mnt/foo', '/mnt/pool/symlink2')
        yield
    finally:
        shutil.rmtree('/mnt/pool')
        os.rmdir('/mnt/foo')
@pytest.mark.parametrize("path,should_raise", [
    ('/tmp', True),
    ('EXTERNAL://smb_server.local/SHARE', True),
    ('/mnt/does_not_exist', True),
    ('/mnt/foo', True),
    ('/mnt/pool/foo', False),
    ('/mnt/pool', False),
    ('/mnt/pool/..', True),
    ('/mnt/pool/symlink', True),
    ('/mnt/pool/symlink2', True)
])
def test___check_path_resides_within_volume(setup_mnt, path, should_raise):
    """Paths outside the pool, nonexistent paths, `..` escapes and
    symlinks leaving the pool all collect validation errors."""
    volumes = ['pool']
    verr = ValidationErrors()
    check_path_resides_within_volume_sync(verr, "test.path", path, volumes)
    if should_raise:
        with pytest.raises(ValidationErrors):
            verr.check()
    else:
        verr.check()
| 2,390 | Python | .py | 68 | 29.867647 | 103 | 0.609586 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,344 | middleware.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/middleware.py | import asyncio
import logging
from unittest.mock import AsyncMock, Mock
from middlewared.plugins.datastore.read import DatastoreService
from middlewared.utils import filter_list
from middlewared.utils.plugins import SchemasMixin
class Middleware(SchemasMixin, dict):
    """Dict-backed middleware test double: `self[name]` maps a method
    name to a callable/mock which `call`/`call_sync` will invoke."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self['failover.licensed'] = AsyncMock(return_value=False)
        self.call_hook = AsyncMock()
        self.call_hook_inline = Mock()
        self.event_register = Mock()
        self.send_event = Mock()
        self.logger = logging.getLogger("middlewared")
        # NOTE(review): this second no-arg super().__init__() looks
        # redundant given the call at the top — confirm it is intentional.
        super().__init__()
        # Resolve core schemas like `query-filters`
        super()._resolve_methods([DatastoreService(self)], [])
    def _resolve_methods(self, services, events):
        # Resolution failures are downgraded to warnings for unit tests.
        try:
            return super()._resolve_methods(services, events)
        except ValueError as e:
            self.logger.warning(str(e))
    async def _call(self, name, serviceobj, method, args, app=None):
        self._resolve_methods([serviceobj], [])
        return await method(*args)
    async def call(self, name, *args):
        # Await the handler's result when it returns a coroutine.
        result = self[name](*args)
        if asyncio.iscoroutine(result):
            result = await result
        return result
    def call_sync(self, name, *args):
        return self[name](*args)
    async def run_in_executor(self, executor, method, *args, **kwargs):
        # Run inline: tests do not need a real executor or thread.
        return method(*args, **kwargs)
    async def run_in_thread(self, method, *args, **kwargs):
        return method(*args, **kwargs)
    def _query_filter(self, lst):
        # Return a datastore-style query callable over a fixed list.
        def query(filters=None, options=None):
            return filter_list(lst, filters, options)
        return query
| 1,730 | Python | .py | 41 | 34.585366 | 71 | 0.652927 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,345 | helpers.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/helpers.py | import functools
from unittest.mock import Mock
from middlewared.utils.plugins import LoadPluginsMixin
def load_compound_service(name):
    """Load the named compound service with plugin events stubbed out.

    Returns a partial that, given a fake middleware, wires it into the
    service via _compound_service_wrapper.
    """
    loader = LoadPluginsMixin()
    loader.event_register = Mock()
    loader.get_events = Mock(return_value=[])
    loader._load_plugins()
    svc = loader.get_service(name)
    return functools.partial(_compound_service_wrapper, svc)
def _compound_service_wrapper(service, fake_middleware):
service.middleware = fake_middleware
for part in service.parts:
part.middleware = fake_middleware
return service
def create_service(middleware, cls):
    """Instantiate *cls* against *middleware*, resolve its methods, and
    return the new service instance."""
    instance = cls(middleware)
    middleware._resolve_methods([instance], [])
    return instance
| 695 | Python | .py | 19 | 32.263158 | 64 | 0.751868 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,346 | test_smartctl.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/common/smart/test_smartctl.py | import pytest
from unittest.mock import Mock, patch
from middlewared.common.smart.smartctl import get_smartctl_args, SMARTCTX
@pytest.mark.asyncio
async def test__get_smartctl_args__disk_nonexistent():
    """An unknown disk name yields no smartctl arguments."""
    ctx = SMARTCTX(devices={}, enterprise_hardware=False, middleware=None)
    result = await get_smartctl_args(ctx, "sda", "")
    assert result is None
@pytest.mark.asyncio
async def test__get_smartctl_args__nvme():
    """NVMe namespaces are addressed directly with `-d nvme`."""
    ctx = SMARTCTX(devices={}, enterprise_hardware=False, middleware=None)
    result = await get_smartctl_args(ctx, "nvme0n1", "")
    assert result == ["/dev/nvme0n1", "-d", "nvme"]
@pytest.mark.asyncio
async def test_get_disk__unknown_usb_bridge():
    """A disk on the USB bus is driven through SAT translation (-d sat)."""
    context = SMARTCTX(
        devices={
            "sda": {
                "name": "sda",
                "sectorsize": 4096,
                "number": 2048,
                "subsystem": "scsi",
                "driver": "sd",
                "hctl": "17:0:0:0",
                "size": 10000831348736,
                "mediasize": 10000831348736,
                "vendor": "HGST",
                "ident": "ZZBBBAAA",
                "serial": "ZZBBBAAA",
                "model": "USB MODEL",
                "descr": "USB MODEL",
                "lunid": "5000cca251214158",
                "bus": "USB",
                "type": "SSD",
                "blocks": 19532873728,
                "serial_lunid": "ZZBBBAAA_5000cca251214158",
                "rotationrate": None,
                "stripesize": None,
                "parts": [],
                "dif": False
            },
        },
        enterprise_hardware=False,
        middleware=None,
    )
    assert await get_smartctl_args(context, "sda", "") == ["/dev/sda", "-d", "sat"]
@pytest.mark.asyncio
async def test_get_disk__generic():
    """A plain SCSI-bus HDD needs no device-type flag at all."""
    context = SMARTCTX(
        devices={
            "sda": {
                "name": "sda",
                "sectorsize": 4096,
                "number": 2048,
                "subsystem": "scsi",
                "driver": "sd",
                "hctl": "17:0:0:0",
                "size": 10000831348736,
                "vendor": "HGST",
                "mediasize": 10000831348736,
                "ident": "ZZBBBAAA",
                "serial": "ZZBBBAAA",
                "model": "USB MODEL",
                "descr": "USB MODEL",
                "lunid": "5000cca251214158",
                "bus": "scsi",
                "type": "HDD",
                "blocks": 19532873728,
                "serial_lunid": "ZZBBBAAA_5000cca251214158",
                "rotationrate": "7200",
                "stripesize": None,
                "parts": [],
                "dif": False
            },
        },
        enterprise_hardware=False,
        middleware=None,
    )
    # smartctl probe succeeds, so no extra `-d` argument is appended.
    with patch("middlewared.common.smart.smartctl.run") as run:
        run.return_value = Mock(stdout="Everything is OK")
        assert await get_smartctl_args(context, "sda", "") == ["/dev/sda"]
@pytest.mark.asyncio
async def test_get_disk__nvme_behind_sd():
    """An sd-attached device with vendor 'NVMe' still gets `-d nvme`."""
    context = SMARTCTX(
        devices={
            "sda": {
                "name": "sda",
                "sectorsize": 4096,
                "number": 2048,
                "subsystem": "scsi",
                "driver": "sd",
                "hctl": "17:0:0:0",
                "size": 10000831348736,
                "vendor": "NVMe",
                "mediasize": 10000831348736,
                "ident": "ZZBBBAAA",
                "serial": "ZZBBBAAA",
                "model": "USB MODEL",
                "descr": "USB MODEL",
                "lunid": "5000cca251214158",
                "bus": "scsi",
                "type": "HDD",
                "blocks": 19532873728,
                "serial_lunid": "ZZBBBAAA_5000cca251214158",
                "rotationrate": "7200",
                "stripesize": None,
                "parts": [],
                "dif": False
            },
        },
        enterprise_hardware=False,
        middleware=None,
    )
    with patch("middlewared.common.smart.smartctl.run") as run:
        run.return_value = Mock(stdout="Everything is OK")
        assert await get_smartctl_args(context, "sda", "") == ["/dev/sda", "-d", "nvme"]
| 4,183 | Python | .py | 114 | 24.105263 | 92 | 0.479783 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,347 | test_smartd.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/etc_files/test_smartd.py | import subprocess
import textwrap
from unittest.mock import call, Mock, patch
import pytest
from middlewared.etc_files.smartd import (
ensure_smart_enabled, get_smartd_schedule, get_smartd_config
)
@pytest.mark.asyncio
async def test__ensure_smart_enabled__smart_error():
    """SMART unavailable and disabled: report failure after one probe."""
    with patch("middlewared.etc_files.smartd.smartctl") as smartctl_mock:
        payload = '{"smart_support": {"enabled": false, "available": false}}'
        smartctl_mock.return_value = Mock(stdout=payload)
        result = await ensure_smart_enabled(["/dev/ada0"])
        assert result is False
        smartctl_mock.assert_called_once()
@pytest.mark.asyncio
async def test__ensure_smart_enabled__smart_enabled():
    """SMART already on: success without issuing an enable command."""
    with patch("middlewared.etc_files.smartd.smartctl") as smartctl_mock:
        payload = '{"smart_support": {"enabled": true, "available": true}}'
        smartctl_mock.return_value = Mock(stdout=payload)
        assert await ensure_smart_enabled(["/dev/ada0"])
        smartctl_mock.assert_called_once()
@pytest.mark.asyncio
async def test__ensure_smart_enabled__smart_was_disabled():
    """SMART available but off: a second smartctl call turns it on."""
    with patch("middlewared.etc_files.smartd.smartctl") as run:
        run.return_value = Mock(stdout='{"smart_support": {"enabled": false, "available": true}}', returncode=0)
        assert await ensure_smart_enabled(["/dev/ada0"])
        # First call probes with `-i`, second issues `-s on`.
        assert run.call_args_list == [
            call(["/dev/ada0", "-i", "--json=c"], check=False, stderr=subprocess.STDOUT,
                 encoding="utf8", errors="ignore"),
            call(["/dev/ada0", "-s", "on"], check=False, stderr=subprocess.STDOUT),
        ]
@pytest.mark.asyncio
async def test__ensure_smart_enabled__enabling_smart_failed():
    """A non-zero smartctl exit code means SMART could not be enabled."""
    with patch("middlewared.etc_files.smartd.smartctl") as smartctl_mock:
        smartctl_mock.return_value = Mock(
            stdout='{"smart_support": {"enabled": false, "available": false}}',
            returncode=1,
        )
        result = await ensure_smart_enabled(["/dev/ada0"])
        assert result is False
@pytest.mark.asyncio
async def test__ensure_smart_enabled__handled_args_properly():
    """Extra device args (e.g. `-d sat`) are kept ahead of the probe flags."""
    with patch("middlewared.etc_files.smartd.smartctl") as run:
        run.return_value = Mock(stdout='{"smart_support": {"enabled": true, "available": true}}')
        assert await ensure_smart_enabled(["/dev/ada0", "-d", "sat"])
        run.assert_called_once_with(
            ["/dev/ada0", "-d", "sat", "-i", "--json=c"], check=False, stderr=subprocess.STDOUT,
            encoding="utf8", errors="ignore",
        )
def test__get_smartd_schedule__need_mapping():
    """Month/weekday names and mixed tokens are mapped to smartd's
    numeric regex form; unparseable tokens ('hedgehog day') are dropped."""
    assert get_smartd_schedule({
        "smarttest_schedule": {
            "month": "jan,feb,mar,apr,may,jun,jul,aug,sep,oct,nov,dec",
            "dom": "1,hedgehog day,3",
            "dow": "tue,SUN",
            "hour": "*/1",
        }
    }) == "../(01|03)/(2|7)/.."
def test__get_smartd_schedule__0_is_sunday():
    """Cron day-of-week 0 is translated to smartd's 7 (Sunday)."""
    schedule = {
        "month": "*",
        "dom": "*",
        "dow": "0",
        "hour": "0",
    }
    assert get_smartd_schedule({"smarttest_schedule": schedule}) == "../../(7)/(00)"
def test__get_smartd_config():
    """Full smartd.conf line: device args, powermode, `-W` thresholds from
    the smart_* values, the alert exec hook, and a `-s` schedule suffix."""
    assert get_smartd_config({
        "smartctl_args": ["/dev/ada0", "-d", "sat"],
        "smart_powermode": "never",
        "smart_difference": 0,
        "smart_informational": 1,
        "smart_critical": 2,
        "smarttest_type": "S",
        "smarttest_schedule": {
            "month": "*/1",
            "dom": "*/1",
            "dow": "*/1",
            "hour": "*/1",
        },
        "disk_critical": None,
        "disk_difference": None,
        "disk_informational": None,
    }) == textwrap.dedent("""\
        /dev/ada0 -d sat -n never -W 0,1,2 -m root -M exec /usr/local/libexec/smart_alert.py\\
        -s S/../.././..\\
        """)
def test__get_smartd_config_without_schedule():
    """Without a smarttest schedule no `-s` suffix is emitted."""
    assert get_smartd_config({
        "smartctl_args": ["/dev/ada0", "-d", "sat"],
        "smart_powermode": "never",
        "smart_difference": 0,
        "smart_informational": 1,
        "smart_critical": 2,
        "disk_critical": None,
        "disk_difference": None,
        "disk_informational": None,
    }) == textwrap.dedent("""\
        /dev/ada0 -d sat -n never -W 0,1,2 -m root -M exec /usr/local/libexec/smart_alert.py""")
def test__get_smartd_config_with_temp():
    """Per-disk temperature thresholds override the smart_* values in
    the `-W difference,informational,critical` argument."""
    assert get_smartd_config({
        "smartctl_args": ["/dev/ada0", "-d", "sat"],
        "smart_powermode": "never",
        "smart_difference": 0,
        "smart_informational": 1,
        "smart_critical": 2,
        "disk_critical": 50,
        "disk_difference": 10,
        "disk_informational": 40,
    }) == textwrap.dedent("""\
        /dev/ada0 -d sat -n never -W 10,40,50 -m root -M exec /usr/local/libexec/smart_alert.py""")
| 4,551 | Python | .py | 106 | 35.141509 | 113 | 0.590621 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,348 | test_quota.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/alert/source/test_quota.py | from unittest.mock import Mock
import pytest
from middlewared.alert.base import Alert
from middlewared.alert.source.quota import QuotaCriticalAlertClass, QuotaAlertSource
@pytest.mark.parametrize("dataset_query,alerts", [
    # 10 MB quota, 10 MB reserved, a few kilobytes used
    (
        [
            {
                "name": {"rawvalue": "Volume_1/Hard_Drives/Bill_HDD"},
                "used": {"rawvalue": "10485760"},
                "quota": {"rawvalue": "10485760"},
                "available": {"rawvalue": "10395648"},
            },
        ],
        []
    ),
    # Refquota
    (
        [
            {
                "name": {"rawvalue": "Volume_1/Hard_Drives/Bill_HDD"},
                "usedbydataset": {"rawvalue": "10000000"},
                "refquota": {"rawvalue": "10485760"},
            },
        ],
        [
            Alert(
                QuotaCriticalAlertClass,
                args={
                    "name": "Refquota",
                    "dataset": "Volume_1/Hard_Drives/Bill_HDD",
                    "used_fraction": 95.367431640625,
                    "used": "9.54 MiB",
                    "quota_value": "10 MiB",
                },
                key=["Volume_1/Hard_Drives/Bill_HDD", "refquota"],
                mail=None,
            )
        ]
    )
])
def test__quota_alert_source(dataset_query, alerts):
    """check_sync() raises a critical alert when refquota usage crosses
    the threshold, but not when quota usage is dominated by reservation."""
    middleware = Mock()
    middleware.call_sync.return_value = dataset_query
    qas = QuotaAlertSource(middleware)
    # Owner lookup is stubbed so no mail address is attached to alerts.
    qas._get_owner = Mock(return_value=0)
    assert qas.check_sync() == alerts
| 1,572 | Python | .py | 48 | 21.958333 | 84 | 0.507246 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,349 | test_jbof.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/alert/source/test_jbof.py | from unittest.mock import Mock
import pytest
import copy
from middlewared.alert.base import Alert
from middlewared.alert.source.jbof import JBOFInvalidDataAlertClass, JBOFRedfishCommAlertClass, JBOFAlertSource, JBOFElementWarningAlertClass, JBOFElementCriticalAlertClass
from middlewared.pytest.unit.middleware import Middleware
uuid1 = '244c0e5f-bf7b-4a68-bd40-80e3f1a5b4ed'  # enclosure UUID shared by the fixtures below
desc1 = 'ES24N - 1234'  # human-readable JBOF description
ip1 = '1.2.3.4'  # first management IP
ip2 = '1.2.3.5'  # second management IP
# Shape of a `jbof.query` result for a single configured enclosure.
jbof_query_one = [
    {
        'id': 1,
        'description': desc1,
        'index': 0,
        'uuid': uuid1,
        'mgmt_ip1': ip1,
        'mgmt_ip2': ip2,
        'mgmt_username': 'Admin',
        'mgmt_password': 'SomePassword'
    }
]
# Presumably the identifying fields interpolated into JBOF alert args —
# confirm against the tests further below.
jbof1_id_dict = {'desc': desc1, 'ip1': ip1, 'ip2': ip2}
jbof_data_one = [
{'bsg': None,
'controller': False,
'dmi': uuid1,
'elements': {'Array Device Slot': {'1': {'descriptor': 'Disk #1',
'dev': 'nvme25n1',
'original': {'descriptor': 'slot1',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 1},
'status': 'OK',
'value': None,
'value_raw': 16777216},
'10': {'descriptor': 'Disk #10',
'dev': None,
'original': {'descriptor': 'slot10',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 10},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'11': {'descriptor': 'Disk #11',
'dev': None,
'original': {'descriptor': 'slot11',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 11},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'12': {'descriptor': 'Disk #12',
'dev': None,
'original': {'descriptor': 'slot12',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 12},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'13': {'descriptor': 'Disk #13',
'dev': None,
'original': {'descriptor': 'slot13',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 13},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'14': {'descriptor': 'Disk #14',
'dev': None,
'original': {'descriptor': 'slot14',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 14},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'15': {'descriptor': 'Disk #15',
'dev': None,
'original': {'descriptor': 'slot15',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 15},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'16': {'descriptor': 'Disk #16',
'dev': None,
'original': {'descriptor': 'slot16',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 16},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'17': {'descriptor': 'Disk #17',
'dev': None,
'original': {'descriptor': 'slot17',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 17},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'18': {'descriptor': 'Disk #18',
'dev': None,
'original': {'descriptor': 'slot18',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 18},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'19': {'descriptor': 'Disk #19',
'dev': None,
'original': {'descriptor': 'slot19',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 19},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'2': {'descriptor': 'Disk #2',
'dev': 'nvme27n1',
'original': {'descriptor': 'slot2',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 2},
'status': 'OK',
'value': None,
'value_raw': 16777216},
'20': {'descriptor': 'Disk #20',
'dev': None,
'original': {'descriptor': 'slot20',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 20},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'21': {'descriptor': 'Disk #21',
'dev': None,
'original': {'descriptor': 'slot21',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 21},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'22': {'descriptor': 'Disk #22',
'dev': None,
'original': {'descriptor': 'slot22',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 22},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'23': {'descriptor': 'Disk #23',
'dev': None,
'original': {'descriptor': 'slot23',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 23},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'24': {'descriptor': 'Disk #24',
'dev': None,
'original': {'descriptor': 'slot24',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 24},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'3': {'descriptor': 'Disk #3',
'dev': 'nvme26n1',
'original': {'descriptor': 'slot3',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 3},
'status': 'OK',
'value': None,
'value_raw': 16777216},
'4': {'descriptor': 'Disk #4',
'dev': None,
'original': {'descriptor': 'slot4',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 4},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'5': {'descriptor': 'Disk #5',
'dev': None,
'original': {'descriptor': 'slot5',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 5},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'6': {'descriptor': 'Disk #6',
'dev': None,
'original': {'descriptor': 'slot6',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 6},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'7': {'descriptor': 'Disk #7',
'dev': None,
'original': {'descriptor': 'slot7',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 7},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'8': {'descriptor': 'Disk #8',
'dev': None,
'original': {'descriptor': 'slot8',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 8},
'status': 'Not installed',
'value': None,
'value_raw': 83886080},
'9': {'descriptor': 'Disk #9',
'dev': None,
'original': {'descriptor': 'slot9',
'enclosure_bsg': None,
'enclosure_id': uuid1,
'enclosure_sg': None,
'slot': 9},
'status': 'Not installed',
'value': None,
'value_raw': 83886080}},
'Cooling': {'Fan1': {'descriptor': 'Fan1',
'status': 'OK',
'value': 'SpeedRPM=21760.0',
'value_raw': None},
'Fan2': {'descriptor': 'Fan2',
'status': 'OK',
'value': 'SpeedRPM=21760.0',
'value_raw': None},
'Fan3': {'descriptor': 'Fan3',
'status': 'OK',
'value': 'SpeedRPM=21760.0',
'value_raw': None},
'Fan4': {'descriptor': 'Fan4',
'status': 'OK',
'value': 'SpeedRPM=21792.0',
'value_raw': None},
'Fan5': {'descriptor': 'Fan5',
'status': 'OK',
'value': None,
'value_raw': None},
'Fan6': {'descriptor': 'Fan6',
'status': 'OK',
'value': 'SpeedRPM=21792.0',
'value_raw': None}},
'Power Supply': {'PSU1': {'descriptor': 'PSU1,YSEF1600EM-2A01P10,S0A00A3032029000366,A00,3Y POWER,1600W',
'status': 'OK',
'value': 'Normal',
'value_raw': None},
'PSU2': {'descriptor': 'PSU2,YSEF1600EM-2A01P10,S0A00A3032029000366,A00,3Y POWER,1600W',
'status': 'OK',
'value': 'Normal',
'value_raw': None}},
'Temperature Sensors': {'TempDrive1': {'descriptor': 'Temperature Sensor Drive 1',
'status': 'OK',
'value': '18.0 C',
'value_raw': None},
'TempDrive10': {'descriptor': 'Temperature Sensor Drive 10',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive11': {'descriptor': 'Temperature Sensor Drive 11',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive12': {'descriptor': 'Temperature Sensor Drive 12',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive13': {'descriptor': 'Temperature Sensor Drive 13',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive14': {'descriptor': 'Temperature Sensor Drive 14',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive15': {'descriptor': 'Temperature Sensor Drive 15',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive16': {'descriptor': 'Temperature Sensor Drive 16',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive17': {'descriptor': 'Temperature Sensor Drive 17',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive18': {'descriptor': 'Temperature Sensor Drive 18',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive19': {'descriptor': 'Temperature Sensor Drive 19',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive2': {'descriptor': 'Temperature Sensor Drive 2',
'status': 'OK',
'value': '18.0 C',
'value_raw': None},
'TempDrive20': {'descriptor': 'Temperature Sensor Drive 20',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive21': {'descriptor': 'Temperature Sensor Drive 21',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive22': {'descriptor': 'Temperature Sensor Drive 22',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive23': {'descriptor': 'Temperature Sensor Drive 23',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive24': {'descriptor': 'Temperature Sensor Drive 24',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive3': {'descriptor': 'Temperature Sensor Drive 3',
'status': 'OK',
'value': '17.0 C',
'value_raw': None},
'TempDrive4': {'descriptor': 'Temperature Sensor Drive 4',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive5': {'descriptor': 'Temperature Sensor Drive 5',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive6': {'descriptor': 'Temperature Sensor Drive 6',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive7': {'descriptor': 'Temperature Sensor Drive 7',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive8': {'descriptor': 'Temperature Sensor Drive 8',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempDrive9': {'descriptor': 'Temperature Sensor Drive 9',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempPSU1Temp1': {'descriptor': 'TempPSU1Temp1',
'status': 'Not installed',
'value': None,
'value_raw': None},
'TempPSU2Temp1': {'descriptor': 'TempPSU2Temp1',
'status': 'OK',
'value': '22.0 C',
'value_raw': None},
'TempSensMidplane1': {'descriptor': 'Midplane Temp1',
'status': 'OK',
'value': '19.0 C',
'value_raw': None},
'TempSensMidplane2': {'descriptor': 'Midplane Temp2',
'status': 'OK',
'value': '20.0 C',
'value_raw': None}},
'Voltage Sensor': {'VoltPS1Vin': {'descriptor': 'VoltPS1Vin',
'status': 'Not installed',
'value': None,
'value_raw': None},
'VoltPS2Vin': {'descriptor': 'VoltPS2Vin',
'status': 'OK',
'value': '204.0',
'value_raw': None}}},
'front_slots': 24,
'id': uuid1,
'internal_slots': 0,
'model': 'ES24N',
'name': 'ES24N JBoF Enclosure',
'rackmount': True,
'rear_slots': 0,
'sg': None,
'should_ignore': False,
'status': ['OK'],
'top_loaded': False}]
@pytest.mark.asyncio
async def test__jbof_redfish_comm_alert():
    """An empty enclosure mapping must produce exactly one redfish-communication alert."""
    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=[])

    alerts = await JBOFAlertSource(middleware).check()

    assert len(alerts) == 1, alerts
    assert alerts[0] == Alert(JBOFRedfishCommAlertClass, args=jbof1_id_dict)
    assert alerts[0].formatted == f'JBOF: "{desc1}" ({ip1}/{ip2}) Failed to communicate with redfish interface.'
@pytest.mark.asyncio
async def test__jbof_no_alert():
    """Healthy enclosure data must not raise any alerts."""
    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=jbof_data_one)

    alerts = await JBOFAlertSource(middleware).check()
    assert not alerts, alerts
@pytest.mark.asyncio
async def test__jbof_invalid_data():
    """Removing a mandatory key from the mapped data must yield an invalid-data alert."""
    broken = copy.deepcopy(jbof_data_one)
    del broken[0]['elements']

    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=broken)

    alerts = await JBOFAlertSource(middleware).check()

    assert len(alerts) == 1, alerts
    assert alerts[0].klass == JBOFInvalidDataAlertClass
    assert alerts[0].formatted == f'JBOF: "{desc1}" ({ip1}/{ip2}) does not provide valid data for: elements'
@pytest.mark.asyncio
async def test__jbof_psu_critical():
    """A power-supply element in 'Critical' state must raise a critical element alert."""
    broken = copy.deepcopy(jbof_data_one)
    broken[0]['elements']['Power Supply']['PSU1'] = {
        'descriptor': 'PSU1,,,,',
        'status': 'Critical',
        'value': 'LossOfInput',
        'value_raw': None,
    }

    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=broken)

    alerts = await JBOFAlertSource(middleware).check()

    assert len(alerts) == 1, alerts
    assert alerts[0].klass == JBOFElementCriticalAlertClass
    assert alerts[0].formatted == f'JBOF: "{desc1}" ({ip1}/{ip2}) Power Supply PSU1 is critical: LossOfInput'
@pytest.mark.asyncio
async def test__jbof_fan_noncritical():
    """A fan in 'Noncritical' state must raise a warning-level element alert."""
    broken = copy.deepcopy(jbof_data_one)
    broken[0]['elements']['Cooling']['Fan6'] = {
        'descriptor': 'Fan6',
        'status': 'Noncritical',
        'value': 'SpeedRPM=12345.0',
        'value_raw': None,
    }

    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=broken)

    alerts = await JBOFAlertSource(middleware).check()

    assert len(alerts) == 1, alerts
    assert alerts[0].klass == JBOFElementWarningAlertClass
    assert alerts[0].formatted == f'JBOF: "{desc1}" ({ip1}/{ip2}) Cooling Fan6 is noncritical: SpeedRPM=12345.0'
@pytest.mark.asyncio
async def test__jbof_temp_sensor_critical():
    """A temperature sensor in 'Critical' state must raise a critical element alert."""
    broken = copy.deepcopy(jbof_data_one)
    broken[0]['elements']['Temperature Sensors']['TempDrive1'] = {
        'descriptor': 'Temperature Sensor Drive 1',
        'status': 'Critical',
        'value': '50.0 C',
        'value_raw': None,
    }

    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=broken)

    alerts = await JBOFAlertSource(middleware).check()

    assert len(alerts) == 1, alerts
    assert alerts[0].klass == JBOFElementCriticalAlertClass
    assert alerts[0].formatted == f'JBOF: "{desc1}" ({ip1}/{ip2}) Temperature Sensors TempDrive1 is critical: 50.0 C'
@pytest.mark.asyncio
async def test__jbof_volt_sensor_critical():
    """A voltage sensor in 'Critical' state must raise a critical element alert."""
    broken = copy.deepcopy(jbof_data_one)
    broken[0]['elements']['Voltage Sensor']['VoltPS1Vin'] = {
        'descriptor': 'VoltPS1Vin',
        'status': 'Critical',
        'value': '100',
        'value_raw': None,
    }

    middleware = Middleware()
    middleware['jbof.query'] = Mock(return_value=jbof_query_one)
    middleware['enclosure2.map_jbof'] = Mock(return_value=broken)

    alerts = await JBOFAlertSource(middleware).check()

    assert len(alerts) == 1, alerts
    assert alerts[0].klass == JBOFElementCriticalAlertClass
    assert alerts[0].formatted == f'JBOF: "{desc1}" ({ip1}/{ip2}) Voltage Sensor VoltPS1Vin is critical: 100'
| 35,067 | Python | .py | 534 | 26.404494 | 172 | 0.281877 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,350 | test_ipmi_sel.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/alert/source/test_ipmi_sel.py | from middlewared.alert.source.ipmi_sel import remove_deasserted_records
import pytest
@pytest.mark.parametrize("records,result", [
(
[
{
"name": "PS2 Status",
"event_direction": "Assertion Event",
"event": "Power Supply Failure detected"
},
{
"name": "PS2 Status",
"event_direction": "Deassertion Event",
"event": "Power Supply Failure detected"
},
{
"name": "Sensor #255",
"event_direction": "Assertion Event",
"event": "Event Offset = 00h"
},
],
[2],
)
])
def test_remove_deasserted_records(records, result):
assert remove_deasserted_records(records) == [records[i] for i in result]
| 840 | Python | .py | 26 | 21.5 | 77 | 0.510481 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,351 | test_limits.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_limits.py | import pytest
import json
from aiohttp.http_websocket import WSCloseCode
from middlewared.utils import limits
def test__limit_unauthenticated_excetion():
    """An oversized payload on an unauthenticated connection raises MsgSizeError with full metadata."""
    # NOTE(review): "excetion" in the function name is a typo for "exception";
    # kept as-is so the test's discovery name is unchanged.
    oversized = 'x' * (limits.MsgSizeLimit.UNAUTHENTICATED + 1)

    with pytest.raises(limits.MsgSizeError) as exc_info:
        limits.parse_message(False, oversized)

    raised = exc_info.value
    assert raised.limit is limits.MsgSizeLimit.UNAUTHENTICATED
    assert raised.datalen == len(oversized)
    assert raised.ws_close_code is WSCloseCode.INVALID_TEXT
    assert raised.ws_errmsg == 'Anonymous connection max message length is 8 kB'
def test__limit_authenticated_basic_exception():
    """An authenticated message over the AUTHENTICATED cap raises and records the method name."""
    payload = json.dumps({
        'msg': 'method',
        'method': 'canary',
        'params': ['x' * (limits.MsgSizeLimit.AUTHENTICATED + 1)],
    })

    with pytest.raises(limits.MsgSizeError) as exc_info:
        limits.parse_message(True, payload)

    raised = exc_info.value
    assert raised.limit is limits.MsgSizeLimit.AUTHENTICATED
    assert raised.datalen == len(payload)
    assert raised.method_name == 'canary'
    assert raised.ws_close_code is WSCloseCode.MESSAGE_TOO_BIG
    assert raised.ws_errmsg == 'Max message length is 64 kB'
def test__limit_authenticated_extended_exception():
    """A message over even the EXTENDED cap raises with the EXTENDED limit recorded."""
    payload = json.dumps({
        'msg': 'method',
        'method': 'canary',
        'params': ['x' * (limits.MsgSizeLimit.EXTENDED + 1)],
    })

    with pytest.raises(limits.MsgSizeError) as exc_info:
        limits.parse_message(True, payload)

    raised = exc_info.value
    assert raised.limit is limits.MsgSizeLimit.EXTENDED
    assert raised.datalen == len(payload)
    assert raised.ws_close_code is WSCloseCode.MESSAGE_TOO_BIG
    assert raised.ws_errmsg == 'Max message length is 64 kB'
def test__limit_unauthenticated_parse():
    """A message under the unauthenticated cap round-trips through parse_message."""
    message = {'msg': 'method', 'method': 'canary', 'params': ['x' * 1000]}
    assert limits.parse_message(False, json.dumps(message)) == message
def test__limit_authenticated_parse():
    """A message under the authenticated cap round-trips through parse_message."""
    message = {'msg': 'method', 'method': 'canary', 'params': ['x' * 1000]}
    assert limits.parse_message(True, json.dumps(message)) == message
| 2,005 | Python | .py | 37 | 49.459459 | 119 | 0.718526 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,352 | test_security_descriptor.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_security_descriptor.py | import pytest
from middlewared.utils import security_descriptor
SAMPLE_DOM_SID = 'S-1-5-21-3510196835-1033636670-2319939847-200108'
SAMPLE_BUILTIN_SID = 'S-1-5-32-544'
@pytest.mark.parametrize('theacl', [
    # One all-ALLOWED and one all-DENIED ACL, each covering every
    # (builtin SID, domain SID) x (FULL, CHANGE, READ) combination.
    [
        {'ae_who_sid': sid, 'ae_perm': perm, 'ae_type': ae_type}
        for sid in (SAMPLE_BUILTIN_SID, SAMPLE_DOM_SID)
        for perm in ('FULL', 'CHANGE', 'READ')
    ]
    for ae_type in ('ALLOWED', 'DENIED')
])
def test__convert_share_acl(theacl):
    """Converting a share ACL to a packed security descriptor and back yields the same ACL."""
    packed = security_descriptor.share_acl_to_sd_bytes(theacl)
    assert security_descriptor.sd_bytes_to_share_acl(packed) == theacl
| 1,515 | Python | .py | 26 | 52.307692 | 102 | 0.61186 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,353 | test_filesystem_misc.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_filesystem_misc.py | import pytest
from middlewared.utils.filesystem import acl
from middlewared.utils.filesystem import attrs
@pytest.mark.parametrize('xattr_list,expected', [
    ([xat.value], True) for xat in (
        acl.ACLXattr.POSIX_ACCESS,
        acl.ACLXattr.POSIX_DEFAULT,
        acl.ACLXattr.ZFS_NATIVE,
    )
] + [([], False)])
def test__acl_is_present(xattr_list, expected):
    """acl_is_present is True when any known ACL xattr name appears, False for an empty list."""
    assert acl.acl_is_present(xattr_list) is expected
def test__zfs_attrs_enum():
    """Every supported attribute flag must be a member of the ZFSAttr enum."""
    missing = [attr for attr in attrs.SUPPORTED_ATTRS if attr not in attrs.ZFSAttr]
    assert not missing, missing
@pytest.mark.parametrize('attr', attrs.SUPPORTED_ATTRS)
def test__zfs_attr_mask_conversion(attr):
    """A single-bit attribute mask dumps to a one-element name list and a one-hot dict."""
    # List form: just the upper-cased member name.
    assert attrs.zfs_attributes_dump(attr) == [attr.name.upper()]
    # Dict form: every supported attribute keyed lower-case, only `attr` True.
    expected = {member.name.lower(): (member == attr) for member in attrs.SUPPORTED_ATTRS}
    assert attrs.zfs_attributes_to_dict(attr) == expected
@pytest.mark.parametrize('attr', attrs.SUPPORTED_ATTRS)
def test__dict_to_attr_mask_conversion_single(attr):
    """A dict enabling a single attribute converts to that attribute's mask bit."""
    assert attrs.dict_to_zfs_attributes_mask({attr.name.lower(): True}) == attr
def test__dict_to_attr_mask_conversion_multi():
    """A dict enabling every supported attribute converts to the full mask."""
    everything = {member.name.lower(): True for member in attrs.SUPPORTED_ATTRS}
    assert attrs.dict_to_zfs_attributes_mask(everything) == attrs.SUPPORTED_ATTRS
@pytest.mark.parametrize('attr', attrs.SUPPORTED_ATTRS)
def test__list_to_attr_mask_conversion_single(attr):
    """A one-element name list converts to the matching single-bit mask."""
    assert attrs.zfs_attributes_to_mask([attr.name]) == attr
def test__list_to_attr_mask_conversion_multi():
    """A list containing every supported attribute name converts to the full mask."""
    names = [member.name for member in attrs.SUPPORTED_ATTRS]
    assert attrs.zfs_attributes_to_mask(names) == attrs.SUPPORTED_ATTRS
| 1,699 | Python | .py | 34 | 46.147059 | 86 | 0.738629 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,354 | test_audit.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_audit.py | import pytest
from middlewared.auth import (
UserSessionManagerCredentials,
TrueNasNodeSessionManagerCredentials
)
from middlewared.utils.audit import audit_username_from_session
from middlewared.utils.auth import AA_LEVEL1
from types import SimpleNamespace
USER_SESSION = UserSessionManagerCredentials({'username': 'bob', 'privilege': {'allowlist': []}}, AA_LEVEL1)
TOKEN_USER_SESSION = SimpleNamespace(root_credentials=USER_SESSION, is_user_session=True, user=USER_SESSION.user)
NODE_SESSION = TrueNasNodeSessionManagerCredentials()
@pytest.mark.parametrize('cred,expected', [
    (None, '.UNAUTHENTICATED'),       # no credential at all
    (USER_SESSION, 'bob'),            # direct user session -> username
    (TOKEN_USER_SESSION, 'bob'),      # token session resolves to its root user
    (NODE_SESSION, '.TRUENAS_NODE')   # HA peer-node session
])
def test_privilege_has_webui_access(cred, expected):
    # NOTE(review): the name appears copy-pasted from another test; this
    # actually exercises audit_username_from_session(), not webui-access
    # privileges. Renaming would change the test's discovery name, so it is
    # only flagged here.
    assert audit_username_from_session(cred) == expected
| 834 | Python | .py | 19 | 41.105263 | 113 | 0.782447 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,355 | test_cpu_util_generic.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_cpu_util_generic.py | # -*- coding=utf-8 -*-
from unittest.mock import Mock
import pytest
from middlewared.utils.cpu import generic_cpu_temperatures
@pytest.mark.parametrize("reading,result", [
(
{
"coretemp-isa-0001": {
"coretemp-isa-0001_temp1": {
"name": "Package id 1",
"value": 45
},
"coretemp-isa-0001_temp2": {
"name": "Core 0",
"value": 55.0
},
"coretemp-isa-0001_temp3": {
"name": "Core 1",
"value": 54.0
}},
"coretemp-isa-0000": {
"coretemp-isa-0000_temp1": {
"name": "Package id 0",
"value": 36
},
"coretemp-isa-0000_temp2": {
"name": "Core 0",
"value": 48.0
},
"coretemp-isa-0000_temp3": {
"name": "Core 1",
"value": 49.0
}
}},
{
0: 48.0,
1: 49.0,
2: 55.0,
3: 54.0,
}
)
])
def test_generic_cpu_temperatures(reading, result):
assert generic_cpu_temperatures(reading) == result
| 1,322 | Python | .py | 44 | 16.409091 | 58 | 0.386185 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,356 | test_disk_names.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_disk_names.py | import unittest.mock
import pytest
from middlewared.utils.disks import get_disk_names, VALID_WHOLE_DISK
@pytest.mark.parametrize('to_test, should_work', [
    ('sda', True),
    ('sdab', True),
    ('sdz', True),
    ('vdv', True),
    ('vds', True),
    ('nvme0n0', True),
    ('nvme2n4', True),
    ('vda1', False),
    ('vdA', False),
    ('sd2', False),
    ('sda2', False),
    ('sda3', False),
    ('vda3', False),
])
def test_regex(to_test, should_work):
    """Whole-disk names must match VALID_WHOLE_DISK; partitions and malformed names must not.

    The original if/else duplicated the same assertion with a True/False
    literal in each branch; since `should_work` is already that literal,
    a single identity comparison expresses the same check.
    """
    assert bool(VALID_WHOLE_DISK.match(to_test)) is should_work
@unittest.mock.patch('os.scandir')
def test_get_disk_names(scandir):
    """get_disk_names keeps only whole-disk device names, dropping partition entries.

    Fixes: the function under test was invoked twice (re-driving the mocked
    scandir), and the `is not None` pre-check was redundant given the
    subsequent equality assertion against a non-None list.
    """
    mock_devices = []
    for name in ['vda', 'vdb', 'sda', 'sdd', 'nvme0n1', 'sdd1', 'sda2', 'vdb2']:
        device = unittest.mock.Mock(is_file=lambda: True)
        # Mock(name=...) would configure the mock object's own name, so the
        # attribute must be assigned after construction.
        device.name = name
        mock_devices.append(device)
    scandir.return_value.__enter__.return_value = mock_devices

    # Call once; partitions (sdd1, sda2, vdb2) must be filtered out.
    assert get_disk_names() == ['vda', 'vdb', 'sda', 'sdd', 'nvme0n1']
| 1,124 | Python | .py | 33 | 29.212121 | 80 | 0.622694 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,357 | test_mountinfo.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_mountinfo.py | import pytest
from middlewared.utils.mount import __parse_to_dev, __parse_to_mnt_id, __create_tree
fake_mntinfo = r"""21 26 0:19 / /sys rw,nosuid,nodev,noexec,relatime shared:7 - sysfs sysfs rw
22 26 0:20 / /proc rw,nosuid,nodev,noexec,relatime shared:12 - proc proc rw
23 26 0:5 / /dev rw,nosuid,relatime shared:2 - devtmpfs udev rw,size=1841320k,nr_inodes=460330,mode=755,inode64
24 23 0:21 / /dev/pts rw,nosuid,noexec,relatime shared:3 - devpts devpts rw,gid=5,mode=620,ptmxmode=000
25 26 0:22 / /run rw,nosuid,nodev,noexec,relatime shared:5 - tmpfs tmpfs rw,size=402344k,mode=755,inode64
26 1 0:23 / / rw,relatime shared:1 - zfs boot-pool/ROOT/22.12-MASTER-20220616-071633 rw,xattr,noacl
27 21 0:6 / /sys/kernel/security rw,nosuid,nodev,noexec,relatime shared:8 - securityfs securityfs rw
28 23 0:24 / /dev/shm rw,nosuid,nodev shared:4 - tmpfs tmpfs rw,inode64
29 25 0:25 / /run/lock rw,nosuid,nodev,noexec,relatime shared:6 - tmpfs tmpfs rw,size=5120k,inode64
30 21 0:26 / /sys/fs/cgroup rw,nosuid,nodev,noexec,relatime shared:9 - cgroup2 cgroup2 rw,nsdelegate,memory_recursiveprot
31 21 0:27 / /sys/fs/pstore rw,nosuid,nodev,noexec,relatime shared:10 - pstore pstore rw
32 21 0:28 / /sys/fs/bpf rw,nosuid,nodev,noexec,relatime shared:11 - bpf bpf rw,mode=700
33 22 0:29 / /proc/sys/fs/binfmt_misc rw,relatime shared:13 - autofs systemd-1 rw,fd=30,pgrp=1,timeout=0,minproto=5,maxproto=5,direct,pipe_ino=12795
34 23 0:30 / /dev/hugepages rw,relatime shared:14 - hugetlbfs hugetlbfs rw,pagesize=2M
35 23 0:18 / /dev/mqueue rw,nosuid,nodev,noexec,relatime shared:15 - mqueue mqueue rw
36 21 0:7 / /sys/kernel/debug rw,nosuid,nodev,noexec,relatime shared:16 - debugfs debugfs rw
37 21 0:12 / /sys/kernel/tracing rw,nosuid,nodev,noexec,relatime shared:17 - tracefs tracefs rw
38 26 0:31 / /tmp rw,nosuid,nodev shared:18 - tmpfs tmpfs rw,inode64
39 25 0:32 / /run/rpc_pipefs rw,relatime shared:19 - rpc_pipefs sunrpc rw
40 22 0:33 / /proc/fs/nfsd rw,relatime shared:20 - nfsd nfsd rw
41 21 0:34 / /sys/fs/fuse/connections rw,nosuid,nodev,noexec,relatime shared:21 - fusectl fusectl rw
42 21 0:35 / /sys/kernel/config rw,nosuid,nodev,noexec,relatime shared:22 - configfs configfs rw
279 33 0:49 / /proc/sys/fs/binfmt_misc rw,nosuid,nodev,noexec,relatime shared:154 - binfmt_misc binfmt_misc rw
285 26 0:50 / /boot/grub rw,relatime shared:157 - zfs boot-pool/grub rw,xattr,noacl
292 26 0:51 / /mnt/dozer rw,noatime shared:161 - zfs dozer rw,xattr,posixacl
355 292 0:62 / /mnt/dozer/posixacltest rw,noatime shared:197 - zfs dozer/posixacltest rw,xattr,posixacl
397 355 0:66 / /mnt/dozer/posixacltest/foo rw,noatime shared:221 - zfs dozer/posixacltest/foo rw,xattr,posixacl
334 292 0:59 / /mnt/dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa rw,noatime shared:185 - zfs dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa rw,xattr,posixacl
383 334 0:65 / /mnt/dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb rw,noatime shared:213 - zfs dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb rw,xattr,posixacl
418 383 0:69 / /mnt/dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/cccccccccccccccccccccccccccccccccccccccccccccc rw,noatime shared:233 - zfs dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/cccccccccccccccccccccccccccccccccccccccccccccc rw,xattr,posixacl
439 334 0:72 / /mnt/dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/fdd rw,noatime shared:245 - zfs dozer/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/fdd rw,xattr,posixacl
313 292 0:54 / /mnt/dozer/RO ro,nosuid,noexec,noatime shared:173 - zfs dozer/RO ro,xattr,posixacl
320 292 0:57 / /mnt/dozer/TESTSMB rw,noatime shared:177 - zfs dozer/TESTSMB rw,xattr,nfs4acl
299 292 0:52 / /mnt/dozer/NFS4 rw shared:165 - zfs dozer/NFS4 rw,xattr,nfs4acl
411 299 0:67 / /mnt/dozer/NFS4/stuff rw shared:229 - zfs dozer/NFS4/stuff rw,xattr,nfs4acl
390 292 0:64 / /mnt/dozer/test_homes rw,noatime shared:217 - zfs dozer/test_homes rw,xattr,nfs4acl
341 292 0:55 / /mnt/dozer/SMB rw,noatime shared:189 - zfs dozer/SMB rw,xattr,nfs4acl
425 341 0:70 / /mnt/dozer/SMB/SUBDATASET rw,noatime shared:237 - zfs dozer/SMB/SUBDATASET rw,xattr,nfs4acl
348 292 0:58 / /mnt/dozer/TESTNFS rw,noatime shared:193 - zfs dozer/TESTNFS rw,xattr,posixacl
376 292 0:63 / /mnt/dozer/smb-vss rw,noatime shared:209 - zfs dozer/smb-vss rw,xattr,nfs4acl
432 376 0:71 / /mnt/dozer/smb-vss/sub1 rw,noatime shared:241 - zfs dozer/smb-vss/sub1 rw,xattr,nfs4acl
327 292 0:56 / /mnt/dozer/TESTFUN rw,noatime shared:181 - zfs dozer/TESTFUN rw,xattr,noacl
369 292 0:61 / /mnt/dozer/administrative_share rw,noatime shared:205 - zfs dozer/administrative_share rw,xattr,posixacl
404 369 0:68 / /mnt/dozer/administrative_share/backups_dataset rw,noatime shared:225 - zfs dozer/administrative_share/backups_dataset rw,xattr,posixacl
446 404 0:73 / /mnt/dozer/administrative_share/backups_dataset/userdata rw,noatime shared:249 - zfs dozer/administrative_share/backups_dataset/userdata rw,xattr,posixacl
453 446 0:74 / /mnt/dozer/administrative_share/backups_dataset/userdata/DOMAIN_GOAT rw,noatime shared:253 - zfs dozer/administrative_share/backups_dataset/userdata/DOMAIN_GOAT rw,xattr,posixacl
460 453 0:75 / /mnt/dozer/administrative_share/backups_dataset/userdata/DOMAIN_GOAT/bob rw,noatime shared:257 - zfs dozer/administrative_share/backups_dataset/userdata/DOMAIN_GOAT/bob rw,xattr,posixacl
306 292 0:53 / /mnt/dozer/EXPORT rw,noatime shared:169 - zfs dozer/EXPORT rw,xattr,posixacl
362 292 0:60 / /mnt/dozer/noacl rw,noatime shared:201 - zfs dozer/noacl rw,xattr,noacl
93 26 0:38 / /var/db/system rw,relatime shared:48 - zfs dozer/.system rw,xattr,noacl
100 93 0:39 / /var/db/system/cores rw,relatime shared:52 - zfs dozer/.system/cores rw,xattr,noacl
107 93 0:40 / /var/db/system/samba4 rw,relatime shared:56 - zfs dozer/.system/samba4 rw,xattr,noacl
121 93 0:42 / /var/db/system/rrd-f803551cf3cd4df8a1ddb0569466b72a rw,relatime shared:64 - zfs dozer/.system/rrd-f803551cf3cd4df8a1ddb0569466b72a rw,xattr,noacl
151 93 0:43 / /var/db/system/configs-f803551cf3cd4df8a1ddb0569466b72a rw,relatime shared:68 - zfs dozer/.system/configs-f803551cf3cd4df8a1ddb0569466b72a rw,xattr,noacl
158 93 0:44 / /var/db/system/webui rw,relatime shared:100 - zfs dozer/.system/webui rw,xattr,noacl
187 93 0:45 / /var/db/system/services rw,relatime shared:104 - zfs dozer/.system/services rw,xattr,noacl
234 93 0:46 / /var/db/system/glusterd rw,relatime shared:143 - zfs dozer/.system/glusterd rw,xattr,noacl
241 93 0:47 / /var/db/system/ctdb_shared_vol rw,relatime shared:147 - zfs dozer/.system/ctdb_shared_vol rw,xattr,noacl
271 26 0:39 / /var/lib/systemd/coredump rw,relatime shared:52 - zfs dozer/.system/cores rw,xattr,noacl
467 26 0:76 / /mnt/tank\040space\040 rw,noatime shared:261 - zfs tank\040space\040 rw,xattr,posixacl
474 467 0:77 / /mnt/tank\040space\040/Dataset\040With\040a\040space rw,noatime shared:265 - zfs tank\040space\040/Dataset\040With\040a\040space rw,xattr,posixacl
572 26 0:1005 / /mnt/zz rw,noatime shared:4257 - zfs zz rw,xattr,posixacl,casesensitive
7460 572 0:1005 / /mnt/zz/ds920 rw,noatime shared:4257 - zfs zz/ds920 rw,xattr,posixacl,casesensitive
8069 572 0:1214 / /mnt/zz/mixy rw,noatime shared:4507 - zfs zz/mixy rw,xattr,posixacl,casemixed
537 327 0:75 / /mnt/dozer/TESTFUN/mp29-nfs0016K ro shared:301 - zfs dozer/TESTFUN/mp29-nfs0016K ro,xattr,posixacl,casesensitive
"""
def test__mntinfo_spaces():
    """Octal '\\040' escapes in mountinfo paths must decode to literal spaces."""
    line = r'474 467 0:77 / /mnt/tank\040space\040/Dataset\040With\040a\040space rw,noatime shared:265 - zfs tank\040space\040/Dataset\040With\040a\040space rw,xattr,posixacl'
    parsed = {}
    __parse_to_dev(line, parsed)

    # Entries are keyed on dev_t (0:77 -> 77).
    assert 77 in parsed
    expected_fields = {
        'mount_id': 474,
        'parent_id': 467,
        'device_id': {'major': 0, 'minor': 77, 'dev_t': 77},
        'root': '/',
        'mountpoint': '/mnt/tank space /Dataset With a space',
        'mount_opts': ['RW', 'NOATIME'],
        'fs_type': 'zfs',
        'mount_source': 'tank space /Dataset With a space',
        'super_opts': ['RW', 'XATTR', 'POSIXACL'],
    }
    for field, expected in expected_fields.items():
        assert parsed[77][field] == expected
def test__getmntinfo():
    """Parse every fixture line and verify the parsed fields round-trip back to the raw text."""
    def __rebuild_device_info(e):
        # Reassemble the leading 'mount_id parent_id major:minor root' columns.
        return f'{e["mount_id"]} {e["parent_id"]} {e["device_id"]["major"]}:{e["device_id"]["minor"]} {e["root"]}'
    def __rebuild_opts(e):
        # Re-join the parsed option lists into comma-separated lowercase form.
        mnt_opts = ','.join([x.lower() for x in e['mount_opts']])
        sb_opts = ','.join([x.lower() for x in e['super_opts']])
        return mnt_opts, sb_opts
    for line in fake_mntinfo.splitlines():
        data = {}
        __parse_to_dev(line, data)
        # Each line parses to exactly one entry; grab it regardless of key.
        mnt_data = list(data.values())[0]
        assert __rebuild_device_info(mnt_data) in line
        for opt in __rebuild_opts(mnt_data):
            # casefold both sides: the parser upper-cases option names.
            assert opt.casefold() in line.casefold()
        # mountinfo encodes spaces in paths as the literal characters '\040';
        # decode them in the raw line before substring-matching the parsed paths.
        assert mnt_data['mountpoint'] in line.replace('\\040', ' ')
        assert mnt_data['mount_source'] in line.replace('\\040', ' ')
        assert mnt_data['fs_type'] in line
def test__atime_and_casesentivity_in_mntinfo():
    """atime and ZFS case-sensitivity flags must surface in the parsed option lists."""
    # NOTE(review): "casesentivity" in the name is a typo for "casesensitivity";
    # kept so the test's discovery name is unchanged.
    sensitive_line = r'7460 572 0:1005 / /mnt/zz/ds920 rw,noatime shared:4257 - zfs zz/ds920 rw,xattr,posixacl,casesensitive'
    parsed = {}
    __parse_to_dev(sensitive_line, parsed)
    assert 3145965 in parsed  # dev_t key derived from 0:1005
    entry = parsed[3145965]
    assert 'NOATIME' in entry['mount_opts']
    assert 'CASESENSITIVE' in entry['super_opts']

    mixed_line = r'8069 306 0:1214 / /mnt/zz/mixy rw,noatime shared:4507 - zfs zz/mixy rw,xattr,posixacl,casemixed'
    parsed = {}
    __parse_to_dev(mixed_line, parsed)
    assert 4194494 in parsed  # dev_t key derived from 0:1214
    entry = parsed[4194494]
    assert 'CASEMIXED' in entry['super_opts']
def test__readonly_in_mntinfo():
    """A read-only mount line must carry 'RO' in its parsed mount options."""
    line = r'537 327 0:75 / /mnt/dozer/TESTFUN/mp29-nfs0016K ro shared:301 - zfs dozer/TESTFUN/mp29-nfs0016K ro,xattr,posixacl,casesensitive'
    parsed = {}
    __parse_to_dev(line, parsed)
    assert 75 in parsed
    entry = parsed[75]
    assert 'RO' in entry['mount_opts']
def test__mount_id_key():
    """__parse_to_mnt_id must key its output dict on the mount-id column."""
    line = r'537 327 0:75 / /mnt/tank/perf/mp29-nfs0016K ro shared:301 - zfs tank/perf/mp29-nfs0016K ro,xattr,posixacl,casesensitive'
    parsed = {}
    __parse_to_mnt_id(line, parsed)
    assert 537 in parsed
def test__mountinfo_tree():
    """Building the tree rooted at mount id 369 yields a single-child chain of nested datasets."""
    data = {}
    for line in fake_mntinfo.splitlines():
        __parse_to_mnt_id(line, data)

    # Expected chain from administrative_share down to the leaf 'bob' dataset;
    # every non-leaf node must have exactly one child, the leaf none.
    expected_sources = [
        'dozer/administrative_share',
        'dozer/administrative_share/backups_dataset',
        'dozer/administrative_share/backups_dataset/userdata',
        'dozer/administrative_share/backups_dataset/userdata/DOMAIN_GOAT',
        'dozer/administrative_share/backups_dataset/userdata/DOMAIN_GOAT/bob',
    ]
    node = __create_tree(data, 369)
    for depth, source in enumerate(expected_sources):
        assert node['mount_source'] == source, str(node)
        is_leaf = depth == len(expected_sources) - 1
        assert len(node['children']) == (0 if is_leaf else 1), str(node)
        if not is_leaf:
            node = node['children'][0]
def test__mountinfo_tree_miss():
    """Looking up a mount id absent from the parsed data must raise KeyError.

    Fixes: the result of __create_tree was bound to an unused `root` variable
    and the exception info was captured into an unused `e` (flake8 F841);
    both bindings are dropped.
    """
    data = {}
    for line in fake_mntinfo.splitlines():
        __parse_to_mnt_id(line, data)
    # Mount id 8675309 does not exist in the fixture.
    with pytest.raises(KeyError):
        __create_tree(data, 8675309)
| 12,237 | Python | .py | 150 | 77.946667 | 575 | 0.759867 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,358 | test_mdns.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_mdns.py | import pytest
from middlewared.utils import mdns
def test__dev_info():
    """DEV_INFO records round-trip their service type, port, and txt records through parsing."""
    txt = [f'model={mdns.DevType.MACPRORACK}']
    record = mdns.generate_avahi_srv_record('DEV_INFO', txt_records=txt)

    parsed = mdns.parse_srv_record_data(record)[0]
    expected_srv, expected_port = mdns.ServiceType.DEV_INFO.value[0], mdns.ServiceType.DEV_INFO.value[1]
    assert parsed.get('srv') == expected_srv
    assert parsed.get('port') == expected_port
    assert parsed.get('txt_records') == [f'model={mdns.DevType.MACPRORACK}']
def test__smb():
    """SMB records advertise the default SMB service type and port."""
    entry = mdns.parse_srv_record_data(mdns.generate_avahi_srv_record('SMB'))[0]
    expected = mdns.ServiceType.SMB.value
    assert entry.get('srv') == expected[0]
    assert entry.get('port') == expected[1]
def test__http():
    """A custom_port override must win over the default HTTP port."""
    record = mdns.generate_avahi_srv_record('HTTP', custom_port='8080')
    entry = mdns.parse_srv_record_data(record)[0]
    assert entry.get('srv') == mdns.ServiceType.HTTP.value[0]
    # custom_port is supplied as a string but must parse back as an int
    assert entry.get('port') == 8080
@pytest.mark.parametrize("srv,port,ifindex,txtrecord", [
    ('_ftp._tcp.', 21, None, None),
    ('_afpovertcp._tcp.', 548, None, None),
    ('_nfs._tcp.', 2048, [2, 3], ['path=/mnt/tank', 'path=/mnt/dozer']),
])
def test__custom(srv, port, ifindex, txtrecord):
    """Round-trip a CUSTOM avahi service record through generate/parse."""
    data = mdns.generate_avahi_srv_record(
        'CUSTOM',
        interface_indexes=ifindex,
        custom_service_type=srv,
        custom_port=port,
        txt_records=txtrecord
    )
    parsed = mdns.parse_srv_record_data(data)
    # normalize None to empty collections for comparison below
    expected_txt = txtrecord or []
    ifindexes = ifindex or []
    # NOTE(review): when ifindex is None this loop body never runs, so the
    # first two parameter sets only verify generate/parse do not raise —
    # confirm whether per-entry assertions are expected for those cases.
    for idx, ifidx in enumerate(ifindexes):
        assert parsed[idx].get('srv') == srv
        assert parsed[idx].get('port') == port
        assert parsed[idx].get('interface') == ifidx
        assert parsed[idx].get('txt_records') == expected_txt, str(parsed)
| 1,770 | Python | .py | 41 | 37.634146 | 76 | 0.657143 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,359 | test_nss.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_nss.py | import grp as py_grp
import pwd as py_pwd
import pytest
from middlewared.utils.nss import grp, pwd
# IDs/names expected to be absent from the test system's passwd/group
# databases; used to exercise the KeyError ("not found") paths in both the
# stdlib modules and the middlewared NSS wrappers.
BAD_UIDS = [987654, -1]
BAD_GIDS = [987654, -1]
BAD_NAMES = ["BogusName"]
def test__check_user_count():
    """The FILES NSS source must report as many users as stdlib pwd."""
    nss_users = pwd.getpwall()['FILES']
    stdlib_users = py_pwd.getpwall()
    assert len(nss_users) == len(stdlib_users)
def test__check_group_count():
    """The FILES NSS source must report as many groups as stdlib grp."""
    nss_groups = grp.getgrall()['FILES']
    stdlib_groups = py_grp.getgrall()
    assert len(nss_groups) == len(stdlib_groups)
def test__check_user_contents():
    """Every pwd field must match between stdlib and the middlewared wrapper."""
    fields = ('pw_name', 'pw_uid', 'pw_gid', 'pw_gecos', 'pw_dir', 'pw_shell')
    for expected, actual in zip(py_pwd.getpwall(), pwd.getpwall()['FILES']):
        for field in fields:
            assert getattr(expected, field) == getattr(actual, field)
def test__check_group_contents():
    """Every grp field must match between stdlib and the middlewared wrapper."""
    fields = ('gr_name', 'gr_gid', 'gr_mem')
    for expected, actual in zip(py_grp.getgrall(), grp.getgrall()['FILES']):
        for field in fields:
            assert getattr(expected, field) == getattr(actual, field)
def test__check_user_dict_conversion():
    """as_dict=True must expose the same values under matching dict keys."""
    as_struct = pwd.getpwall()['FILES']
    as_dict = pwd.getpwall(as_dict=True)['FILES']
    for struct_entry, dict_entry in zip(as_struct, as_dict):
        for key in ('pw_name', 'pw_uid', 'pw_gid', 'pw_gecos', 'pw_dir', 'pw_shell'):
            assert getattr(struct_entry, key) == dict_entry[key]
def test__check_group_dict_conversion():
    """as_dict=True must expose the same group values under matching keys."""
    as_struct = grp.getgrall()['FILES']
    as_dict = grp.getgrall(as_dict=True)['FILES']
    for struct_entry, dict_entry in zip(as_struct, as_dict):
        for key in ('gr_name', 'gr_gid', 'gr_mem'):
            assert getattr(struct_entry, key) == dict_entry[key]
def test__check_user_misses():
    """Unknown uids/names must raise KeyError from both implementations.

    Fix: assert against ``str(ve.value)`` (the exception itself) rather than
    ``str(ve)`` — stringifying pytest's ExceptionInfo wrapper is ambiguous and
    its output has changed across pytest versions; ``excinfo.value`` is the
    documented way to inspect the raised exception.
    """
    for uid in BAD_UIDS:
        with pytest.raises(KeyError) as ve:
            py_pwd.getpwuid(uid)
        assert 'uid not found' in str(ve.value)

        with pytest.raises(KeyError) as ve:
            pwd.getpwuid(uid)
        assert 'uid not found' in str(ve.value)

    for name in BAD_NAMES:
        with pytest.raises(KeyError) as ve:
            py_pwd.getpwnam(name)
        assert 'name not found' in str(ve.value)

        with pytest.raises(KeyError) as ve:
            pwd.getpwnam(name)
        assert 'name not found' in str(ve.value)
def test__check_group_misses():
    """Unknown gids/names must raise KeyError from both implementations.

    Fixes: loop variable renamed ``uid`` -> ``gid`` (it iterates BAD_GIDS),
    and assertions target ``str(ve.value)`` instead of the ambiguous
    ``str(ve)`` ExceptionInfo wrapper, matching pytest's documented usage.
    """
    for gid in BAD_GIDS:
        with pytest.raises(KeyError) as ve:
            py_grp.getgrgid(gid)
        assert 'gid not found' in str(ve.value)

        with pytest.raises(KeyError) as ve:
            grp.getgrgid(gid)
        assert 'gid not found' in str(ve.value)

    for name in BAD_NAMES:
        with pytest.raises(KeyError) as ve:
            py_grp.getgrnam(name)
        assert 'name not found' in str(ve.value)

        with pytest.raises(KeyError) as ve:
            grp.getgrnam(name)
        assert 'name not found' in str(ve.value)
def test___iter_pwd():
    """iterpw() must yield every stdlib pwd entry exactly once, field-for-field."""
    remaining = {entry.pw_uid: entry for entry in py_pwd.getpwall()}
    for user in pwd.iterpw():
        reference = remaining.pop(user.pw_uid)
        for field in ('pw_name', 'pw_uid', 'pw_gid', 'pw_gecos', 'pw_dir', 'pw_shell'):
            assert getattr(reference, field) == getattr(user, field)

    # nothing left over means the iterator covered the full database
    assert remaining == {}, str(remaining)
def test___iter_grp():
    """itergrp() must yield every stdlib grp entry exactly once, field-for-field."""
    remaining = {entry.gr_name: entry for entry in py_grp.getgrall()}
    for group in grp.itergrp():
        reference = remaining.pop(group.gr_name)
        for field in ('gr_name', 'gr_gid', 'gr_mem'):
            assert getattr(reference, field) == getattr(group, field)

    assert remaining == {}, str(remaining)
| 3,946 | Python | .py | 94 | 34.765957 | 65 | 0.628534 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,360 | test_write_if_changed.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_write_if_changed.py | import os
import pytest
import shutil
from middlewared.utils.io import (
FileChanges,
ID_MAX,
UnexpectedFileChange,
write_if_changed
)
ETC_DIR = 'test-etc'
@pytest.fixture(scope='module')
def create_etc_dir(request):
    # Module-scoped scratch directory standing in for an /etc-like target;
    # yields its resolved absolute path and removes the tree on teardown.
    # NOTE(review): `request` appears unused — confirm before removing it.
    os.mkdir(ETC_DIR)
    try:
        yield os.path.realpath(ETC_DIR)
    finally:
        shutil.rmtree(ETC_DIR)
def test__write_file_content_basic(create_etc_dir):
    """write_if_changed must flag CONTENTS for both str and bytes payloads."""
    target = os.path.join(create_etc_dir, 'testfile1')
    # initial creation counts as a contents change
    assert write_if_changed(target, "canary") == FileChanges.CONTENTS
    # rewriting with a different (bytes) payload is also a contents change
    assert write_if_changed(target, b"canary2") == FileChanges.CONTENTS

    # basic smoketest that we're actually writing contents
    with open(target, 'r') as f:
        assert f.read() == 'canary2'

    os.unlink(target)
def test__write_file_content_basic_dirfd(create_etc_dir):
    """Relative-path writes via dirfd must behave like absolute-path writes."""
    dirfd = os.open(create_etc_dir, os.O_DIRECTORY)
    try:
        assert write_if_changed('testfile2', "canary", dirfd=dirfd) == FileChanges.CONTENTS
        assert write_if_changed('testfile2', b"canary2", dirfd=dirfd) == FileChanges.CONTENTS
        os.remove('testfile2', dir_fd=dirfd)
    finally:
        os.close(dirfd)
def test__write_file_perms(create_etc_dir):
    """Permission changes are reported separately from content changes."""
    target = os.path.join(create_etc_dir, 'testfile3')
    steps = (
        # (payload, mode, expected change mask)
        ("canary", 0o777, FileChanges.CONTENTS),  # new file: contents only
        (b"canary2", 0o755, FileChanges.CONTENTS | FileChanges.PERMS),
        (b"canary2", 0o700, FileChanges.PERMS),   # perms change only
        (b"canary2", 0o700, 0),                   # no-op
    )
    for payload, mode, expected in steps:
        assert write_if_changed(target, payload, perms=mode) == expected

    assert os.stat(target).st_mode & 0o777 == 0o700
    os.unlink(target)
def test__write_file_uid(create_etc_dir):
    """Ownership (uid) changes are reported independently of contents."""
    target = os.path.join(create_etc_dir, 'testfile4')
    steps = (
        # (payload, uid, expected change mask)
        ("canary", 1000, FileChanges.CONTENTS),  # new file: contents only
        (b"canary2", 1001, FileChanges.CONTENTS | FileChanges.UID),
        (b"canary2", 1002, FileChanges.UID),     # uid change only
        (b"canary2", 1002, 0),                   # no-op
    )
    for payload, owner, expected in steps:
        assert write_if_changed(target, payload, uid=owner) == expected

    assert os.stat(target).st_uid == 1002
    os.unlink(target)
def test__write_file_gid(create_etc_dir):
    """Group-ownership (gid) changes are reported independently of contents."""
    target = os.path.join(create_etc_dir, 'testfile5')
    steps = (
        # (payload, gid, expected change mask)
        ("canary", 1000, FileChanges.CONTENTS),  # new file: contents only
        (b"canary2", 1001, FileChanges.CONTENTS | FileChanges.GID),
        (b"canary2", 1002, FileChanges.GID),     # gid change only
        (b"canary2", 1002, 0),                   # no-op
    )
    for payload, group, expected in steps:
        assert write_if_changed(target, payload, gid=group) == expected

    assert os.stat(target).st_gid == 1002
    os.unlink(target)
def test__write_file_exceptions(create_etc_dir):
    """raise_error=True surfaces unexpected changes but still applies them."""
    target = os.path.join(create_etc_dir, 'testfile6')
    assert write_if_changed(target, "canary") == FileChanges.CONTENTS

    with pytest.raises(UnexpectedFileChange) as exc:
        write_if_changed(target, "canary", uid=1000, gid=1001, perms=0o700, raise_error=True)

    expected_mask = FileChanges.UID | FileChanges.GID | FileChanges.PERMS
    assert exc.value.changes == expected_mask
    assert exc.value.path == target

    # Make sure changes were still written despite the exception
    st = os.stat(target)
    assert st.st_uid == 1000
    assert st.st_gid == 1001
    assert st.st_mode & 0o700 == 0o700
    os.unlink(target)
@pytest.mark.parametrize("params,expected_text", [
    ({'uid': -1}, f'uid must be between 0 and {ID_MAX}'),
    ({'gid': -1}, f'gid must be between 0 and {ID_MAX}'),
    ({'uid': 'bob'}, 'uid must be an integer'),
    ({'gid': 'bob'}, 'gid must be an integer'),
    ({'perms': 'bob'}, 'perms must be an integer'),
    ({'perms': 0o4777}, '2559: invalid mode. Supported bits are RWX for UGO.'),
    ({'dirfd': 'home'}, 'dirfd must be a valid file descriptor'),
    ({'dirfd': -1}, '-1: file descriptor not found'),
])
def test__write_file_value_errors(create_etc_dir, params, expected_text):
    """Each invalid keyword argument must raise ValueError with a clear message."""
    path = os.path.join(create_etc_dir, 'testfile7')
    with pytest.raises(ValueError) as excinfo:
        write_if_changed(path, "canary", **params)

    assert expected_text in str(excinfo.value)
def test__write_file_path_absolute_dirfd_value_error(create_etc_dir):
    """Combining an absolute path with a dirfd must be rejected."""
    abs_path = os.path.join(create_etc_dir, 'testfile8')
    dirfd = os.open(create_etc_dir, os.O_DIRECTORY)
    try:
        with pytest.raises(ValueError) as excinfo:
            write_if_changed(abs_path, "canary", dirfd=dirfd)
    finally:
        os.close(dirfd)

    assert 'absolute paths may not be used' in str(excinfo.value)
def test__write_file_path_relative_no_dirfd_value_error(create_etc_dir):
    """A relative path without a dirfd must be rejected."""
    with pytest.raises(ValueError) as excinfo:
        write_if_changed('testfile9', "canary")

    assert 'relative paths may not be used' in str(excinfo.value)
def test__write_file_wrong_open_type_value_error(create_etc_dir):
    """A dirfd referring to a regular file (not a directory) must be rejected."""
    regular_file = os.path.join(create_etc_dir, 'testfile10')
    with open(regular_file, 'w') as f:
        with pytest.raises(ValueError) as excinfo:
            write_if_changed('willnotexist', "canary", dirfd=f.fileno())
        assert 'dirfd must be opened' in str(excinfo.value)

    os.unlink(regular_file)
@pytest.mark.parametrize("mask,expected_dump", [
    (FileChanges.CONTENTS, ['CONTENTS']),
    (FileChanges.UID, ['UID']),
    (FileChanges.GID, ['GID']),
    (FileChanges.PERMS, ['PERMS']),
    (FileChanges.CONTENTS | FileChanges.UID | FileChanges.GID | FileChanges.PERMS, [
        'CONTENTS', 'UID', 'GID', 'PERMS'
    ])
])
def test__write_file_dump_changes(mask, expected_dump):
    """FileChanges.dump must translate each flag mask into its name list."""
    names = FileChanges.dump(mask)
    assert names == expected_dump
def test__write_file_dump_changes_validation():
    """Bits outside the known FileChanges flags must be rejected."""
    with pytest.raises(ValueError) as excinfo:
        FileChanges.dump(16)

    assert 'unsupported flags in mask' in str(excinfo.value)
| 6,532 | Python | .py | 149 | 38.456376 | 103 | 0.689339 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,361 | test_krb5.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_krb5.py | import base64
import jsonschema
import os
import pytest
from middlewared.service_exception import CallError
from middlewared.utils.directoryservices import krb5_constants, krb5, krb5_conf
# Base64-encoded kerberos keytab from reference system
SAMPLE_KEYTAB = 'BQIAAABTAAIAC0hPTUVET00uRlVOABFyZXN0cmljdGVka3JiaG9zdAASdGVzdDQ5LmhvbWVkb20uZnVuAAAAAV8kEroBAAEACDHN3Kv9WKLLAAAAAQAAAAAAAABHAAIAC0hPTUVET00uRlVOABFyZXN0cmljdGVka3JiaG9zdAAGVEVTVDQ5AAAAAV8kEroBAAEACDHN3Kv9WKLLAAAAAQAAAAAAAABTAAIAC0hPTUVET00uRlVOABFyZXN0cmljdGVka3JiaG9zdAASdGVzdDQ5LmhvbWVkb20uZnVuAAAAAV8kEroBAAMACDHN3Kv9WKLLAAAAAQAAAAAAAABHAAIAC0hPTUVET00uRlVOABFyZXN0cmljdGVka3JiaG9zdAAGVEVTVDQ5AAAAAV8kEroBAAMACDHN3Kv9WKLLAAAAAQAAAAAAAABbAAIAC0hPTUVET00uRlVOABFyZXN0cmljdGVka3JiaG9zdAASdGVzdDQ5LmhvbWVkb20uZnVuAAAAAV8kEroBABEAEBDQOH+tKYCuoedQ53WWKFgAAAABAAAAAAAAAE8AAgALSE9NRURPTS5GVU4AEXJlc3RyaWN0ZWRrcmJob3N0AAZURVNUNDkAAAABXyQSugEAEQAQENA4f60pgK6h51DndZYoWAAAAAEAAAAAAAAAawACAAtIT01FRE9NLkZVTgARcmVzdHJpY3RlZGtyYmhvc3QAEnRlc3Q0OS5ob21lZG9tLmZ1bgAAAAFfJBK6AQASACCKZTjTnrjT30jdqAG2QRb/cFyTe9kzfLwhBAm5QnuMiQAAAAEAAAAAAAAAXwACAAtIT01FRE9NLkZVTgARcmVzdHJpY3RlZGtyYmhvc3QABlRFU1Q0OQAAAAFfJBK6AQASACCKZTjTnrjT30jdqAG2QRb/cFyTe9kzfLwhBAm5QnuMiQAAAAEAAAAAAAAAWwACAAtIT01FRE9NLkZVTgARcmVzdHJpY3RlZGtyYmhvc3QAEnRlc3Q0OS5ob21lZG9tLmZ1bgAAAAFfJBK6AQAXABAcyjciCUnM9DmiyiPO4VIaAAAAAQAAAAAAAABPAAIAC0hPTUVET00uRlVOABFyZXN0cmljdGVka3JiaG9zdAAGVEVTVDQ5AAAAAV8kEroBABcAEBzKNyIJScz0OaLKI87hUhoAAAABAAAAAAAAAEYAAgALSE9NRURPTS5GVU4ABGhvc3QAEnRlc3Q0OS5ob21lZG9tLmZ1bgAAAAFfJBK6AQABAAgxzdyr/ViiywAAAAEAAAAAAAAAOgACAAtIT01FRE9NLkZVTgAEaG9zdAAGVEVTVDQ5AAAAAV8kEroBAAEACDHN3Kv9WKLLAAAAAQAAAAAAAABGAAIAC0hPTUVET00uRlVOAARob3N0ABJ0ZXN0NDkuaG9tZWRvbS5mdW4AAAABXyQSugEAAwAIMc3cq/1YossAAAABAAAAAAAAADoAAgALSE9NRURPTS5GVU4ABGhvc3QABlRFU1Q0OQAAAAFfJBK6AQADAAgxzdyr/ViiywAAAAEAAAAAAAAATgACAAtIT01FRE9NLkZVTgAEaG9zdAASdGVzdDQ5LmhvbWVkb20uZnVuAAAAAV8kEroBABEAEBDQOH+tKYCuoedQ53WWKFgAAAABAAAAAAAAAEIAAgALSE9NRURPTS5GVU4ABGhvc3QABlRFU1Q0OQAAAAFfJBK6AQARABAQ0Dh/rSmArqHnUOd1lihYAAAAAQAAAAAAAABeAAIAC0hPTUVET00uRlVOAARob3N0ABJ0ZXN0NDkuaG9tZWRvbS5mdW4AAAABXyQSugEAEgAgimU40564099I3agBtkEW/3Bck3vZM3y8IQQJuUJ7jIkAAAABAAAAAAAAAFIAAgALSE9NRURPTS5GVU4ABGhvc3QABlRFU1Q0OQAAAAFfJBK6AQASACCKZTjTnrjT30jdqAG2QRb/cFyTe9kzfLwhBAm
5QnuMiQAAAAEAAAAAAAAATgACAAtIT01FRE9NLkZVTgAEaG9zdAASdGVzdDQ5LmhvbWVkb20uZnVuAAAAAV8kEroBABcAEBzKNyIJScz0OaLKI87hUhoAAAABAAAAAAAAAEIAAgALSE9NRURPTS5GVU4ABGhvc3QABlRFU1Q0OQAAAAFfJBK6AQAXABAcyjciCUnM9DmiyiPO4VIaAAAAAQAAAAAAAAA1AAEAC0hPTUVET00uRlVOAAdURVNUNDkkAAAAAV8kEroBAAEACDHN3Kv9WKLLAAAAAQAAAAAAAAA1AAEAC0hPTUVET00uRlVOAAdURVNUNDkkAAAAAV8kEroBAAMACDHN3Kv9WKLLAAAAAQAAAAAAAAA9AAEAC0hPTUVET00uRlVOAAdURVNUNDkkAAAAAV8kEroBABEAEBDQOH+tKYCuoedQ53WWKFgAAAABAAAAAAAAAE0AAQALSE9NRURPTS5GVU4AB1RFU1Q0OSQAAAABXyQSugEAEgAgimU40564099I3agBtkEW/3Bck3vZM3y8IQQJuUJ7jIkAAAABAAAAAAAAAD0AAQALSE9NRURPTS5GVU4AB1RFU1Q0OSQAAAABXyQSugEAFwAQHMo3IglJzPQ5osojzuFSGgAAAAEAAAAA' # noqa
# Base64-encoded kerberos ccache file from reference system
SAMPLE_CCACHE = 'BQQADAABAAj////9AAAAAAAAAAEAAAABAAAAFUFEMDIuVE4uSVhTWVNURU1TLk5FVAAAAA9URVNUV1BRSU02MDNWNyQAAAABAAAAAQAAABVBRDAyLlROLklYU1lTVEVNUy5ORVQAAAAPVEVTVFdQUUlNNjAzVjckAAAAAQAAAAMAAAAMWC1DQUNIRUNPTkY6AAAAFWtyYjVfY2NhY2hlX2NvbmZfZGF0YQAAAAdwYV90eXBlAAAAMmtyYnRndC9BRDAyLlROLklYU1lTVEVNUy5ORVRAQUQwMi5UTi5JWFNZU1RFTVMuTkVUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABMgAAAAAAAAABAAAAAQAAABVBRDAyLlROLklYU1lTVEVNUy5ORVQAAAAPVEVTVFdQUUlNNjAzVjckAAAAAgAAAAIAAAAVQUQwMi5UTi5JWFNZU1RFTVMuTkVUAAAABmtyYnRndAAAABVBRDAyLlROLklYU1lTVEVNUy5ORVQAEgAAACA3FwVK1Ic6M3HMiFsHSzmtWng2iM2buJ66noxiidZQiWZQm1pmUJtaZlEn+mZR7NoAAOEAAAAAAAAAAAAAAAAEfWGCBHkwggR1oAMCAQWhFxsVQUQwMi5UTi5JWFNZU1RFTVMuTkVUoiowKKADAgECoSEwHxsGa3JidGd0GxVBRDAyLlROLklYU1lTVEVNUy5ORVSjggQnMIIEI6ADAgESoQMCAQKiggQVBIIEEY0Sk5v/+H5REJJpvYsPIM1O09jzoQFFtGLHlxA4zSFRAmEqdiJr+YL66wEkwUmoPl/JYMpFALNymzWDez0zaSybzilA//0weimbhrqljplSEwmWUaeRlkqDxpk2Xnn8l10xQ+vTQAGIcocV710cKzP2fnnt1O9Z5jVsTeZ6rFIDFPx4FKT4j+AcTtE07q3RYLKIUae1lbgT8s5t4YHYNnflcLw/o41cYPUADesttPW0vq7qYm+S89qX/3KuLF+05nZe+hrwgJHT68fJPi0D+Ge2Dh/sDye3aBZDBcXVArnyCyv9f8QAtO2U0nkvdth7KsWWl238BK6fRVNt4X5MGO6uO5T1JWYIZGOILPgphHJ3cT3SIen988XxBlwC8oR/KPaCc2wNCPSj95ozPZ6J9t98QAOGIYNBK9rc2m2h+jMtMFnc1+7i0A6dK5PwClEEhCd4uX6MWBLP3nYnUO8Z0RDoNxIsPNR6x6wOeJDzlg5dqnD7Wn0y2yH+D1E6jBDicz+49eMEbaRnE1d5TdPY6pjkzQMaYZvBaKz346g68k5XI3/NTIkpFueupVGksPI9/aXG23FCx2iDTK0r9FHEDUrQVg/EmnOtaC7xGzawqqBZrma5RyCj+eIZkPCR5j0LtzTauHtL4IjQvBiklobEW+v6D+3Lx/c9BFcc0XGVWHaRy/yeR6c9ObRgrZ/Ug64/RRTR9wLUGlZ6gBaOoyvn3tFs/gde7pK3wHnJtmB+QHlZaao8sZjdVD367NcORyjta5IQBORe0pxRAP8n+Q5JfzRiDbI1m9iq30EKTa/FWde2orqVfHP896zOZnuzXJYEqTDTJIHJ8phKQXPG4a8qEO0nRamxE2zbWIEgdz/z1jqXfe+iBeeJpPwbhMq+0JHfRMWH4KJEFNJi/8ISEW8Zkpi9D2E/Mxc6zLx0Em1SyshARQAfvMOzAmYF7RqDWPxmjIeoJF2PGNED8LroTirO2O/j6bLSgjgQDGMRNCaONPySH0X9X7LmBTKHs4dBK6pgqNd3c1ehhNIELku8L0s2YXsf6P+Po6Bq84ZEGcNHTqFNcvEmMRG6fqZqIFnZ+0Hxd60Y5WLZhag9MoUJuNHvRU8h+Xeg7OTZEd/+9cL381CyaBPrvcSgBG1CSyi/gtMQa8SKBsfNYD5mbJsrhHEAffJbcYDH+MtbMs3xPdPJJllxdb4AiXBP/EnL1/VMLlC
6s66ZlT/VHoRrRMriBjWcs1Ax6c7iHypDF41JfFEobU7T8//pyDbBCZ/Sytui7ongMR8fwo0wB0lAL/dkhCfMs8uc3DDBn86uCqrEzEz31LLXvImWRmNaazZJhJdowi7gdGEFIYux7vZeCUHsGpwGK4b70FMMUK8r/R+gx0jpzD0GdQAAAAA=' # noqa
# Below KEYTAB_LIST_OUTPUT should match SAMPLE_KEYTAB above
# if the keytab is replaced, then this output should also be replaced
KEYTAB_LIST_OUTPUT = """Keytab name: FILE:/tmp/test_kt
KVNO Timestamp Principal
---- ----------------- --------------------------------------------------------
1 07/31/20 05:46:50 restrictedkrbhost/test49.homedom.fun@HOMEDOM.FUN (DEPRECATED:des-cbc-crc)
1 07/31/20 05:46:50 restrictedkrbhost/TEST49@HOMEDOM.FUN (DEPRECATED:des-cbc-crc)
1 07/31/20 05:46:50 restrictedkrbhost/test49.homedom.fun@HOMEDOM.FUN (DEPRECATED:des-cbc-md5)
1 07/31/20 05:46:50 restrictedkrbhost/TEST49@HOMEDOM.FUN (DEPRECATED:des-cbc-md5)
1 07/31/20 05:46:50 restrictedkrbhost/test49.homedom.fun@HOMEDOM.FUN (aes128-cts-hmac-sha1-96)
1 07/31/20 05:46:50 restrictedkrbhost/TEST49@HOMEDOM.FUN (aes128-cts-hmac-sha1-96)
1 07/31/20 05:46:50 restrictedkrbhost/test49.homedom.fun@HOMEDOM.FUN (aes256-cts-hmac-sha1-96)
1 07/31/20 05:46:50 restrictedkrbhost/TEST49@HOMEDOM.FUN (aes256-cts-hmac-sha1-96)
1 07/31/20 05:46:50 restrictedkrbhost/test49.homedom.fun@HOMEDOM.FUN (DEPRECATED:arcfour-hmac)
1 07/31/20 05:46:50 restrictedkrbhost/TEST49@HOMEDOM.FUN (DEPRECATED:arcfour-hmac)
1 07/31/20 05:46:50 host/test49.homedom.fun@HOMEDOM.FUN (DEPRECATED:des-cbc-crc)
1 07/31/20 05:46:50 host/TEST49@HOMEDOM.FUN (DEPRECATED:des-cbc-crc)
1 07/31/20 05:46:50 host/test49.homedom.fun@HOMEDOM.FUN (DEPRECATED:des-cbc-md5)
1 07/31/20 05:46:50 host/TEST49@HOMEDOM.FUN (DEPRECATED:des-cbc-md5)
1 07/31/20 05:46:50 host/test49.homedom.fun@HOMEDOM.FUN (aes128-cts-hmac-sha1-96)
1 07/31/20 05:46:50 host/TEST49@HOMEDOM.FUN (aes128-cts-hmac-sha1-96)
1 07/31/20 05:46:50 host/test49.homedom.fun@HOMEDOM.FUN (aes256-cts-hmac-sha1-96)
1 07/31/20 05:46:50 host/TEST49@HOMEDOM.FUN (aes256-cts-hmac-sha1-96)
1 07/31/20 05:46:50 host/test49.homedom.fun@HOMEDOM.FUN (DEPRECATED:arcfour-hmac)
1 07/31/20 05:46:50 host/TEST49@HOMEDOM.FUN (DEPRECATED:arcfour-hmac)
1 07/31/20 05:46:50 TEST49$@HOMEDOM.FUN (DEPRECATED:des-cbc-crc)
1 07/31/20 05:46:50 TEST49$@HOMEDOM.FUN (DEPRECATED:des-cbc-md5)
1 07/31/20 05:46:50 TEST49$@HOMEDOM.FUN (aes128-cts-hmac-sha1-96)
1 07/31/20 05:46:50 TEST49$@HOMEDOM.FUN (aes256-cts-hmac-sha1-96)
1 07/31/20 05:46:50 TEST49$@HOMEDOM.FUN (DEPRECATED:arcfour-hmac)""" # noqa
KEYTAB_NAME = 'krb5.keytab'
CCACHE_NAME = 'krb5cc_0'
APPDEFAULTS_AUX = """
pam = {
renew_lifetime = 86400
}
"""
REALMS = [
{
'id': 1,
'realm': 'AD02.TN.IXSYSTEMS.NET',
'kdc': ['10.238.238.2', '10.238.238.3'],
'admin_server': ['10.238.238.2'],
'kpasswd_server': ['10.238.238.2'],
},
{
'id': 2,
'realm': 'AD03.TN.IXSYSTEMS.NET',
'kdc': [],
'admin_server': [],
'kpasswd_server': [],
},
]
@pytest.fixture(scope="function")
def kerberos_data_dir(tmpdir):
    """Populate a per-test tmpdir with the reference keytab and ccache blobs."""
    for name, blob in ((KEYTAB_NAME, SAMPLE_KEYTAB), (CCACHE_NAME, SAMPLE_CCACHE)):
        with open(os.path.join(tmpdir, name), 'wb') as f:
            f.write(base64.b64decode(blob))
            f.flush()

    return tmpdir
def test__ktutil_list_impl(kerberos_data_dir):
    """
    Validate that the kerberos keytab parser works and yields expected entries
    """
    entries = krb5.ktutil_list_impl(os.path.join(kerberos_data_dir, KEYTAB_NAME))
    assert len(entries) != 0

    # The schema for valid keytab entries returned from `ktutil_list_impl`
    # is defined in KTUTIL_LIST_OUTPUT_SCHEMA. If the output of that method
    # ever changes, the schema _must_ change with it or this test will fail.
    jsonschema.validate(entries, krb5.KTUTIL_LIST_OUTPUT_SCHEMA)

    modern_etypes = (
        krb5_constants.KRB_ETYPE.AES256_CTS_HMAC_SHA1_96.value,
        krb5_constants.KRB_ETYPE.AES128_CTS_HMAC_SHA1_96.value,
    )
    seen_slots = set()
    for entry in entries:
        # slot numbers must be unique across the keytab
        assert entry['slot'] not in seen_slots
        seen_slots.add(entry['slot'])

        # only the AES etypes are non-deprecated
        expect_deprecated = entry['etype'] not in modern_etypes
        assert entry['etype_deprecated'] is expect_deprecated, str(entry)
def test__keytab_extraction(kerberos_data_dir):
    """
    Validate that query-filters passed to `extract_from_keytab` are honored
    """
    filtered = krb5.extract_from_keytab(
        os.path.join(kerberos_data_dir, KEYTAB_NAME),
        [['etype_deprecated', '=', False]]
    )

    new_kt_path = os.path.join(kerberos_data_dir, 'new_kt')
    with open(new_kt_path, 'wb') as f:
        f.write(filtered)
        f.flush()

    entries = krb5.ktutil_list_impl(new_kt_path)
    jsonschema.validate(entries, krb5.KTUTIL_LIST_OUTPUT_SCHEMA)
    # every entry surviving the filter must be a non-deprecated etype
    for entry in entries:
        assert entry['etype_deprecated'] is False, str(entry)
def test__keytab_parser(kerberos_data_dir):
    """parse_keytab must agree with ktutil_list_impl on the reference output."""
    entries = krb5.ktutil_list_impl(os.path.join(kerberos_data_dir, KEYTAB_NAME))
    # skip the three header lines of the reference listing
    reference_lines = KEYTAB_LIST_OUTPUT.splitlines()[3:]
    assert len(entries) == len(reference_lines)
    assert entries == krb5.parse_keytab(reference_lines)
def test__keytab_services(kerberos_data_dir):
    """
    Validate retrieval of the set of service names present in a keytab
    """
    services = krb5.keytab_services(os.path.join(kerberos_data_dir, KEYTAB_NAME))
    assert set(services) == {'restrictedkrbhost', 'host'}
def test__klist_impl(kerberos_data_dir):
    """
    Validate reading and parsing of a kerberos ccache file
    """
    ccache_path = os.path.join(kerberos_data_dir, CCACHE_NAME)
    parsed = krb5.klist_impl(ccache_path)
    jsonschema.validate(parsed, krb5.KLIST_OUTPUT_SCHEMA)

    assert parsed['default_principal'] == 'TESTWPQIM603V7$@AD02.TN.IXSYSTEMS.NET'
    cache_info = parsed['ticket_cache']
    assert cache_info.get('type') == 'FILE'
    assert cache_info.get('name') == ccache_path

    tickets = parsed['tickets']
    assert len(tickets) == 1
    # the single ticket must carry at least one flag
    assert len(tickets[0]['flags']) != 0
def test__check_ticket(kerberos_data_dir):
    """
    We use gssapi library to perform basic validation of kerberos tickets.
    The ccache file we write as part of this test is valid but expired,
    so we check that it raises the expected error if exceptions are
    requested and otherwise returns None. Tests of valid tickets occur
    during full CI test runs.
    """
    ccache_path = os.path.join(kerberos_data_dir, CCACHE_NAME)
    # first validate the no-exception response (None rather than a credential)
    assert krb5.gss_get_current_cred(ccache_path, False) is None

    with pytest.raises(CallError) as ce:
        krb5.gss_get_current_cred(ccache_path)

    assert ce.value.errmsg == 'Kerberos ticket is expired'
@pytest.mark.parametrize('params,expected,success', [
    ('dns_canonicalize_hostname = true', {'dns_canonicalize_hostname': 'true'}, True),
    ('canonicalize = true', {'canonicalize': 'true'}, True),
    ('admin_server = canary', None, False),  # invalid entry
    ('rdns = canary', None, False),  # wrong type for boolean value
    ('permitted_enctypes = aes256-cts-hmac-sha1-96', {'permitted_enctypes': 'aes256-cts-hmac-sha1-96'}, True),
    ('permitted_enctypes = canary', None, False),  # not a valid encryption type
])
def test__krb5conf_libdefaults_aux_parser(params, expected, success):
    """libdefaults aux params either parse into the dict or raise ValueError."""
    parsed = {}
    if not success:
        with pytest.raises(ValueError):
            krb5_conf.parse_krb_aux_params(
                krb5_conf.KRB5ConfSection.LIBDEFAULTS,
                parsed,
                params
            )
    else:
        krb5_conf.parse_krb_aux_params(
            krb5_conf.KRB5ConfSection.LIBDEFAULTS,
            parsed,
            params
        )
        assert parsed == expected
@pytest.mark.parametrize('params,expected,success', [
    ('renew_lifetime = 86400', {'renew_lifetime': '86400'}, True),
    ('canonicalize = true', None, False),
    (APPDEFAULTS_AUX, {'pam': {'renew_lifetime': '86400'}}, True),
])
def test__krb5conf_appdefaults_aux_parser(params, expected, success):
    """appdefaults aux params either parse into the dict or raise ValueError."""
    parsed = {}
    if not success:
        with pytest.raises(ValueError):
            krb5_conf.parse_krb_aux_params(
                krb5_conf.KRB5ConfSection.APPDEFAULTS,
                parsed,
                params
            )
    else:
        krb5_conf.parse_krb_aux_params(
            krb5_conf.KRB5ConfSection.APPDEFAULTS,
            parsed,
            params
        )
        assert parsed == expected
def validate_realms_section(data):
    """Validate the body of a generated [realms] section.

    `data` consists of one or more blocks of approximately this form
    (tab-indented, one block per realm, closed by '}'):

        <REALM> = {
            default_domain = <REALM>
            kdc = ip1 ip2 ip3
            admin_server = ip1
            kpasswd_server = ip1 ip2 ip3

    Each block is compared against the matching entry in REALMS.
    """
    def validate_realm(idx, realm):
        # Compare one realm block against REALMS[idx], line by line;
        # lidx counts only non-blank lines.
        this = REALMS[idx]
        lidx = 0
        for line in realm.splitlines():
            if not line.strip():
                continue
            match lidx:
                case 0:
                    # opening line: "\t<REALM> = {"
                    assert line.startswith(f'\t{this["realm"]} =')
                case 1:
                    assert line.strip() == f'default_domain = {this["realm"]}', str(realm)
                case _:
                    # remaining lines are "key = value" server entries
                    data = line.split('=')
                    assert len(data) == 2, realm
                    key, val = data
                    key = key.strip()
                    match key:
                        case 'kdc' | 'admin_server' | 'kpasswd_server':
                            # value must be one of the configured servers
                            assert val.strip() in this[key]
                        case _:
                            raise ValueError(f'{key}: unexpected key in realm config')
            lidx += 1

    # blocks are delimited by '}'; realm order follows REALMS order
    for idx, realm in enumerate(data.split('}')):
        if not realm.strip():
            continue
        validate_realm(idx, realm)
def validate_domain_realms_section(data):
    """Validate the body of a generated [domain_realms] section.

    Each realm contributes exactly four mapping lines, in order:

        realm.lower()   = REALM
        .realm.lower()  = REALM
        REALM.upper()   = REALM
        .REALM.upper()  = REALM
    """
    realm_idx = 0
    for idx, line in enumerate(data.splitlines()):
        # advance to the next REALMS entry on every group-of-four boundary
        relative_idx = idx % 4
        if idx and relative_idx == 0:
            realm_idx += 1

        realm_name = REALMS[realm_idx]['realm']
        match relative_idx:
            case 0:
                assert line.strip() == f'{realm_name.lower()} = {realm_name}'
            case 1:
                assert line.strip() == f'.{realm_name.lower()} = {realm_name}'
            case 2:
                assert line.strip() == f'{realm_name.upper()} = {realm_name}'
            case 3:
                assert line.strip() == f'.{realm_name.upper()} = {realm_name}'
def test__krb5conf_realm():
    """
    Verify that a list of kerberos realms is stored properly
    within a KRB5Conf object and rendered correctly by generate()
    """
    kconf = krb5_conf.KRB5Conf()
    kconf.add_realms(REALMS)

    # every input realm must be retrievable, field-for-field
    stored_realms = kconf.realms
    for realm in REALMS:
        assert realm['realm'] in stored_realms
        stored = stored_realms[realm['realm']]
        assert stored['realm'] == realm['realm']
        assert stored['admin_server'] == realm['admin_server']
        assert stored['kpasswd_server'] == realm['kpasswd_server']
        assert stored['kdc'] == realm['kdc']

    # Convert our stored kerberos realm configuration into krb5.conf
    # data via the `generate()` method and validate it's what we expect.
    # Sections in the generated config are separated by blank lines.
    for section in kconf.generate().split('\n\n'):
        if not section.startswith(('[realms]', '[domain_realms]')):
            continue

        # first line is the "[...]" header; the rest is the section body
        section_name, data = section.split('\n', 1)
        match section_name:
            case '[realms]':
                validate_realms_section(data)
            case '[domain_realms]':
                validate_domain_realms_section(data)
            case _:
                raise ValueError(f'{section_name}: unexpected entry')
def test__krb5conf_libdefaults():
"""
Validate generating krb5.conf with libdefault configured via
config dict and auxiliary parameter blob
"""
kconf = krb5_conf.KRB5Conf()
kconf.add_libdefaults(
{'canonicalize': 'true'},
'rdns = false\npermitted_enctypes = aes256-cts-hmac-sha1-96'
)
for section in kconf.generate().split('\n\n'):
if not section.startswith('[libdefaults]'):
continue
section_name, data = section.split('\n', 1)
for line in data.splitlines():
if not line.strip():
continue
key, value = line.strip().split('=')
match key.strip():
case 'canonicalize':
assert value.strip() == 'true'
case 'rdns':
assert value.strip() == 'false'
case 'permitted_enctypes':
assert value.strip() == 'aes256-cts-hmac-sha1-96'
case _:
raise ValueError(f'{key}: unexpected libdefault parameter')
def test__krb5conf_appdefaults():
"""
Validate generating krb5.conf with libdefault configured via
config dict and auxiliary parameter blob
"""
kconf = krb5_conf.KRB5Conf()
kconf.add_appdefaults(
{'renew_lifetime': '86400'},
'forwardable = true\nproxiable = false'
)
for section in kconf.generate().split('\n\n'):
if not section.startswith('[appdefaults]'):
continue
section_name, data = section.split('\n', 1)
for line in data.splitlines():
if not line.strip():
continue
key, value = line.strip().split('=')
match key.strip():
case 'renew_lifetime':
assert value.strip() == '86400'
case 'forwardable':
assert value.strip() == 'true'
case 'proxiable':
assert value.strip() == 'false'
case _:
raise ValueError(f'{key}: unexpected libdefault parameter')
def test__write_krb5_conf(kerberos_data_dir):
    """KRB5Conf.write must persist exactly what generate() returns."""
    kconf = krb5_conf.KRB5Conf()
    kconf.add_realms(REALMS)
    kconf.add_libdefaults({'default_realm': 'AD02.TN.IXSYSTEMS.NET'})
    expected = kconf.generate()

    conf_path = os.path.join(kerberos_data_dir, 'test_krb5.conf')
    kconf.write(conf_path)
    with open(conf_path, 'r') as f:
        assert f.read() == expected
| 19,675 | Python | .py | 346 | 48.705202 | 2,662 | 0.715949 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,362 | test_privilege.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_privilege.py | import pytest
import types
from middlewared.auth import UserSessionManagerCredentials
from middlewared.utils.auth import AA_LEVEL1
from middlewared.utils.privilege import (
app_credential_full_admin_or_user,
credential_has_full_admin,
credential_full_admin_or_user,
privilege_has_webui_access,
)
from middlewared.plugins.service_.utils import app_has_write_privilege_for_service
@pytest.mark.parametrize('privilege,expected', [
    ({'roles': ['READONLY_ADMIN'], 'allowlist': []}, True),
    ({'roles': ['SHARING_ADMIN'], 'allowlist': []}, True),
    ({'roles': ['FULL_ADMIN'], 'allowlist': []}, True),
    ({'roles': ['SHARING_SMB_READ'], 'allowlist': []}, False),
])
def test_privilege_has_webui_access(privilege, expected):
    """Only admin-level roles should grant web UI access."""
    has_access = privilege_has_webui_access(privilege)
    assert has_access == expected
@pytest.mark.parametrize('credential,expected', [
    ({'username': 'BOB', 'privilege': {'allowlist': [], 'roles': ['READONLY_ADMIN']}}, False),
    ({'username': 'BOB', 'privilege': {'allowlist': [], 'roles': ['FULL_ADMIN']}}, True),
    ({'username': 'BOB', 'privilege': {'allowlist': [{'method': '*', 'resource': '*'}], 'roles': []}}, True),
])
def test_privilege_has_full_admin(credential, expected):
    """FULL_ADMIN role or a wildcard allowlist entry implies full admin."""
    cred = UserSessionManagerCredentials(credential, AA_LEVEL1)
    assert credential_has_full_admin(cred) == expected
    # full-admin credentials may act as any user; others only as themselves
    assert credential_full_admin_or_user(cred, 'canary') == expected
    assert credential_full_admin_or_user(cred, 'BOB')
    app = types.SimpleNamespace(authenticated_credentials=cred)
    assert app_credential_full_admin_or_user(app, 'canary') == expected
# Matrix of (service, credential privilege, expected write access): only
# FULL_ADMIN, a wildcard allowlist, or the matching service-specific
# SHARING_*_WRITE role should grant write access to a given service.
@pytest.mark.parametrize('service,credential,expected', [
    ('cifs', {'privilege': {'allowlist': [], 'roles': ['READONLY_ADMIN']}}, False),
    ('cifs', {'privilege': {'allowlist': [], 'roles': ['FULL_ADMIN']}}, True),
    ('cifs', {'privilege': {'roles': [], 'allowlist': [{'method': '*', 'resource': '*'}]}}, True),
    ('cifs', {'privilege': {'allowlist': [], 'roles': ['SHARING_SMB_WRITE']}}, True),
    ('cifs', {'privilege': {'allowlist': [], 'roles': ['SHARING_NFS_WRITE']}}, False),
    ('cifs', {'privilege': {'allowlist': [], 'roles': ['SHARING_ISCSI_WRITE']}}, False),
    ('cifs', {'privilege': {'allowlist': [], 'roles': ['SHARING_FTP_WRITE']}}, False),
    ('nfs', {'privilege': {'allowlist': [], 'roles': ['READONLY_ADMIN']}}, False),
    ('nfs', {'privilege': {'allowlist': [], 'roles': ['FULL_ADMIN']}}, True),
    ('nfs', {'privilege': {'roles': [], 'allowlist': [{'method': '*', 'resource': '*'}]}}, True),
    ('nfs', {'privilege': {'allowlist': [], 'roles': ['SHARING_SMB_WRITE']}}, False),
    ('nfs', {'privilege': {'allowlist': [], 'roles': ['SHARING_NFS_WRITE']}}, True),
    ('nfs', {'privilege': {'allowlist': [], 'roles': ['SHARING_ISCSI_WRITE']}}, False),
    ('nfs', {'privilege': {'allowlist': [], 'roles': ['SHARING_FTP_WRITE']}}, False),
    ('iscsitarget', {'privilege': {'allowlist': [], 'roles': ['READONLY_ADMIN']}}, False),
    ('iscsitarget', {'privilege': {'allowlist': [], 'roles': ['FULL_ADMIN']}}, True),
    ('iscsitarget', {'privilege': {'roles': [], 'allowlist': [{'method': '*', 'resource': '*'}]}}, True),
    ('iscsitarget', {'privilege': {'allowlist': [], 'roles': ['SHARING_SMB_WRITE']}}, False),
    ('iscsitarget', {'privilege': {'allowlist': [], 'roles': ['SHARING_NFS_WRITE']}}, False),
    ('iscsitarget', {'privilege': {'allowlist': [], 'roles': ['SHARING_ISCSI_WRITE']}}, True),
    ('iscsitarget', {'privilege': {'allowlist': [], 'roles': ['SHARING_FTP_WRITE']}}, False),
    ('ftp', {'privilege': {'allowlist': [], 'roles': ['READONLY_ADMIN']}}, False),
    ('ftp', {'privilege': {'allowlist': [], 'roles': ['FULL_ADMIN']}}, True),
    ('ftp', {'privilege': {'roles': [], 'allowlist': [{'method': '*', 'resource': '*'}]}}, True),
    ('ftp', {'privilege': {'allowlist': [], 'roles': ['SHARING_SMB_WRITE']}}, False),
    ('ftp', {'privilege': {'allowlist': [], 'roles': ['SHARING_NFS_WRITE']}}, False),
    ('ftp', {'privilege': {'allowlist': [], 'roles': ['SHARING_ISCSI_WRITE']}}, False),
    ('ftp', {'privilege': {'allowlist': [], 'roles': ['SHARING_FTP_WRITE']}}, True),
])
def test_privilege_has_write_to_service(service, credential, expected):
    """Each SHARING_*_WRITE role grants write access to exactly one service."""
    user_cred = UserSessionManagerCredentials({'username': 'BOB'} | credential, AA_LEVEL1)
    assert app_has_write_privilege_for_service(types.SimpleNamespace(authenticated_credentials=user_cred), service) == expected
| 4,437 | Python | .py | 63 | 66.31746 | 127 | 0.61475 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,363 | test_filter_list.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_filter_list.py | import pytest
import datetime
from middlewared.utils import filter_list
DATA = [
{
'foo': 'foo1',
'number': 1,
'list': [1],
},
{
'foo': 'foo2',
'number': 2,
'list': [2],
},
{
'foo': '_foo_',
'number': 3,
'list': [3],
},
]
DATA_WITH_NULL = [
{
'foo': 'foo1',
'number': 1,
'list': [1],
},
{
'foo': 'foo2',
'number': 2,
'list': [2],
},
{
'foo': '_foo_',
'number': 3,
'list': [3],
},
{
'foo': None,
'number': 4,
'list': [4],
},
{
'number': 5,
'list': [5],
},
]
DATA_WITH_CASE = [
{
'foo': 'foo',
'number': 1,
'list': [1],
},
{
'foo': 'Foo',
'number': 2,
'list': [2],
},
{
'foo': 'foO_',
'number': 3,
'list': [3],
},
{
'foo': 'bar',
'number': 3,
'list': [3],
},
]
DATA_WITH_LISTODICTS = [
{
'foo': 'foo',
'list': [{'number': 1}, {'number': 2}],
},
{
'foo': 'Foo',
'list': [{'number': 2}, {'number': 3}],
},
{
'foo': 'foO_',
'list': [{'number': 3}],
},
{
'foo': 'bar',
'list': [{'number': 0}],
}
]
DATA_WITH_LISTODICTS_INCONSISTENT = [
{
'foo': 'foo',
'list': [{'number': 1}, 'canary'],
},
{
'foo': 'Foo',
'list': [1, {'number': 3}],
},
{
'foo': 'foO_',
'list': [{'number': 3}, ('bob', 1)],
},
{
'foo': 'bar',
'list': [{'number': 0}],
},
{
'foo': 'bar',
'list': 'whointheirrightmindwoulddothis'
},
{
'foo': 'bar',
},
{
'foo': 'bar',
'list': None,
},
{
'foo': 'bar',
'list': 42,
},
'canary'
]
DATA_WITH_DEEP_LISTS = [
{
'foo': 'foo',
'list': [{'list2': [{'number': 1}, 'canary']}, {'list2': [{'number': 2}, 'canary']}],
},
{
'foo': 'Foo',
'list': [{'list2': [{'number': 3}, 'canary']}, {'list2': [{'number': 2}, 'canary']}],
}
]
DATA_SELECT_COMPLEX = [
{
'foo': 'foo',
'number': 1,
'foobar': {'stuff': {'more_stuff': 1}},
'foo.bar': 42,
'list': [1],
},
{
'foo': 'Foo',
'number': 2,
'foobar': {'stuff': {'more_stuff': 2}},
'foo.bar': 43,
'list': [2],
},
{
'foo': 'foO_',
'number': 3,
'foobar': {'stuff': {'more_stuff': 2}},
'foo.bar': 44,
'list': [3],
},
{
'foo': 'bar',
'number': 4,
'foobar': {'stuff': {'more_stuff': 4}},
'foo.bar': 45,
'list': [4],
},
]
COMPLEX_DATA = [
{
"timestamp": "2022-11-10T07:40:17.397502-0800",
"type": "Authentication",
"Authentication": {
"version": {
"major": 1,
"minor": 2
},
"eventId": 4625,
"logonId": "0",
"logonType": 3,
"status": "NT_STATUS_NO_SUCH_USER",
"localAddress": "ipv4:192.168.0.200:445",
"remoteAddress": "ipv4:192.168.0.151:50559",
"serviceDescription": "SMB2",
"authDescription": None,
"clientDomain": "MicrosoftAccount",
"clientAccount": "awalker325@outlook.com",
"workstation": "WALKSURF",
"becameAccount": None,
"becameDomain": None,
"becameSid": None,
"mappedAccount": "awalker325@outlook.com",
"mappedDomain": "MicrosoftAccount",
"netlogonComputer": None,
"netlogonTrustAccount": None,
"netlogonNegotiateFlags": "0x00000000",
"netlogonSecureChannelType": 0,
"netlogonTrustAccountSid": None,
"passwordType": "NTLMv2",
"duration": 6298
},
"timestamp_tval": {
"tv_sec": 1668094817,
"tv_usec": 397502
}
},
{
"timestamp": "2023-01-24T12:37:39.522594-0800",
"type": "Authentication",
"Authentication": {
"version": {
"major": 1,
"minor": 2
},
"eventId": 4624,
"logonId": "c1b1a262c42babb6",
"logonType": 8,
"status": "NT_STATUS_OK",
"localAddress": "unix:",
"remoteAddress": "unix:",
"serviceDescription": "winbind",
"authDescription": "PAM_AUTH, PAM_WINBIND[sshd], 133191",
"clientDomain": "BILLY",
"clientAccount": "joiner",
"workstation": None,
"becameAccount": "joiner",
"becameDomain": "BILLY",
"becameSid": "S-1-5-21-1002530428-2020721000-3540273080-1103",
"mappedAccount": None,
"mappedDomain": None,
"netlogonComputer": None,
"netlogonTrustAccount": None,
"netlogonNegotiateFlags": "0x00000000",
"netlogonSecureChannelType": 0,
"netlogonTrustAccountSid": None,
"passwordType": "Plaintext",
"duration": 23554
},
"timestamp_tval": {
"tv_sec": 1674592659,
"tv_usec": 522594
}
}
]
SAMPLE_AUDIT = [
{
'audit_id': 'd89cd1ba-3982-4407-98fb-febd829ca0d4',
'timestamp': datetime.datetime(2023, 12, 18, 16, 10, 30, tzinfo=datetime.timezone.utc),
'service': 'MIDDLEWARE',
'event': 'AUTHENTICATION'
},
{
'audit_id': 'd53dcd53-2955-453b-9e94-759031e3d1c7',
'timestamp': datetime.datetime(2023, 12, 18, 16, 10, 33, tzinfo=datetime.timezone.utc),
'service': 'MIDDLEWARE',
'event': 'AUTHENTICATION'
},
{
'audit_id': '3f63e567-b302-4d18-803f-fc4dcbd27832',
'timestamp': datetime.datetime(2023, 12, 18, 16, 15, 35, tzinfo=datetime.timezone.utc),
'service': 'MIDDLEWARE', 'event': 'METHOD_CALL'
},
{
'audit_id': 'e617db35-ee56-40f9-876a-f97edb1f8a26',
'timestamp': datetime.datetime(2023, 12, 18, 16, 15, 55, tzinfo=datetime.timezone.utc),
'service': 'MIDDLEWARE',
'event': 'METHOD_CALL'
},
{
'audit_id': '7fd9f725-afad-4a62-91e4-1adbdaf3928b',
'timestamp': datetime.datetime(2023, 12, 18, 16, 21, 25, tzinfo=datetime.timezone.utc),
'service': 'MIDDLEWARE',
'event': 'AUTHENTICATION'
}
]
def test__filter_list_equal():
    """'=' performs an exact match."""
    matched = filter_list(DATA, [['foo', '=', 'foo1']])
    assert len(matched) == 1


def test__filter_list_starts():
    """'^' matches a prefix."""
    matched = filter_list(DATA, [['foo', '^', 'foo']])
    assert len(matched) == 2


def test__filter_list_ends():
    """'$' matches a suffix."""
    matched = filter_list(DATA, [['foo', '$', '_']])
    assert len(matched) == 1


def test__filter_list_regex_begins():
    """'~' applies a regular expression (anchored here with ^)."""
    matched = filter_list(DATA, [['foo', '~', '^foo']])
    assert len(matched) == 2


def test__filter_list_regex_contains():
    matched = filter_list(DATA, [['foo', '~', '.*foo.*']])
    assert len(matched) == 3


def test__filter_list_gt():
    matched = filter_list(DATA, [['number', '>', 1]])
    assert len(matched) == 2


def test__filter_list_gte():
    matched = filter_list(DATA, [['number', '>=', 1]])
    assert len(matched) == 3


def test__filter_list_lt():
    matched = filter_list(DATA, [['number', '<', 3]])
    assert len(matched) == 2


def test__filter_list_lte():
    matched = filter_list(DATA, [['number', '<=', 3]])
    assert len(matched) == 3


def test__filter_list_in():
    matched = filter_list(DATA, [['number', 'in', [1, 3]]])
    assert len(matched) == 2


def test__filter_list_nin():
    matched = filter_list(DATA, [['number', 'nin', [1, 3]]])
    assert len(matched) == 1


def test__filter_list_rin():
    """'rin' — reverse in: the filter value must be a member of the row's list."""
    matched = filter_list(DATA, [['list', 'rin', 1]])
    assert len(matched) == 1


def test__filter_list_rnin():
    matched = filter_list(DATA, [['list', 'rnin', 1]])
    assert len(matched) == 2
def test__filter_list_OR_eq1():
assert len(filter_list(DATA, [['OR', [
['number', '=', 1],
['number', '=', 200],
]]])) == 1
def test__filter_list_OR_eq2():
assert len(filter_list(DATA, [['OR', [
['number', '=', 1],
['number', '=', 2],
]]])) == 2
def test__filter_list_OR_eq3():
assert len(filter_list(DATA, [['OR', [
[['number', '=', 1], ['foo', '=', 'foo1']],
['number', '=', 2],
]]])) == 2
assert len(filter_list(DATA, [['OR', [
[['number', '=', 1], ['foo', '=', 'foo2']],
['number', '=', 2],
]]])) == 1
def test__filter_list_OR_nesting():
assert len(filter_list(DATA, [['OR', [
['OR', [['number', '=', 1], ['foo', '=', 'canary']]],
['number', '=', 2],
]]])) == 2
assert len(filter_list(DATA, [['OR', [
['OR', [['number', '=', 'canary'], ['foo', '=', 'canary']]],
['number', '=', 2],
]]])) == 1
assert len(filter_list(DATA, [['OR', [
['OR', [
['OR', [
['number', '=', 1],
['number', '=', 'canary'],
]],
['foo', '=', 'canary']
]],
['number', '=', 2],
]]])) == 2
with pytest.raises(ValueError) as ve:
filter_list(DATA, [['OR', [
['OR', [
['OR', [
['OR', [
['number', '=', 1],
['number', '=', 'canary'],
]],
['number', '=', 1],
['number', '=', 'canary'],
]],
['foo', '=', 'canary']
]],
['number', '=', 2],
]]])
assert 'query-filters max recursion depth exceeded' in str(ve)
def test__filter_list_nested_dict():
    """Dotted keys traverse nested dicts."""
    matched = filter_list(COMPLEX_DATA, [['Authentication.status', '=', 'NT_STATUS_OK']])
    assert len(matched) == 1


def test__filter_list_option_get():
    """'get' returns a single entry instead of a list."""
    assert isinstance(filter_list(DATA, [], {'get': True}), dict)


def test__filter_list_option_get_and_order_by():
    first = filter_list(DATA, [], {'get': True, 'order_by': ['-number']})
    assert first['foo'] == '_foo_'


def test__filter_list_option_order_by():
    ordered = filter_list(DATA, [], {'order_by': ['number']})
    assert [entry['number'] for entry in ordered] == [1, 2, 3]


def test__filter_list_option_order_by_reverse():
    ordered = filter_list(DATA, [], {'order_by': ['-number']})
    assert [entry['number'] for entry in ordered] == [3, 2, 1]


def test__filter_list_option_select():
    for selected in filter_list(DATA, [], {'select': ['foo']}):
        assert list(selected.keys()) == ['foo']


def test__filter_list_option_nulls_first():
    ordered = filter_list(DATA_WITH_NULL, [], {'order_by': ['nulls_first:foo']})
    assert ordered[0]['foo'] is None


def test__filter_list_option_nulls_last():
    ordered = filter_list(DATA_WITH_NULL, [], {'order_by': ['nulls_last:foo']})
    assert ordered[-1].get('foo') is None


def test__filter_list_option_casefold_equals():
    """Operators prefixed with 'C' compare case-insensitively."""
    matched = filter_list(DATA, [['foo', 'C=', 'Foo1']])
    assert len(matched) == 1


def test__filter_list_option_casefold_starts():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'C^', 'F']])
    assert len(matched) == 3


def test__filter_list_option_casefold_does_not_start():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'C!^', 'F']])
    assert len(matched) == 1


def test__filter_list_option_casefold_ends():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'C$', 'foo']])
    assert len(matched) == 2


def test__filter_list_option_casefold_does_not_end():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'C!$', 'O']])
    assert len(matched) == 2


def test__filter_list_option_casefold_in():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'Cin', 'foo']])
    assert len(matched) == 2


def test__filter_list_option_casefold_rin():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'Crin', 'foo']])
    assert len(matched) == 3


def test__filter_list_option_casefold_nin():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'Cnin', 'foo']])
    assert len(matched) == 2


def test__filter_list_option_casefold_rnin():
    matched = filter_list(DATA_WITH_CASE, [['foo', 'Crnin', 'foo']])
    assert len(matched) == 1


def test__filter_list_option_casefold_complex_data():
    matched = filter_list(COMPLEX_DATA, [['Authentication.clientAccount', 'C=', 'JOINER']])
    assert len(matched) == 1
def test__filter_list_nested_select():
data = filter_list(
DATA_SELECT_COMPLEX,
[['foobar.stuff.more_stuff', '=', 4]],
{'select': ['foobar.stuff.more_stuff']}
)
assert len(data) == 1
entry = data[0]
assert 'foobar' in entry
assert 'stuff' in entry['foobar']
assert 'more_stuff' in entry['foobar']['stuff']
assert entry['foobar']['stuff']['more_stuff'] == 4
def test__filter_list_nested_select_escape():
data = filter_list(DATA_SELECT_COMPLEX, [['foobar.stuff.more_stuff', '=', 4]], {'select': ['foo\\.bar']})
assert len(data) == 1
entry = data[0]
assert 'foo.bar' in entry
assert entry['foo.bar'] == 45
def test__filter_list_complex_data_nested_select():
data = filter_list(
COMPLEX_DATA,
[],
{'select': ['Authentication.status', 'Authentication.localAddress', 'Authentication.clientAccount']}
)
assert len(data) != 0
assert 'Authentication' in data[0]
auth = data[0]['Authentication']
assert len(auth.keys()) == 3
assert 'status' in auth
assert 'localAddress' in auth
assert 'clientAccount' in auth
def test__filter_list_select_as():
data = filter_list(
DATA_SELECT_COMPLEX,
[['foobar.stuff.more_stuff', '=', 4]],
{'select': [['foobar.stuff.more_stuff', 'data']]}
)
assert len(data) == 1
entry = data[0]
assert len(entry.keys()) == 1
assert 'data' in entry
assert entry['data'] == 4
def test__filter_list_select_null():
data = filter_list(DATA_WITH_NULL, [['number', '=', 4]], {'select': ['foo'], 'get': True})
assert len(data) == 1
assert 'foo' in data
assert data['foo'] is None
def test__filter_list_select_as_validation():
with pytest.raises(ValueError) as ve:
# too few items in the select list
filter_list(DATA_SELECT_COMPLEX, [], {'select': [['foobar.stuff.more_stuff']]})
assert 'select as list may only contain two parameters' in str(ve)
with pytest.raises(ValueError) as ve:
# too many items in the select list
filter_list(DATA_SELECT_COMPLEX, [], {'select': [['foobar.stuff.more_stuff', 'cat', 'dog']]})
assert 'select as list may only contain two parameters' in str(ve)
with pytest.raises(ValueError) as ve:
# wrong type in select
filter_list(DATA_SELECT_COMPLEX, [], {'select': [[1, 'cat']]})
assert 'first item must be a string' in str(ve)
def test__filter_list_timestamp_invalid_string():
    """Non-ISO strings for a '.$date' key are rejected."""
    with pytest.raises(ValueError) as exc_info:
        filter_list([], [["timestamp.$date", "=", "Canary"]])

    assert 'must be an ISO-8601 formatted timestamp string' in str(exc_info)


def test__filter_list_timestamp_invalid_type():
    """Non-string values for a '.$date' key are rejected."""
    with pytest.raises(ValueError) as exc_info:
        filter_list([], [["timestamp.$date", "=", 1]])

    assert 'must be an ISO-8601 formatted timestamp string' in str(exc_info)


def test__filter_list_timestamp_invalid_operator():
    """Only comparison-style operators are meaningful for timestamps."""
    with pytest.raises(ValueError) as exc_info:
        filter_list([], [["timestamp.$date", "^", '2023-12-18T16:15:35+00:00']])

    assert 'invalid timestamp operation.' in str(exc_info)


def test__filter_list_timestamp():
    # A few basic comparison operators to smoke-check
    after = filter_list(SAMPLE_AUDIT, [['timestamp.$date', '>', '2023-12-18T16:15:35+00:00']])
    assert len(after) == 2
    at_or_after = filter_list(SAMPLE_AUDIT, [['timestamp.$date', '>=', '2023-12-18T16:15:35+00:00']])
    assert len(at_or_after) == 3
    # Check that zulu abbreviation is evaluated properly
    before = filter_list(SAMPLE_AUDIT, [['timestamp.$date', '<', '2023-12-18T16:15:35Z']])
    assert len(before) == 2
def test__filter_list_nested_object_in_list():
    """'*' in a dotted key iterates list members while filtering."""
    matched = filter_list(DATA_WITH_LISTODICTS, [['list.*.number', '=', 3]])
    assert len(matched) == 2


def test__filter_list_inconsistent_nested_object_in_list():
    """Non-dict members inside the list are skipped rather than crashing the filter."""
    matched = filter_list(DATA_WITH_LISTODICTS_INCONSISTENT, [['list.*.number', '=', 3]])
    assert len(matched) == 2


def test__filter_list_deeply_nested_lists():
    matched = filter_list(DATA_WITH_DEEP_LISTS, [['list.*.list2.*.number', '=', 2]])
    assert len(matched) == 2


def test__filter_list_undefined():
    """Filtering for None matches an explicit null but not a missing key."""
    matched = filter_list(DATA_WITH_NULL, [['foo', '=', None]])
    assert len(matched) == 1


def test__filter_list_invalid_key():
    """An unknown key simply matches nothing."""
    matched = filter_list(DATA_WITH_NULL, [['canary', 'in', 'canary2']])
    assert len(matched) == 0
24,364 | test_cpu_util_amd.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/utils/test_cpu_util_amd.py | # -*- coding=utf-8 -*-
from unittest.mock import Mock, patch
import pytest
from middlewared.utils.cpu import amd_cpu_temperatures
@pytest.mark.parametrize("model,core_count,reading,result", [
# k10temp has no temperature offset constant for this CPU, Tdie will be equal to Tctl, it's better to use Tccd1
("AMD Ryzen 5 3600 6-Core Processor", 6, {
"k10temp-pci-00c3_temp1": {
"name": "Tctl",
"value": 48.625
},
"k10temp-pci-00c3_temp3": {
"name": "Tdie",
"value": 48.625
},
"k10temp-pci-00c3_temp4": {
"name": "Tccd1",
"value": 54.750
},
}, dict(enumerate([54.750] * 6))),
# k10temp has temperature offset constant for this CPU so we should use Tdie
# https://jira.ixsystems.com/browse/NAS-110515
("AMD Ryzen Threadripper 1950X 16-Core Processor", 16, {
"k10temp-pci-00c3_temp1": {
"name": "Tctl",
"value": 67.0
},
"k10temp-pci-00c3_temp3": {
"name": "Tdie",
"value": 40.0
},
"k10temp-pci-00c3_temp4": {
"name": "Tccd1",
"value": 65.5
},
}, dict(enumerate([40] * 16))),
("AMD Opteron APU 1-Core Processor", 1, {
"k10temp-pci-00c3_temp1": {
"name": "temp1",
"value": 48.23
}
}, dict(enumerate([48.23] * 1))),
("AMD Opteron APU Processor", 1, {
"k10temp-pci-00c3_temp1": {
"name": "temp1",
"value": {"temp1_input": 48.23}
},
}, dict(enumerate([48.23] * 1))),
])
def test_amd_cpu_temperatures(model, core_count, reading, result):
with patch(
"middlewared.utils.cpu.cpu_info", Mock(
return_value={"cpu_model": model, "physical_core_count": core_count}
)
):
assert amd_cpu_temperatures(reading) == result
| 1,914 | Python | .py | 56 | 25.785714 | 115 | 0.543197 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,365 | test_secret.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/accept/test_secret.py | from pydantic import Secret
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.accept import accept_params
def test_private_str():
    """Secret-wrapped strings pass through accept_params unredacted."""
    class MethodArgs(BaseModel):
        password: Secret[str]

    accepted = accept_params(MethodArgs, ["pass"])
    assert accepted == ["pass"]
24,366 | test_default.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/accept/test_default.py | from pydantic import Field
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.accept import accept_params
def test_default_dict():
    """A model-typed field with a default is materialized when the argument is omitted."""
    class Options(BaseModel):
        force: bool = False

    class MethodArgs(BaseModel):
        id: int
        options: Options = Field(default=Options())

    explicit = accept_params(MethodArgs, [1, {"force": True}])
    assert explicit == [1, {"force": True}]
    defaulted = accept_params(MethodArgs, [1])
    assert defaulted == [1, {"force": False}]
| 471 | Python | .py | 11 | 37.727273 | 82 | 0.694505 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,367 | test_accept_1.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/accept/test_accept_1.py | from annotated_types import Gt
import pytest
from typing_extensions import Annotated
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.accept import accept_params
from middlewared.service_exception import CallError, ValidationErrors
class MethodArgs(BaseModel):
    """Argument model under test: a required nested Param plus an optional flag."""
    param: "Param"  # forward reference; resolved once Param is defined below
    force: bool = False


class Param(BaseModel):
    """Nested payload; `count` must be strictly positive (Gt(0))."""
    name: str
    count: Annotated[int, Gt(0)] = 1
@pytest.mark.parametrize("params,result_or_error", [
    ([], {"param": "Field required"}),
    ([1, 2, 3], "Too many arguments (expected 2, found 3)"),
    ([{"name": "test"}], [{"name": "test", "count": 1}, False]),
    ([{"name": "test"}, True], [{"name": "test", "count": 1}, True]),
    ([{"name": "test"}, 1], {"force": "Input should be a valid boolean"}),
    ([{"name": "test", "count": 0}], {"param.count": "Input should be greater than 0"}),
    ([{"name": "test", "amount": 0}], {"param.amount": "Extra inputs are not permitted"}),
])
def test__accept_params(params, result_or_error):
    """Drive accept_params(MethodArgs, ...) through success and failure paths.

    The expected value's type selects the branch:
    list -> normalized params, dict -> {attribute: message} extracted from the
    ValidationErrors, str -> CallError message (arity errors).
    """
    if isinstance(result_or_error, list):
        assert accept_params(MethodArgs, params) == result_or_error
    elif isinstance(result_or_error, dict):
        with pytest.raises(ValidationErrors) as ve:
            accept_params(MethodArgs, params)

        assert {e.attribute: e.errmsg for e in ve.value.errors} == result_or_error
    elif isinstance(result_or_error, str):
        # Previously called accept_params(Param, params) — an apparent copy-paste
        # slip. Both models happen to take two fields, so the arity error text was
        # identical, but the test's intent is to exercise MethodArgs throughout.
        with pytest.raises(CallError) as ve:
            accept_params(MethodArgs, params)

        assert ve.value.errmsg == result_or_error
24,368 | test_types.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/result/test_types.py | from middlewared.api.base import BaseModel, single_argument_result
from middlewared.api.base.handler.result import serialize_result
def test_preserves_types():
    """serialize_result must hand back non-JSON types (here a set) untouched."""
    @single_argument_result
    class MethodResult(BaseModel):
        data: dict

    payload = {"data": {"id": {1, 2, 3}}}
    serialized = serialize_result(MethodResult, payload, False)
    assert serialized == payload
| 389 | Python | .py | 12 | 26.416667 | 66 | 0.670241 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,369 | test_secret.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/result/test_secret.py | from pydantic import Secret
import pytest
from middlewared.api.base import BaseModel, ForUpdateMetaclass, single_argument_args, single_argument_result
from middlewared.api.base.handler.accept import accept_params
from middlewared.api.base.handler.dump_params import dump_params, remove_secrets
from middlewared.api.base.handler.result import serialize_result
@pytest.mark.parametrize("expose_secrets,result", [
(True, {"name": "ivan", "password": "pass"}),
(False, {"name": "ivan", "password": "********"}),
])
def test_private_str(expose_secrets, result):
@single_argument_result
class MethodResult(BaseModel):
name: str
password: Secret[str]
assert serialize_result(MethodResult, {"name": "ivan", "password": "pass"}, expose_secrets) == result
@pytest.mark.parametrize("args", [[{}], [{"password": "xxx"}]])
def test_private_update(args):
@single_argument_args("data")
class UpdateArgs(BaseModel, metaclass=ForUpdateMetaclass):
password: Secret[str]
assert accept_params(UpdateArgs, args) == args
@pytest.mark.parametrize("args,result", [
({"username": "ivan", "password": "xxx"}, {"username": "ivan", "password": "********"}),
({"username": 1, "password": "xxx"}, {"username": 1, "password": "********"}),
({"password": "xxx"}, {"password": "********"}),
])
def test_private_without_validation(args, result):
@single_argument_args("data")
class CreateArgs(BaseModel):
username: str
password: Secret[str]
assert dump_params(CreateArgs, [args], False) == [result]
def test_remove_secrets_nested():
    """Secret fields are redacted even inside a list of nested models."""
    class UserModel(BaseModel):
        username: str
        password: Secret[str]

    class SystemModel(BaseModel):
        users: list[UserModel]

    raw = {
        "users": [
            {"username": "ivan", "password": "xxx"},
            {"username": "oleg", "password": "xxx"},
        ],
    }
    redacted = {
        "users": [
            {"username": name, "password": "********"}
            for name in ("ivan", "oleg")
        ],
    }
    assert remove_secrets(SystemModel, raw) == redacted
def test_private_union():
with pytest.raises(TypeError) as ve:
class UserModel(BaseModel):
username: str
password: Secret[str] | None
assert ve.value.args[0] == ("Model UserModel has field password defined as Optional[pydantic.types.Secret[str]]. "
"pydantic.types.Secret[str] cannot be a member of an Optional or a Union, please make "
"the whole field Private.")
| 2,571 | Python | .py | 58 | 37.448276 | 119 | 0.626603 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,370 | test_adapt.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/version/test_adapt.py | from pydantic import EmailStr
import pytest
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
class SettingsV1(BaseModel):
email: EmailStr | None = None
class SettingsV2(BaseModel):
emails: list[EmailStr]
@classmethod
def from_previous(cls, value):
email = value.pop("email")
if email is None:
value["emails"] = []
else:
value["emails"] = [email]
return value
@classmethod
def to_previous(cls, value):
emails = value.pop("emails")
if emails:
value["email"] = emails[0]
else:
value["email"] = None
return value
class SettingsV3(BaseModel):
contacts: list[dict]
@classmethod
def from_previous(cls, value):
emails = value.pop("emails")
value["contacts"] = [{"name": email.split("@")[0].title(), "email": email}
for email in emails]
return value
@classmethod
def to_previous(cls, value):
contacts = value.pop("contacts")
value["emails"] = [contact["email"] for contact in contacts]
return value
@pytest.mark.parametrize("version1,value,version2,result", [
("v1", {"email": "alice@ixsystems.com"}, "v3", {"contacts": [{"name": "Alice", "email": "alice@ixsystems.com"}]}),
("v3", {"contacts": [{"name": "Alice", "email": "alice@ixsystems.com"}]}, "v1", {"email": "alice@ixsystems.com"}),
])
def test_adapt(version1, value, version2, result):
adapter = APIVersionsAdapter([
APIVersion("v1", {"Settings": SettingsV1}),
APIVersion("v2", {"Settings": SettingsV2}),
APIVersion("v3", {"Settings": SettingsV3}),
])
assert adapter.adapt(value, "Settings", version1, version2) == result
| 1,843 | Python | .py | 48 | 31.1875 | 118 | 0.622322 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,371 | test_adapt_nested_model.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/version/test_adapt_nested_model.py | from pydantic import EmailStr
import pytest
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
class Settings(BaseModel):
email: EmailStr | None = None
class UpdateSettingsArgsV1(BaseModel):
settings: Settings
class Settings(BaseModel):
emails: list[EmailStr]
@classmethod
def from_previous(cls, value):
email = value.pop("email")
if email is None:
value["emails"] = []
else:
value["emails"] = [email]
return value
@classmethod
def to_previous(cls, value):
emails = value.pop("emails")
if emails:
value["email"] = emails[0]
else:
value["email"] = None
return value
class UpdateSettingsArgsV2(BaseModel):
settings: Settings
class Settings(BaseModel):
contacts: list[dict]
@classmethod
def from_previous(cls, value):
emails = value.pop("emails")
value["contacts"] = [{"name": email.split("@")[0].title(), "email": email}
for email in emails]
return value
@classmethod
def to_previous(cls, value):
contacts = value.pop("contacts")
value["emails"] = [contact["email"] for contact in contacts]
return value
class UpdateSettingsArgsV3(BaseModel):
settings: Settings
@pytest.mark.parametrize("version1,value,version2,result", [
("v1", {"settings": {"email": "alice@ixsystems.com"}},
"v3", {"settings": {"contacts": [{"name": "Alice", "email": "alice@ixsystems.com"}]}}),
("v3", {"settings": {"contacts": [{"name": "Alice", "email": "alice@ixsystems.com"}]}},
"v1", {"settings": {"email": "alice@ixsystems.com"}}),
])
def test_adapt(version1, value, version2, result):
adapter = APIVersionsAdapter([
APIVersion("v1", {"UpdateSettingsArgs": UpdateSettingsArgsV1}),
APIVersion("v2", {"UpdateSettingsArgs": UpdateSettingsArgsV2}),
APIVersion("v3", {"UpdateSettingsArgs": UpdateSettingsArgsV3}),
])
assert adapter.adapt(value, "UpdateSettingsArgs", version1, version2) == result
| 2,166 | Python | .py | 56 | 31.839286 | 92 | 0.648415 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,372 | test_adapt_shortcuts.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/version/test_adapt_shortcuts.py | import pytest
from middlewared.api.base import BaseModel, ForUpdateMetaclass, single_argument_args, single_argument_result
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
class ModelV1(BaseModel, metaclass=ForUpdateMetaclass):
number: int = 1
class ModelV2(BaseModel, metaclass=ForUpdateMetaclass):
number: int = 1
text: str = "1"
def test_adapt_for_update_metaclass():
adapter = APIVersionsAdapter([
APIVersion("v1", {"Model": ModelV1}),
APIVersion("v2", {"Model": ModelV2}),
])
assert adapter.adapt({}, "Model", "v1", "v2") == {}
class ArgsV1(BaseModel):
    # v1 call signature: plain positional fields.
    count: int
    force: bool = False


@single_argument_args("options")
class ArgsV2(BaseModel):
    # v2 wraps all fields into a single "options" argument.
    count: int
    exclude: list[str] = []
    force: bool = False

    # NOTE(review): every other from_previous converter in these version-adapter
    # tests is decorated with @classmethod; here it is a plain function, so an
    # unbound call on the class would receive the value as `cls`. Confirm against
    # APIVersionsAdapter's calling convention whether @classmethod is required.
    def from_previous(cls, value):
        return {
            "options": value,
        }
@pytest.mark.parametrize("version1,value,version2,result", [
    ("v1", {"count": 1}, "v2", {"options": {"count": 1, "force": False}}),
])
def test_adapt_single_argument_args(version1, value, version2, result):
    """Upgrading across a @single_argument_args boundary nests the payload."""
    versions = [
        APIVersion("v1", {"Args": ArgsV1}),
        APIVersion("v2", {"Args": ArgsV2}),
    ]
    adapted = APIVersionsAdapter(versions).adapt(value, "Args", version1, version2)
    assert adapted == result
class ResultV1(BaseModel):
    # v1: bare integer result.
    result: int


@single_argument_result
class ResultV2(BaseModel):
    # v2: structured result with an added status field.
    value: int
    status: str

    # NOTE(review): plain function here, while the analogous converters elsewhere
    # in these tests are @classmethod — verify the adapter's calling convention.
    def from_previous(cls, value):
        return {
            "result": {
                "value": value["result"],
                "status": "OK",
            },
        }
@pytest.mark.parametrize("version1,value,version2,result", [
    ("v1", {"result": 1}, "v2", {"result": {"value": 1, "status": "OK"}}),
])
def test_adapt_single_argument_result(version1, value, version2, result):
    """Upgrading a @single_argument_result model wraps and enriches the value."""
    versions = [
        APIVersion("v1", {"Result": ResultV1}),
        APIVersion("v2", {"Result": ResultV2}),
    ]
    adapted = APIVersionsAdapter(versions).adapt(value, "Result", version1, version2)
    assert adapted == result
| 2,040 | Python | .py | 57 | 30.140351 | 108 | 0.644241 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,373 | test_adapt_default.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/version/test_adapt_default.py | import pytest
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
class SettingsV1(BaseModel):
text1: str
class SettingsV2(BaseModel):
text1: str
text2: str = "text2"
@pytest.mark.parametrize("version1,value,version2,result", [
    ("v1", {"text1": "text1"}, "v2", {"text1": "text1", "text2": "text2"}),
    ("v2", {"text1": "text1", "text2": "text2"}, "v1", {"text1": "text1"}),
])
def test_adapt(version1, value, version2, result):
    """Without explicit converters, new fields gain defaults on upgrade and are
    dropped on downgrade."""
    versions = [
        APIVersion("v1", {"Settings": SettingsV1}),
        APIVersion("v2", {"Settings": SettingsV2}),
    ]
    adapted = APIVersionsAdapter(versions).adapt(value, "Settings", version1, version2)
    assert adapted == result
| 744 | Python | .py | 18 | 37.277778 | 79 | 0.680111 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,374 | test_adapt_validation.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/version/test_adapt_validation.py | from unittest.mock import ANY
from pydantic import EmailStr
import pytest
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
from middlewared.service_exception import ValidationErrors, ValidationError
class SettingsV1(BaseModel):
email: EmailStr | None = None
class SettingsV2(BaseModel):
emails: list[EmailStr]
@classmethod
def from_previous(cls, value):
email = value.pop("email")
if email is None:
value["emails"] = []
else:
value["emails"] = [email]
return value
@classmethod
def to_previous(cls, value):
emails = value.pop("emails")
if emails:
value["email"] = emails[0]
else:
value["email"] = None
return value
def test_adapt_validation():
adapter = APIVersionsAdapter([
APIVersion("v1", {"Settings": SettingsV1}),
APIVersion("v2", {"Settings": SettingsV2}),
])
with pytest.raises(ValidationErrors) as ve:
assert adapter.adapt({"email": ""}, "Settings", "v1", "v2")
assert ve.value.errors == [ValidationError("email", ANY)]
def test_adapt_default():
adapter = APIVersionsAdapter([
APIVersion("v1", {"Settings": SettingsV1}),
APIVersion("v2", {"Settings": SettingsV2}),
])
assert adapter.adapt({}, "Settings", "v1", "v2") == {"emails": []}
| 1,438 | Python | .py | 40 | 29.425 | 79 | 0.650253 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,375 | test_adapt_nested_model_list.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/handler/version/test_adapt_nested_model_list.py | import pytest
from middlewared.api.base import BaseModel
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
class Contact(BaseModel):
name: str
email: str
class SettingsV1(BaseModel):
contacts: list[Contact]
class Contact(BaseModel):
first_name: str
last_name: str
email: str
@classmethod
def from_previous(cls, value):
if " " in value["name"]:
value["first_name"], value["last_name"] = value.pop("name").split(" ", 1)
else:
value["first_name"] = value.pop("name")
value["last_name"] = ""
return value
@classmethod
def to_previous(cls, value):
value["name"] = f"{value.pop('first_name')} {value.pop('last_name')}"
return value
class SettingsV2(BaseModel):
contacts: list[Contact]
@pytest.mark.parametrize("version1,value,version2,result", [
    ("v1", {"contacts": [{"name": "Jane Doe", "email": "jane@ixsystems.com"}]},
     "v2", {"contacts": [{"first_name": "Jane", "last_name": "Doe", "email": "jane@ixsystems.com"}]}),
    ("v2", {"contacts": [{"first_name": "Jane", "last_name": "Doe", "email": "jane@ixsystems.com"}]},
     "v1", {"contacts": [{"name": "Jane Doe", "email": "jane@ixsystems.com"}]}),
])
def test_adapt(version1, value, version2, result):
    """Converters on a nested model are applied per element of a model list."""
    versions = [
        APIVersion("v1", {"Settings": SettingsV1}),
        APIVersion("v2", {"Settings": SettingsV2}),
    ]
    adapted = APIVersionsAdapter(versions).adapt(value, "Settings", version1, version2)
    assert adapted == result
| 1,546 | Python | .py | 38 | 34.894737 | 102 | 0.632708 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,376 | test_model.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/base/test_model.py | import pytest
from middlewared.api.base import (BaseModel, Excluded, excluded_field, ForUpdateMetaclass, single_argument_args,
single_argument_result)
from middlewared.api.base.handler.accept import accept_params
from middlewared.api.base.handler.result import serialize_result
from middlewared.service_exception import ValidationErrors
class Object(BaseModel):
id: int
name: str
count: int = 0
class CreateObject(Object):
id: Excluded = excluded_field()
class UpdateObject(CreateObject, metaclass=ForUpdateMetaclass):
pass
class UpdateArgs(BaseModel):
id: int
data: UpdateObject
@pytest.mark.parametrize("data", [
    {},
    {"name": "Ivan"},
    {"count": 0},
    {"count": 1},
])
def test_for_update(data):
    """``ForUpdateMetaclass`` models accept any subset of fields (including none) and echo them back unchanged."""
    assert accept_params(UpdateArgs, [1, data]) == [1, data]
def test_single_argument_args():
@single_argument_args("param")
class MethodArgs(BaseModel):
name: str
count: int = 1
assert accept_params(MethodArgs, [{"name": "ivan"}]) == [{"name": "ivan", "count": 1}]
def test_single_argument_args_error():
@single_argument_args("param")
class MethodArgs(BaseModel):
name: str
count: int = 1
with pytest.raises(ValidationErrors) as ve:
accept_params(MethodArgs, [{"name": 1}])
assert ve.value.errors[0].attribute == "param.name"
def test_single_argument_result():
    """``@single_argument_result`` wraps the model so the whole dict is serialized as the method result."""
    @single_argument_result
    class MethodResult(BaseModel):
        name: str
        count: int
    assert serialize_result(MethodResult, {"name": "ivan", "count": 1}, True) == {"name": "ivan", "count": 1}
| 1,634 | Python | .py | 45 | 30.888889 | 112 | 0.681122 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,377 | test_excluded.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/base/test_excluded.py | import pytest
from middlewared.api.base import BaseModel, Excluded, excluded_field
from middlewared.api.base.handler.accept import accept_params
from middlewared.service_exception import ValidationErrors
class Object(BaseModel):
id: int
name: str
class CreateObject(Object):
id: Excluded = excluded_field()
class CreateArgs(BaseModel):
data: CreateObject
def test_excluded_field():
    """Supplying a value for an ``excluded_field`` must be rejected as an extra input on that attribute."""
    with pytest.raises(ValidationErrors) as ve:
        accept_params(CreateObject, [{"id": 1, "name": "Ivan"}])
    assert ve.value.errors[0].attribute == "id"
    assert ve.value.errors[0].errmsg == "Extra inputs are not permitted"
24,378 | test_base.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/base/types/test_base.py | import pytest
from middlewared.api.base import BaseModel, LongString
from middlewared.api.base.handler.accept import accept_params
from middlewared.service_exception import ValidationErrors
@pytest.mark.parametrize("type,value,error", [
    (str, "0" * 2000, "String should have at most 1024 characters")
])
def test_base_types(type, value, error):
    """Plain ``str`` fields are capped at 1024 characters; longer input raises a validation error."""
    class Model(BaseModel):
        param: type
    with pytest.raises(ValidationErrors) as ve:
        assert accept_params(Model, [value])
    assert ve.value.errors[0].errmsg == error
class LongStringMethodArgs(BaseModel):
str: LongString
dict: "LongStringDict"
class LongStringDict(BaseModel):
str: LongString
list: list[LongString]
def test_long_string():
    """``LongString`` fields accept values beyond the default 1024-char cap, including nested in dicts and lists."""
    data = ["test1" * 1000, {"str": "test2" * 1000, "list": ["test3" * 1000, "test4" * 1000]}]
    assert accept_params(LongStringMethodArgs, data) == data
| 893 | Python | .py | 22 | 36.545455 | 94 | 0.733721 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,379 | test_legacy_api_method.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/api/base/server/test_legacy_api_method.py | from unittest.mock import Mock
from middlewared.api.base import BaseModel
from middlewared.api.base.decorator import api_method
from middlewared.api.base.handler.version import APIVersion, APIVersionsAdapter
from middlewared.api.base.server.legacy_api_method import LegacyAPIMethod
class MethodArgs(BaseModel):
number: int
multiplier: int = 2
class MethodResult(BaseModel):
result: str
MethodArgsV1 = MethodArgs
MethodResultV1 = MethodResult
class MethodArgs(BaseModel):
number: int
text: str = "Default"
multiplier: int = 2
class MethodResult(BaseModel):
result: int
@classmethod
def to_previous(cls, value):
value["result"] = str(value["result"])
return value
@api_method(MethodArgs, MethodResult)
def method(number, text, multiplier):
    # Test endpoint: returns both inputs scaled/repeated by `multiplier`.
    # NOTE(review): the returned keys ("number"/"text") do not match MethodResult's
    # single "result" field -- presumably only the param/result adaptation helpers
    # below are exercised, not full result validation. Verify if extending.
    return {
        "number": number * multiplier,
        "text": text * multiplier,
    }
adapter = APIVersionsAdapter([
APIVersion("v1", {"MethodArgs": MethodArgsV1, "MethodResult": MethodResultV1}),
APIVersion("v2", {"MethodArgs": MethodArgs, "MethodResult": MethodResult}),
])
legacy_api_method = LegacyAPIMethod(
Mock(
get_method=Mock(return_value=(Mock(), method))
),
"core.test",
"v1",
adapter,
)
def test_adapt_params():
    # A v1 call with only `number` gains v2's new `text` default plus the existing `multiplier` default.
    assert legacy_api_method._adapt_params([1]) == [1, "Default", 2]
def test_adapt_result():
    # MethodResult.to_previous stringifies the integer result for v1 clients.
    assert legacy_api_method._adapt_result(1) == "1"
| 1,425 | Python | .py | 44 | 28.181818 | 83 | 0.721324 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,380 | test_cloud_sync.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_cloud_sync.py | # flake8: noqa
import io
import textwrap
from unittest.mock import Mock
import pytest
from middlewared.plugins.cloud_sync import (
get_dataset_recursive, FsLockManager, lsjson_error_excerpt, RcloneVerboseLogCutter
)
def test__get_dataset_recursive_1():
    """Selecting the parent dataset's own mountpoint picks that dataset and
    requires recursion, because a child dataset is mounted beneath it."""
    dataset, recursive = get_dataset_recursive(
        [
            {
                "children": [
                    {
                        "children": [],
                        "properties": {
                            "mountpoint": {
                                "value": "/mnt/data/test",
                            }
                        }
                    }
                ],
                "properties": {
                    "mountpoint": {
                        "value": "/mnt/data",
                    }
                }
            },
            {
                "children": [],
                "properties": {
                    "mountpoint": {
                        "value": "/mnt/data/test",
                    }
                }
            }
        ],
        "/mnt/data",
    )
    assert dataset["properties"]["mountpoint"]["value"] == "/mnt/data"
    assert recursive is True
def test__get_dataset_recursive_2():
dataset, recursive = get_dataset_recursive(
[
{
"children": [
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/test",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data",
}
}
},
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/test",
}
}
}
],
"/mnt/data/test",
)
assert dataset["properties"]["mountpoint"]["value"] == "/mnt/data/test"
assert recursive is False
def test__get_dataset_recursive_3():
dataset, recursive = get_dataset_recursive(
[
{
"children": [
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/test",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data",
}
}
},
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/test",
}
}
}
],
"/mnt/data/test2",
)
assert dataset["properties"]["mountpoint"]["value"] == "/mnt/data"
assert recursive is False
def test__get_dataset_recursive_4():
dataset, recursive = get_dataset_recursive(
[
{
"children": [
{
"children": [
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup/test0/test1/test2",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data",
}
}
},
{
"children": [
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup/test0/test1/test2",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup",
}
}
},
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup/test0/test1/test2",
}
}
}
],
"/mnt/data/backup/test0",
)
assert dataset["properties"]["mountpoint"]["value"] == "/mnt/data/backup"
assert recursive is True
def test__get_dataset_recursive_5():
dataset, recursive = get_dataset_recursive(
[
{
"children": [
{
"children": [
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup/test0/test1/test2",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data",
}
}
},
{
"children": [
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup/test0/test1/test2",
}
}
}
],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup",
}
}
},
{
"children": [],
"properties": {
"mountpoint": {
"value": "/mnt/data/backup/test0/test1/test2",
}
}
}
],
"/mnt/data/backup/test0/test3",
)
assert dataset["properties"]["mountpoint"]["value"] == "/mnt/data/backup"
assert recursive is False
def lock_mock(*args, **kwargs):
    """Build a Mock standing in for an FS lock, whose inner reader/writer locks are themselves Mocks."""
    fake = Mock(*args, **kwargs)
    for side in (fake._reader_lock, fake._writer_lock):
        side._lock = Mock()
    return fake
def test__fs_lock_manager_1():
    # Locking an ancestor path must return the same lock as an already-locked descendant.
    flm = FsLockManager()
    flm._lock = lock_mock
    flm._choose_lock = lambda lock, direction: lock
    lock = flm.lock("/mnt/tank/work", Mock())
    assert flm.lock("/mnt/tank", Mock()) == lock
def test__fs_lock_manager_2():
    # Locking a descendant path reuses the lock of an already-locked ancestor.
    flm = FsLockManager()
    flm._lock = lock_mock
    flm._choose_lock = lambda lock, direction: lock
    lock = flm.lock("/mnt/tank/work", Mock())
    assert flm.lock("/mnt/tank/work/temp", Mock()) == lock
def test__fs_lock_manager_3():
    # Sibling paths (no ancestor/descendant relationship) must get distinct locks.
    flm = FsLockManager()
    flm._lock = lock_mock
    flm._choose_lock = lambda lock, direction: lock
    lock = flm.lock("/mnt/tank/work", Mock())
    assert flm.lock("/mnt/tank/temp", Mock()) != lock
@pytest.mark.parametrize("error,excerpt", [
(
"2019/09/18 12:26:40 ERROR : : error listing: InvalidAccessKeyId: The AWS Access Key Id you provided does not "
"exist in our records.\n\tstatus code: 403, request id: 26089FA2BCBF0B60, host id: A6E42cyE7S+KyVKBJh5DRDu/Jv+F"
"rd6LvXL5A0fLQyMhCvidM7JHA2FY2mLkn4h1IkepFU7G/BE=\n2019/09/18 12:26:40 Failed to lsjson: error in ListJSON: "
"InvalidAccessKeyId: The AWS Access Key Id you provided does not exist in our records.\n\tstatus code: 403, "
"request id: 26089FA2BCBF0B60, host id: A6E42cyE7S+KyVKBJh5DRDu/Jv+Frd6LvXL5A0fLQyMhCvidM7JHA2FY2mLkn4h1IkepFU7"
"G/BE=\n",
"InvalidAccessKeyId: The AWS Access Key Id you provided does not exist in our records."
),
(
"2019/09/18 12:29:42 Failed to create file system for \"remote:\": Failed to parse credentials: illegal base64 "
"data at input byte 0\n",
"Failed to parse credentials: illegal base64 data at input byte 0"
)
])
def test__lsjson_error_excerpt(error, excerpt):
assert lsjson_error_excerpt(error) == excerpt
def INFO(v=None):
    """Return a synthetic rclone verbose stats block for log-cutter tests.

    When ``v`` is given it becomes the seconds field of a timestamp prefix;
    when ``v`` is None a syslog-style ``<6>`` priority prefix is used instead.
    """
    prefix = "<6>" if v is None else f"2020/01/22 22:32:{v:02d} "
    return textwrap.dedent(f"""\
{prefix}INFO :
Transferred: 752.465G / 27.610 TBytes, 3%, 7.945 MBytes/s, ETA 5w6d1h16m55s
Errors: 478 (retrying may help)
Checks: 89 / 89, 100%
Transferred: 75 / 3546, 2%
Elapsed time: 26h56m23.1s
Transferring:
  * Cam (2018)/Cam (2018) WEBDL-1080p.mkv: 0% /3.470G, 0/s, -
  * Call Me by Your Name (…2017) Bluray-1080p.mkv: 0% /9.839G, 0/s, -
  * Can't Take It Back (20…(2017) WEBDL-1080p.mkv: 0% /3.035G, 0/s, -
  * Candleshoe (1977)/Cand… (1977) WEBDL-720p.mkv: 0% /2.865G, 0/s, -
""")
@pytest.mark.parametrize("input,output", [
(f"WELCOME TO RCLONE\n{INFO(1)}{INFO(2)}BYE!\n", f"WELCOME TO RCLONE\n{INFO(1)}BYE!\n"),
(f"WELCOME TO RCLONE\n{INFO(1)}{INFO(2)}{INFO(3)}{INFO(4)}{INFO(5)}{INFO(6)}BYE!\n",
f"WELCOME TO RCLONE\n{INFO(1)}{INFO(6)}BYE!\n"),
(f"WELCOME TO RCLONE\n{INFO(1)}{INFO(2)[:300]}\nKilled (9)",
f"WELCOME TO RCLONE\n{INFO(1)}{INFO(2)[:300]}\nKilled (9)"),
(f"2020/01/27 13:16:15 INFO : S3 bucket ixsystems: Waiting for transfers to finish\n"
f"{INFO(1)}{INFO(2)}{INFO(3)}{INFO(4)}{INFO(5)}{INFO(6)}BYE!\n",
f"2020/01/27 13:16:15 INFO : S3 bucket ixsystems: Waiting for transfers to finish\n{INFO(1)}{INFO(6)}BYE!\n"),
(f"WELCOME TO RCLONE\n{INFO()}{INFO()}BYE!\n", f"WELCOME TO RCLONE\n{INFO()}BYE!\n"),
])
def test__RcloneVerboseLogCutter(input, output):
    """Feed *input* line-by-line through the cutter and compare the collapsed log to *output*."""
    cutter = RcloneVerboseLogCutter(5)
    pieces = []
    # notify() buffers verbose stats blocks and only emits text when a block is
    # resolved; flush() releases whatever is still buffered at EOF.
    for line in io.StringIO(input):
        emitted = cutter.notify(line)
        if emitted:
            pieces.append(emitted)
    trailing = cutter.flush()
    if trailing:
        pieces.append(trailing)
    assert "".join(pieces) == output
| 10,995 | Python | .py | 310 | 20.23871 | 120 | 0.404628 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,381 | test_gpu_critical.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_gpu_critical.py | import textwrap
import pytest
from unittest.mock import patch, MagicMock
from middlewared.utils.gpu import get_gpus
DEVICE_DATA = {
'0000:17:00.0': {
'PCI_ID': '1AF4:1050',
'ID_VENDOR_FROM_DATABASE': 'NVIDIA Corporation',
'ID_PCI_SUBCLASS_FROM_DATABASE': 'VGA compatible controller',
'PCI_SLOT_NAME': '0000:17:00.0',
},
'0000:17:00.1': {
'PCI_ID': '1AF4:1050',
'ID_VENDOR_FROM_DATABASE': 'NVIDIA Corporation',
'ID_PCI_SUBCLASS_FROM_DATABASE': 'Audio device',
'PCI_SLOT_NAME': '0000:17:00.1',
},
'0000:00:1f.4': {
'PCI_ID': '1AF4:1050',
'ID_VENDOR_FROM_DATABASE': 'Intel Corporation',
'ID_PCI_SUBCLASS_FROM_DATABASE': 'SMBus',
'PCI_SLOT_NAME': '0000:00:1f.4',
},
}
@pytest.mark.parametrize(
'ls_pci,gpu_pci_id,child_ids,iommu_group,uses_system_critical_devices,critical_reason',
[
(
textwrap.dedent('''
0000:17:00.0 VGA compatible controller: NVIDIA Corporation TU117GL [T400 4GB] (rev a1)
0000:17:00.1 Audio device: NVIDIA Corporation Device 10fa (rev a1)
'''),
'0000:17:00.0',
['0000:17:00.1'],
{
'0000:17:00.1': {
'number': 9,
'addresses': [],
'critical': False
},
'0000:17:00.0': {
'number': 9,
'addresses': [],
'critical': False
},
},
False,
None
),
(
textwrap.dedent('''
0000:17:00.0 VGA compatible controller: NVIDIA Corporation TU117GL [T400 4GB] (rev a1)
0000:17:00.1 Audio device: NVIDIA Corporation Device 10fa (rev a1)
0000:00:1f.4 SMBus: Intel Corporation C620 Series Chipset Family SMBus (rev 09)
'''),
'0000:17:00.0',
['0000:17:00.1', '0000:00:1f.4'],
{
'0000:17:00.1': {
'number': 9,
'addresses': [],
'critical': False
},
'0000:17:00.0': {
'number': 9,
'addresses': [],
'critical': False
},
'0000:00:1f.4': {
'number': 31,
'addresses': [],
'critical': True
},
},
True,
'Critical devices found: 0000:00:1f.4\nCritical devices found in same IOMMU group: 0000:00:1f.4'
),
(
textwrap.dedent('''
0000:17:00.0 VGA compatible controller: NVIDIA Corporation TU117GL [T400 4GB] (rev a1)
0000:17:00.1 Audio device: NVIDIA Corporation Device 10fa (rev a1)
0000:00:1f.4 SMBus: Intel Corporation C620 Series Chipset Family SMBus (rev 09)
'''),
'0000:17:00.0',
['0000:17:00.1'],
{
'0000:17:00.1': {
'number': 10,
'addresses': [],
'critical': False
},
'0000:17:00.0': {
'number': 9,
'addresses': [],
'critical': False
},
'0000:00:1f.4': {
'number': 9,
'addresses': [],
'critical': True
},
},
True,
'Critical devices found in same IOMMU group: 0000:17:00.0'
),
(
textwrap.dedent('''
0000:17:00.0 VGA compatible controller: NVIDIA Corporation TU117GL [T400 4GB] (rev a1)
0000:17:00.1 Audio device: NVIDIA Corporation Device 10fa (rev a1)
0000:00:1f.4 SMBus: Intel Corporation C620 Series Chipset Family SMBus (rev 09)
'''),
'0000:17:00.0',
['0000:17:00.1'],
{
'0000:17:00.1': {
'number': 9,
'addresses': [],
'critical': False
},
'0000:17:00.0': {
'number': 10,
'addresses': [],
'critical': False
},
'0000:00:1f.4': {
'number': 9,
'addresses': [],
'critical': True
},
},
True,
'Critical devices found in same IOMMU group: 0000:17:00.1'
)
]
)
def test_critical_gpu(
    ls_pci, gpu_pci_id, child_ids, iommu_group, uses_system_critical_devices, critical_reason
):
    """GPUs whose PCI children or IOMMU-group peers are critical devices must be flagged as such."""
    with patch('middlewared.utils.gpu.pyudev.Devices.from_name', MagicMock()) as from_name_mock:
        # udev lookup returns a fake GPU device with the configured PCI children.
        udev_mock = MagicMock()
        udev_mock.get = lambda key, default: DEVICE_DATA[gpu_pci_id].get(key, default)
        udev_mock.parent.children = [DEVICE_DATA[child_id] for child_id in child_ids]
        from_name_mock.return_value = udev_mock
        with patch('middlewared.utils.gpu.subprocess.Popen', MagicMock()) as popen_mock:
            # The lspci subprocess output is replaced with the canned listing.
            comm_mock = MagicMock()
            comm_mock.returncode = 0
            comm_mock.communicate.return_value = ls_pci.strip().encode(), b''
            popen_mock.return_value = comm_mock
            with patch('middlewared.utils.gpu.get_iommu_groups_info', lambda *args, **kwargs: iommu_group):
                gpus = get_gpus()[0]
                assert gpus['uses_system_critical_devices'] == uses_system_critical_devices
                assert gpus['critical_reason'] == critical_reason
24,382 | test_zettarepl.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_zettarepl.py | import pytest
import middlewared.plugins.zettarepl # noqa
import middlewared.plugins.zettarepl_.util # noqa
from middlewared.pytest.unit.helpers import load_compound_service
ZettareplService = load_compound_service("zettarepl")
@pytest.mark.parametrize("source_datasets,target_dataset,reversed_source_datasets,reversed_target_dataset", [
    (["tank/work"], "backup/tank-work",
     ["backup/tank-work"], "tank/work"),
    (["tank/work/alice", "tank/work/bob"], "backup/tank-work",
     ["backup/tank-work/alice", "backup/tank-work/bob"], "tank/work"),
])
@pytest.mark.asyncio
async def test__reverse_source_target_datasets(source_datasets, target_dataset, reversed_source_datasets,
                                               reversed_target_dataset):
    """Reversing a replication swaps source and target, mapping each source child under the old target."""
    zs = ZettareplService(None)
    assert await zs.reverse_source_target_datasets(source_datasets, target_dataset) == (
        reversed_source_datasets,
        reversed_target_dataset,
    )
24,383 | test_acl_inherit.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_acl_inherit.py | import pytest
from copy import deepcopy
from middlewared.plugins.filesystem_.utils import ACLType
NFS4_ACL = {'acl': [
{
'tag': 'GROUP',
'id': 100,
'perms': { 'BASIC': 'FULL_CONTROL' },
'flags': {
'FILE_INHERIT': True,
'DIRECTORY_INHERIT': True,
'INHERIT_ONLY': False,
'NO_PROPAGATE_INHERIT': False,
'INHERITED': False
},
'type': 'ALLOW'
},
{
'tag': 'GROUP',
'id': 200,
'perms': { 'BASIC': 'MODIFY' },
'flags': {
'FILE_INHERIT': False,
'DIRECTORY_INHERIT': True,
'INHERIT_ONLY': False,
'NO_PROPAGATE_INHERIT': False,
'INHERITED': False
},
'type': 'ALLOW'
},
{
'tag': 'GROUP',
'id': 300,
'perms': { 'BASIC': 'MODIFY' },
'flags': {
'FILE_INHERIT': True,
'DIRECTORY_INHERIT': False,
'INHERIT_ONLY': True,
'NO_PROPAGATE_INHERIT': False,
'INHERITED': False
},
'type': 'ALLOW'
},
{
'tag': 'GROUP',
'id': 400,
'perms': { 'BASIC': 'MODIFY' },
'flags': {
'FILE_INHERIT': True,
'DIRECTORY_INHERIT': True,
'INHERIT_ONLY': True,
'NO_PROPAGATE_INHERIT': True,
'INHERITED': False
},
'type': 'ALLOW'
},
{
'tag': 'GROUP',
'id': 500,
'perms': { 'BASIC': 'MODIFY' },
'flags': {
'FILE_INHERIT': False,
'DIRECTORY_INHERIT': False,
'INHERIT_ONLY': False,
'NO_PROPAGATE_INHERIT': False,
'INHERITED': False
},
'type': 'ALLOW'
},
{
'tag': 'GROUP',
'id': 600,
'perms': { 'BASIC': 'MODIFY' },
'flags': {
'FILE_INHERIT': True,
'DIRECTORY_INHERIT': True,
'INHERIT_ONLY': False,
'NO_PROPAGATE_INHERIT': True,
'INHERITED': False
},
'type': 'ALLOW'
},
], 'acltype': 'NFS4', 'trivial': False, 'uid': 0, 'gid': 0, 'path': '/mnt/dozer/SHARE'}
def test__nfs4_acl_inheritance():
    """Verify NFS4 ACL inheritance-flag propagation to child directories and files.

    The NFS4_ACL fixture covers the flag combinations:
      * 100: FILE_INHERIT + DIRECTORY_INHERIT          -> inherited by dirs and files
      * 200: DIRECTORY_INHERIT only                    -> inherited by dirs only
      * 300: FILE_INHERIT + INHERIT_ONLY               -> both (dir entry keeps INHERIT_ONLY)
      * 400: both inherit flags + NO_PROPAGATE_INHERIT -> both (dir entry keeps NO_PROPAGATE_INHERIT)
      * 500: no inheritance flags                      -> never inherited
      * 600: both inherit flags + NO_PROPAGATE_INHERIT, not INHERIT_ONLY
                                                       -> both, with inheritance flags cleared
    File entries always come back with all inheritance flags cleared and INHERITED set.
    """
    # Flags shared by every inherited entry: INHERITED set, everything else cleared.
    base_flags = {
        'FILE_INHERIT': False,
        'DIRECTORY_INHERIT': False,
        'INHERIT_ONLY': False,
        'NO_PROPAGATE_INHERIT': False,
        'INHERITED': True
    }
    dir_expected = {
        100: base_flags | {'FILE_INHERIT': True, 'DIRECTORY_INHERIT': True},
        200: base_flags | {'DIRECTORY_INHERIT': True},
        300: base_flags | {'FILE_INHERIT': True, 'INHERIT_ONLY': True},
        400: base_flags | {'FILE_INHERIT': True, 'DIRECTORY_INHERIT': True, 'NO_PROPAGATE_INHERIT': True},
        600: base_flags,
    }
    file_expected = {entry_id: base_flags for entry_id in (100, 300, 400, 600)}

    for is_dir, expected in ((True, dir_expected), (False, file_expected)):
        inherited = ACLType.NFS4.calculate_inherited(deepcopy(NFS4_ACL), is_dir)
        for entry in inherited:
            assert entry['id'] in expected, f'Unexpected entry: {entry["id"]}'
            assert entry['flags'] == expected[entry['id']], f'{entry["id"]}: flags do not match'
24,384 | test_port_attachments.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_port_attachments.py | import contextlib
import pytest
from unittest.mock import patch
from middlewared.plugins.ports.ports import PortService, ValidationErrors
from middlewared.pytest.unit.middleware import Middleware
PORTS_IN_USE = [
{
'namespace': 'snmp',
'title': 'SNMP Service',
'ports': [
[
'0.0.0.0',
160
],
[
'0.0.0.0',
161
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
160
],
[
'0.0.0.0',
161
]
]
}
]
},
{
'namespace': 'ssh',
'title': 'SSH Service',
'ports': [
[
'0.0.0.0',
22
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
22
]
]
}
]
},
{
'namespace': 'tftp',
'title': 'TFTP Service',
'ports': [
[
'0.0.0.0',
69
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
69
]
]
}
]
},
{
'namespace': 'kmip',
'title': 'KMIP Service',
'ports': [
[
'0.0.0.0',
5696
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
5696
]
]
}
]
},
{
'namespace': 'rsyncd',
'title': 'Rsyncd Service',
'ports': [
[
'0.0.0.0',
11000
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
11000
]
]
}
]
},
{
'namespace': 'webdav',
'title': 'Webdav Service',
'ports': [
[
'0.0.0.0',
10258
],
[
'0.0.0.0',
14658
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
10258
],
[
'0.0.0.0',
14658
]
]
}
]
},
{
'namespace': 'smb',
'title': 'SMB Service',
'ports': [
[
'0.0.0.0',
137
],
[
'0.0.0.0',
138
],
[
'0.0.0.0',
139
],
[
'0.0.0.0',
445
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
137
],
[
'0.0.0.0',
138
],
[
'0.0.0.0',
139
],
[
'0.0.0.0',
445
]
]
}
]
},
{
'namespace': 's3',
'title': 'S3 Service',
'ports': [
[
'192.168.0.70',
8703
],
[
'192.168.0.70',
9010
],
[
'2001:db8:3333:4444:5555:6666:7777:8888',
8704
]
],
'port_details': [
{
'description': None,
'ports': [
[
'192.168.0.70',
8703
],
[
'192.168.0.70',
9010
],
[
'2001:db8:3333:4444:5555:6666:7777:8888',
8704
]
]
}
]
},
{
'namespace': 'ftp',
'title': 'FTP Service',
'ports': [
[
'0.0.0.0',
3730
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
3730
]
]
}
]
},
{
'namespace': 'openvpn.server',
'title': 'Openvpn Server Service',
'ports': [
[
'0.0.0.0',
1194
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
1194
]
]
}
]
},
{
'namespace': 'system.general',
'title': 'WebUI Service',
'ports': [
[
'0.0.0.0',
80
],
[
'0.0.0.0',
443
],
[
'::',
8080
],
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
80
],
[
'0.0.0.0',
443
],
[
'::',
8080
],
]
}
]
},
{
'namespace': 'reporting',
'title': 'Reporting Service',
'ports': [
[
'0.0.0.0',
2003
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
2003
]
]
}
]
},
{
'namespace': 'iscsi.global',
'title': 'iSCSI Service',
'ports': [
[
'0.0.0.0',
3260
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
3260
]
]
}
]
},
{
'namespace': 'nfs',
'title': 'NFS Service',
'ports': [
[
'0.0.0.0',
2049
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
2049
]
]
}
]
},
{
'namespace': 'gluster.fuse',
'title': 'Gluster Service',
'ports': [
[
'0.0.0.0',
24007
],
[
'0.0.0.0',
24008
],
[
'::',
24008
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
24007
],
[
'0.0.0.0',
24008
],
[
'::',
24008
]
]
}
]
},
{
'title': 'System',
'ports': [
[
'0.0.0.0',
67
],
[
'0.0.0.0',
123
],
[
'0.0.0.0',
3702
],
[
'0.0.0.0',
5353
],
[
'0.0.0.0',
6000
],
[
'::',
68
]
],
'port_details': [
{
'description': None,
'ports': [
[
'0.0.0.0',
67
],
[
'::',
68
],
[
'0.0.0.0',
123
],
[
'0.0.0.0',
3702
],
[
'0.0.0.0',
5353
],
[
'0.0.0.0',
6000
]
]
}
],
'namespace': 'system'
}
]
@contextlib.contextmanager
def get_port_service():
    """Yield a PortService whose in-use port listing is patched to the static PORTS_IN_USE fixture."""
    with patch('middlewared.plugins.ports.ports.PortService.get_in_use') as get_in_use_port:
        get_in_use_port.return_value = PORTS_IN_USE
        yield PortService(Middleware())
@pytest.mark.parametrize('port,bindip,whitelist_namespace', [
    (67, '0.0.0.0', 'system'),
    (67, '192.168.0.12', 'system'),
    (24007, '0.0.0.0', 'gluster.fuse'),
    (24007, '192.168.0.12', 'gluster.fuse'),
    (68, '::', 'system'),
    (68, '2001:db8:3333:4444:5555:6666:7777:8888', 'system'),
    (24008, '::', 'gluster.fuse'),
    (24008, '2001:db8:3333:4444:5555:6666:7777:8888', 'gluster.fuse'),
])
@pytest.mark.asyncio
async def test_port_validate_whitelist_namespace_logic(port, bindip, whitelist_namespace):
    """A port already used by a namespace fails validation unless that namespace is whitelisted."""
    with get_port_service() as port_service:
        with pytest.raises(ValidationErrors):
            await port_service.validate_port('test', port, bindip, raise_error=True)
        assert (await port_service.validate_port('test', port, bindip, whitelist_namespace)).errors == []
@pytest.mark.parametrize('port,bindip,should_work', [
    (80, '0.0.0.0', False),
    (81, '0.0.0.0', True),
    (8703, '0.0.0.0', False),
    (8703, '192.168.0.70', False),
    (8703, '192.168.0.71', True),
    (9010, '0.0.0.0', False),
    (9010, '192.168.0.70', False),
    (9010, '192.168.0.71', True),
    (80, '::', True),
    (8080, '::', False),
    (8081, '::', True),
    (8703, '::', True),
    (8704, '::', False),
    (8704, '2001:db8:3333:4444:5555:6666:7777:8888', False),
    (8704, '2001:db8:3333:4444:5555:6666:7777:8889', True),
])
@pytest.mark.asyncio
async def test_port_validation_logic(port, bindip, should_work):
    """Ports collide on an exact bind-IP match or the matching-family wildcard; otherwise they validate."""
    with get_port_service() as port_service:
        if should_work:
            assert (await port_service.validate_port('test', port, bindip, raise_error=False)).errors == []
        else:
            with pytest.raises(ValidationErrors):
                await port_service.validate_port('test', port, bindip, raise_error=True)
| 12,252 | Python | .py | 517 | 9.408124 | 107 | 0.259553 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,385 | test_nfs.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_nfs.py | from unittest.mock import Mock
from middlewared.plugins.nfs import SharingNFSService
def test__sharing_nfs_service__validate_hosts_and_networks__host_is_32_network():
    """The same /32 host may appear in NFS shares exporting different paths (NAS-123042)."""
    middleware = Mock()
    verrors = Mock()
    SharingNFSService(middleware).validate_hosts_and_networks(
        [
            {
                "path": "/mnt/data/a",
                "hosts": ["192.168.0.1"],
                "networks": [],
            },
        ],
        {
            "path": "/mnt/data/b",
            "hosts": ["192.168.0.1"],
            "networks": [],
        },
        "sharingnfs_update",
        verrors,
        {
            "192.168.0.1": "192.168.0.1",
        },
    )
    # NAS-123042: A passing condition
    verrors.add.assert_not_called()
def test__sharing_nfs_service__validate_hosts_and_networks__dataset_is_already_exported():
middleware = Mock()
verrors = Mock()
SharingNFSService(middleware).validate_hosts_and_networks(
[
{
"path": "/mnt/data/a",
"hosts": [],
"networks": ["192.168.0.0/24"],
},
],
{
"path": "/mnt/data/b",
"hosts": [],
"networks": ["192.168.0.0/24"],
},
"sharingnfs_update",
verrors,
{
"192.168.0.1": "192.168.0.1",
},
)
# NAS-123042: A passing condition
verrors.add.assert_not_called()
def test__sharing_nfs_service__validate_hosts_and_networks__fs_is_already_exported_for_world():
    """Exporting distinct paths, one host-restricted and one world-open, must validate (NAS-120957)."""
    middleware = Mock()
    verrors = Mock()
    SharingNFSService(middleware).validate_hosts_and_networks(
        [
            {
                "path": "/mnt/data/a",
                "hosts": ["192.168.0.1"],
                "networks": [],
            },
        ],
        {
            "path": "/mnt/data/b",
            "hosts": [],
            "networks": [],
        },
        "sharingnfs_update",
        verrors,
        {
            "192.168.0.1": "192.168.0.1",
        },
    )
    # This is now a passing condition: NAS-120957
    verrors.add.assert_not_called()
| 2,120 | Python | .py | 74 | 19.013514 | 95 | 0.484983 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,386 | test_smb.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_smb.py | import pytest
from middlewared.plugins.smb_.util_smbconf import generate_smb_conf_dict
from middlewared.utils.directoryservices.constants import DSType
BASE_SMB_CONFIG = {
'id': 1,
'netbiosname': 'TESTSERVER',
'netbiosalias': ['BOB', 'LARRY'],
'workgroup': 'TESTDOMAIN',
'description': 'TrueNAS Server',
'unixcharset': 'UTF-8',
'loglevel': 'MINIMUM',
'syslog': False,
'aapl_extensions': False,
'localmaster': False,
'guest': 'nobody',
'filemask': '',
'dirmask': '',
'smb_options': '',
'bindip': [],
'cifs_SID': 'S-1-5-21-732395397-2008429054-3061640861',
'ntlmv1_auth': False,
'enable_smb1': False,
'admin_group': None,
'next_rid': 0,
'multichannel': False,
'encryption': 'DEFAULT',
'netbiosname_local': 'TESTSERVER'
}
SMB_SYSLOG = BASE_SMB_CONFIG | {'syslog': True}
SMB_AAPL = BASE_SMB_CONFIG | {'aapl_extensions': True}
SMB_LOCALMASTER = BASE_SMB_CONFIG | {'localmaster': True}
SMB_GUEST = BASE_SMB_CONFIG | {'guest': 'mike'}
SMB_BINDIP = BASE_SMB_CONFIG | {'bindip': ['192.168.0.250', '192.168.0.251']}
SMB_NTLMV1 = BASE_SMB_CONFIG | {'ntlmv1_auth': True}
SMB_SMB1 = BASE_SMB_CONFIG | {'enable_smb1': True}
SMB_MULTICHANNEL = BASE_SMB_CONFIG | {'multichannel': True}
SMB_OPTIONS = BASE_SMB_CONFIG | {'smb_options': 'canary = bob\n canary2 = bob2 \n #comment\n ;othercomment'}
SMB_ENCRYPTION_NEGOTIATE = BASE_SMB_CONFIG | {'encryption': 'NEGOTIATE'}
SMB_ENCRYPTION_DESIRED = BASE_SMB_CONFIG | {'encryption': 'DESIRED'}
SMB_ENCRYPTION_REQUIRED = BASE_SMB_CONFIG | {'encryption': 'REQUIRED'}
BASE_SMB_SHARE = {
'id': 1,
'purpose': 'NO_PRESET',
'path': '/mnt/dozer/BASE',
'path_suffix': '',
'home': False,
'name': 'TEST_HOME',
'comment': 'canary',
'browsable': True,
'ro': False,
'guestok': False,
'recyclebin': False,
'hostsallow': [],
'hostsdeny': [],
'auxsmbconf': '',
'aapl_name_manging': False,
'abe': False,
'acl': True,
'durablehandle': True,
'streams': True,
'timemachine': False,
'timemachine_quota': 0,
'vuid': '',
'shadowcopy': True,
'fsrvp': False,
'enabled': True,
'afp': False,
'audit': {
'enable': False,
'watch_list': [],
'ignore_list': []
},
'path_local': '/mnt/dozer/BASE',
'locked': False
}
HOMES_SHARE = BASE_SMB_SHARE | {'path_suffix': '%U', 'home': True}
FSRVP_SHARE = BASE_SMB_SHARE | {'fsrvp': True}
GUEST_SHARE = BASE_SMB_SHARE | {'guestok': True}
BASE_IDMAP = [
{
'id': 1,
'name': 'DS_TYPE_ACTIVEDIRECTORY',
'dns_domain_name': None,
'range_low': 100000001,
'range_high': 200000000,
'idmap_backend': 'RID',
'options': {},
'certificate': None
},
{
'id': 2,
'name': 'DS_TYPE_LDAP',
'dns_domain_name': None,
'range_low': 10000,
'range_high': 90000000,
'idmap_backend': 'LDAP',
'options': {
'ldap_base_dn': '',
'ldap_user_dn': '',
'ldap_url': '',
'ssl': 'OFF'
},
'certificate': None
},
{
'id': 5,
'name': 'DS_TYPE_DEFAULT_DOMAIN',
'dns_domain_name': None,
'range_low': 90000001,
'range_high': 100000000,
'idmap_backend': 'TDB',
'options': {},
'certificate': None
}
]
ADDITIONAL_DOMAIN = {
'id': 6,
'name': 'BOBDOM',
'dns_domain_name': None,
'range_low': 200000001,
'range_high': 300000000,
'idmap_backend': 'RID',
'options': {},
'certificate': None
}
AUTORID_DOMAIN = {
'id': 1,
'name': 'DS_TYPE_ACTIVEDIRECTORY',
'dns_domain_name': None,
'range_low': 10000,
'range_high': 200000000,
'idmap_backend': 'AUTORID',
'options': {
'rangesize': 100000,
'readonly': False,
'ignore_builtin': False,
},
'certificate': None
}
BASE_AD_CONFIG = {
'id': 1,
'domainname': 'TESTDOMAIN.IXSYSTEMS.COM',
'bindname': '',
'verbose_logging': False,
'allow_trusted_doms': False,
'use_default_domain': False,
'allow_dns_updates': True,
'disable_freenas_cache': False,
'restrict_pam': False,
'site': None,
'timeout': 60,
'dns_timeout': 10,
'nss_info': 'TEMPLATE',
'enable': True,
'kerberos_principal': 'TESTSERVER$@TESTDOMAIN.IXSYSTEMS.COM',
'create_computer': None,
'kerberos_realm': 1,
'netbiosname': 'TESTSERVER',
'netbiosalias': [],
}
TRUSTED_DOMS = BASE_AD_CONFIG | {'allow_trusted_doms': True}
USE_DEFAULT_DOM = BASE_AD_CONFIG | {'use_default_domain': True}
DISABLE_ENUM = BASE_AD_CONFIG | {'disable_freenas_cache': True}
BASE_IPA_CONFIG = {
'id': 1,
'hostname': ['ipatest1.testdom.test'],
'basedn': 'dc=testdom,dc=test',
'binddn': 'uid=ipaadmin,cn=users,cn=accounts,dc=testdom,dc=test',
'bindpw': '',
'anonbind': False,
'ssl': 'ON', 'timeout': 30,
'dns_timeout': 5,
'has_samba_schema': False,
'auxiliary_parameters': '',
'schema': 'RFC2307',
'enable': True,
'kerberos_principal': 'host/awalkertest5.tn.ixsystems.net@TN.IXSYSTEMS.NET',
'validate_certificates': True,
'disable_freenas_cache': False,
'server_type': 'FREEIPA',
'certificate': None,
'kerberos_realm': 1,
'cert_name': None,
'uri_list': ['ldaps://ipatest1.testdom.test:636'],
'ipa_config': {
'realm': 'TESTDOM.TEST',
'domain': 'testdom.test',
'basedn': 'dc=testdom,dc=test',
'host': 'awalkertest5.testdom.test',
'target_server': 'ipatest1.testdom.test',
'username': 'ipaadmin'
},
'ipa_domain': {
'netbios_name': 'TN',
'domain_sid': 'S-1-5-21-157882827-213361071-3806343854',
'domain_name': 'testdom.test',
'range_id_min': 925000000,
'range_id_max': 925199999
}
}
BIND_IP_CHOICES = {"192.168.0.250": "192.168.0.250"}
def test__base_smb():
    """Standalone server: verify the baseline smb.conf parameters."""
    cfg = generate_smb_conf_dict(None, None, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['netbios name'] == 'TESTSERVER'
    assert cfg['netbios aliases'] == 'BOB LARRY'
    assert cfg['workgroup'] == 'TESTDOMAIN'
    assert cfg['server string'] == 'TrueNAS Server'
    assert cfg['obey pam restrictions'] is False
    assert cfg['restrict anonymous'] == 2
    assert cfg['guest account'] == 'nobody'
    assert cfg['local master'] is False
    assert cfg['ntlm auth'] is False
    assert 'server min protocol' not in cfg
    assert cfg['server multichannel support'] is False
    assert cfg['idmap config * : backend'] == 'tdb'
    assert cfg['idmap config * : range'] == '90000001 - 100000000'
    assert cfg['server smb encrypt'] == 'default'
    assert cfg['directory mask'] == '0775'
    assert cfg['create mask'] == '0664'
    assert cfg['zfs_core:zfs_integrity_streams'] is False
    assert cfg['zfs_core:zfs_block_cloning'] is False
def test__base_smb_enterprise():
    """The enterprise flag enables the zfs_core VFS features."""
    cfg = generate_smb_conf_dict(None, None, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP, True)
    assert cfg['zfs_core:zfs_integrity_streams'] is True
    assert cfg['zfs_core:zfs_block_cloning'] is True
def test__syslog():
    """Syslog option routes samba logging to syslog in addition to files."""
    cfg = generate_smb_conf_dict(None, None, SMB_SYSLOG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['logging'] == 'syslog@1 file'
def test__localmaster():
    """Local-master option flips the corresponding smb.conf parameter on."""
    cfg = generate_smb_conf_dict(None, None, SMB_LOCALMASTER, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['local master'] is True
def test__guestaccount():
    """A custom guest account is carried through to the config."""
    cfg = generate_smb_conf_dict(None, None, SMB_GUEST, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['guest account'] == 'mike'
def test__bindip():
    """Bind IPs appear as interfaces along with the mandatory loopback."""
    cfg = generate_smb_conf_dict(None, None, SMB_BINDIP, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert set(cfg['interfaces'].split(' ')) == {'192.168.0.250', '127.0.0.1'}
def test__ntlmv1auth():
    """NTLMv1 option enables legacy 'ntlm auth'."""
    cfg = generate_smb_conf_dict(None, None, SMB_NTLMV1, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['ntlm auth'] is True
def test__smb1_enable():
    """Enabling SMB1 lowers the minimum server protocol to NT1."""
    cfg = generate_smb_conf_dict(None, None, SMB_SMB1, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['server min protocol'] == 'NT1'
def test__smb_options():
    """Free-form auxiliary options are passed through verbatim."""
    cfg = generate_smb_conf_dict(None, None, SMB_OPTIONS, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['canary'] == 'bob'
    assert cfg['canary2'] == 'bob2'
def test__multichannel():
    """Multichannel option enables server multichannel support."""
    cfg = generate_smb_conf_dict(None, None, SMB_MULTICHANNEL, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['server multichannel support'] is True
def test__homes_share():
    """Presence of a homes share forces PAM restrictions on."""
    cfg = generate_smb_conf_dict(None, None, BASE_SMB_CONFIG, [HOMES_SHARE], BIND_IP_CHOICES, BASE_IDMAP)
    assert 'obey pam restrictions' in cfg
    assert cfg['obey pam restrictions'] is True
def test__guest_share():
    """A guest-enabled share relaxes 'restrict anonymous' to 0."""
    cfg = generate_smb_conf_dict(None, None, BASE_SMB_CONFIG, [GUEST_SHARE], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['restrict anonymous'] == 0
def test__fsrvp_share():
    """An FSRVP share turns on the fssd RPC daemon and snapshot pruning."""
    cfg = generate_smb_conf_dict(None, None, BASE_SMB_CONFIG, [FSRVP_SHARE], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['rpc_daemon:fssd'] == 'fork'
    assert cfg['fss:prune stale'] is True
def test__ad_base():
    """AD-joined server: verify realm, winbind, and idmap defaults."""
    cfg = generate_smb_conf_dict(DSType.AD, BASE_AD_CONFIG, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['realm'] == 'TESTDOMAIN.IXSYSTEMS.COM'
    assert cfg['winbind use default domain'] is False
    assert cfg['allow trusted domains'] is False
    assert cfg['template homedir'] == '/var/empty'
    assert cfg['winbind enum users'] is True
    assert cfg['winbind enum groups'] is True
    assert cfg['local master'] is False
    assert cfg['domain master'] is False
    assert cfg['idmap config * : backend'] == 'tdb'
    assert cfg['idmap config * : range'] == '90000001 - 100000000'
    assert cfg['idmap config TESTDOMAIN : backend'] == 'rid'
    assert cfg['idmap config TESTDOMAIN : range'] == '100000001 - 200000000'
def test__ad_homes_share():
    """AD + homes share sets PAM restrictions and a per-domain home dir."""
    cfg = generate_smb_conf_dict(DSType.AD, BASE_AD_CONFIG, BASE_SMB_CONFIG, [HOMES_SHARE], BIND_IP_CHOICES, BASE_IDMAP)
    assert 'obey pam restrictions' in cfg
    assert cfg['obey pam restrictions'] is True
    assert 'template homedir' in cfg
    assert cfg['template homedir'] == '/mnt/dozer/BASE/%D/%U'
def test__ad_enumeration():
    """Disabling the user cache turns off winbind enumeration."""
    cfg = generate_smb_conf_dict(DSType.AD, DISABLE_ENUM, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['winbind enum users'] is False
    assert cfg['winbind enum groups'] is False
def test__ad_trusted_doms():
    """allow_trusted_doms maps to 'allow trusted domains'."""
    cfg = generate_smb_conf_dict(DSType.AD, TRUSTED_DOMS, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['allow trusted domains'] is True
def test__ad_default_domain():
    """use_default_domain maps to 'winbind use default domain'."""
    cfg = generate_smb_conf_dict(DSType.AD, USE_DEFAULT_DOM, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['winbind use default domain'] is True
def test__ad_additional_domain():
    """An extra idmap domain yields its own idmap config entries."""
    cfg = generate_smb_conf_dict(
        DSType.AD, TRUSTED_DOMS, BASE_SMB_CONFIG, [],
        BIND_IP_CHOICES, BASE_IDMAP + [ADDITIONAL_DOMAIN],
    )
    assert cfg['idmap config BOBDOM : backend'] == 'rid'
    assert cfg['idmap config BOBDOM : range'] == '200000001 - 300000000'
def test__ad_autorid():
    """AUTORID as primary-domain backend replaces the default tdb entry."""
    cfg = generate_smb_conf_dict(
        DSType.AD, BASE_AD_CONFIG, BASE_SMB_CONFIG, [],
        BIND_IP_CHOICES, [AUTORID_DOMAIN, BASE_IDMAP[1], BASE_IDMAP[2]],
    )
    assert cfg['idmap config * : backend'] == 'autorid'
    assert cfg['idmap config * : range'] == '10000 - 200000000'
def test__encryption_negotiate():
    """NEGOTIATE encryption maps to 'if_required'."""
    cfg = generate_smb_conf_dict(None, None, SMB_ENCRYPTION_NEGOTIATE, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['server smb encrypt'] == 'if_required'
def test__encryption_desired():
    """DESIRED encryption maps to 'desired'."""
    cfg = generate_smb_conf_dict(None, None, SMB_ENCRYPTION_DESIRED, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['server smb encrypt'] == 'desired'
def test__encryption_required():
    """REQUIRED encryption maps to 'required'."""
    cfg = generate_smb_conf_dict(None, None, SMB_ENCRYPTION_REQUIRED, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['server smb encrypt'] == 'required'
def test__ipa_base():
    """IPA-joined server: verify realm, keytab, and sss idmap settings."""
    cfg = generate_smb_conf_dict(DSType.IPA, BASE_IPA_CONFIG, BASE_SMB_CONFIG, [], BIND_IP_CHOICES, BASE_IDMAP)
    assert cfg['workgroup'] == 'TN'
    assert cfg['server role'] == 'member server'
    assert cfg['kerberos method'] == 'dedicated keytab'
    assert cfg['dedicated keytab file'] == 'FILE:/etc/ipa/smb.keytab'
    assert cfg['realm'] == 'TESTDOM.TEST'
    assert cfg['idmap config TN : backend'] == 'sss'
    assert cfg['idmap config TN : range'] == '925000000 - 925199999'
| 13,232 | Python | .py | 397 | 27.712846 | 108 | 0.617348 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,387 | test_datastore.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_datastore.py | from contextlib import asynccontextmanager
import datetime
from unittest.mock import ANY, patch
import pytest
import sqlalchemy as sa
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from middlewared.sqlalchemy import EncryptedText, JSON, Time
import middlewared.plugins.datastore # noqa
import middlewared.plugins.datastore.connection # noqa
import middlewared.plugins.datastore.schema # noqa
import middlewared.plugins.datastore.util # noqa
from middlewared.pytest.unit.helpers import load_compound_service
from middlewared.pytest.unit.middleware import Middleware
# Compound service assembled from all of middlewared's "datastore" parts.
DatastoreService = load_compound_service("datastore")
# Dedicated declarative base so test models don't touch the real metadata.
Model = declarative_base()
@asynccontextmanager
async def datastore_test():
    """Yield a DatastoreService backed by an in-memory SQLite database.

    Patches the database path and the Model base so the test models defined
    in this module are created in a fresh database, then wires the service's
    public methods into the mock Middleware instance.
    """
    m = Middleware()
    with patch("middlewared.plugins.datastore.connection.FREENAS_DATABASE", ":memory:"):
        with patch("middlewared.plugins.datastore.schema.Model", Model):
            with patch("middlewared.plugins.datastore.util.Model", Model):
                ds = DatastoreService(m)
                ds.setup()
                # Create the test tables on whichever service part owns the
                # actual database connection.
                for part in ds.parts:
                    if hasattr(part, "connection"):
                        Model.metadata.create_all(bind=part.connection)
                        break
                else:
                    raise RuntimeError("Could not find part that provides connection")
                # Expose the service methods through the mock middleware so
                # internal middleware.call() invocations resolve.
                m["datastore.execute"] = ds.execute
                m["datastore.execute_write"] = ds.execute_write
                m["datastore.fetchall"] = ds.fetchall
                m["datastore.query"] = ds.query
                m["datastore.send_insert_events"] = ds.send_insert_events
                m["datastore.send_update_events"] = ds.send_update_events
                m["datastore.send_delete_events"] = ds.send_delete_events
                m["datastore.insert"] = ds.insert
                m["datastore.update"] = ds.update
                m["datastore.delete"] = ds.delete
                yield ds
# User row referencing a group (non-cascading FK).
class UserModel(Model):
    __tablename__ = 'account_bsdusers'

    id = sa.Column(sa.Integer(), primary_key=True)
    bsdusr_uid = sa.Column(sa.Integer(), nullable=False)
    bsdusr_group_id = sa.Column(sa.ForeignKey('account_bsdgroups.id'), nullable=False)


# Group row; target of the FK relations above and below.
class GroupModel(Model):
    __tablename__ = 'account_bsdgroups'

    id = sa.Column(sa.Integer(), primary_key=True)
    bsdgrp_gid = sa.Column(sa.Integer(), nullable=False)


# Join row linking users to groups (both FKs cascade on delete).
class GroupMembershipModel(Model):
    __tablename__ = 'account_bsdgroupmembership'

    id = sa.Column(sa.Integer(), primary_key=True)
    bsdgrpmember_group_id = sa.Column(sa.Integer(), sa.ForeignKey("account_bsdgroups.id", ondelete="CASCADE"),
                                      nullable=False)
    bsdgrpmember_user_id = sa.Column(sa.Integer(), sa.ForeignKey("account_bsdusers.id", ondelete="CASCADE"),
                                     nullable=False)


# Like UserModel but the group FK cascades on delete.
class UserCascadeModel(Model):
    __tablename__ = 'account_bsdusers_cascade'

    id = sa.Column(sa.Integer(), primary_key=True)
    bsdusr_uid = sa.Column(sa.Integer(), nullable=False)
    bsdusr_group_id = sa.Column(sa.ForeignKey('account_bsdgroups.id', ondelete='CASCADE'), nullable=False)
@pytest.mark.asyncio
async def test__relationship_load():
    """Foreign keys are expanded into nested dicts, recursively."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (10, 1010)")
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")
        ds.execute("INSERT INTO `account_bsdgroupmembership` VALUES (1, 10, 5)")
        assert await ds.query("account.bsdgroupmembership") == [
            {
                "id": 1,
                "bsdgrpmember_group": {
                    "id": 10,
                    "bsdgrp_gid": 1010,
                },
                "bsdgrpmember_user": {
                    "id": 5,
                    "bsdusr_uid": 55,
                    "bsdusr_group": {
                        "id": 20,
                        "bsdgrp_gid": 2020,
                    },
                }
            }
        ]


@pytest.mark.asyncio
async def test__filter_join():
    """Filtering on a related table's column via the __ join syntax."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (10, 1010)")
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (4, 44, 10)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")

        result = await ds.query("account.bsdusers", [("bsdusr_group__bsdgrp_gid", "=", 2020)])
        assert len(result) == 1
        assert result[0]["id"] == 5


@pytest.mark.asyncio
async def test__prefix():
    """The prefix option strips the column prefix from result keys."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")
        assert await ds.query("account.bsdusers", [], {"prefix": "bsdusr_"}) == [
            {
                "id": 5,
                "uid": 55,
                "group": {
                    "id": 20,
                    "bsdgrp_gid": 2020,
                },
            }
        ]
@pytest.mark.asyncio
async def test__prefix_filter():
    """Filters use un-prefixed names when a prefix is configured."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")
        assert await ds.query("account.bsdusers", [("uid", "=", 55)], {"prefix": "bsdusr_"}) == [
            {
                "id": 5,
                "uid": 55,
                "group": {
                    "id": 20,
                    "bsdgrp_gid": 2020,
                },
            }
        ]
        assert await ds.query("account.bsdusers", [("uid", "=", 56)], {"prefix": "bsdusr_"}) == []
        # Unknown column name must error rather than silently match nothing.
        with pytest.raises(Exception):
            assert await ds.query("account.bsdusers", [("uuid", "=", 55)], {"prefix": "bsdusr_"})


@pytest.mark.asyncio
async def test__fk_filter():
    """A foreign-key column can be filtered by its raw id value."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")
        assert await ds.query("account.bsdusers", [("group", "=", 20)], {"prefix": "bsdusr_"}) == [
            {
                "id": 5,
                "uid": 55,
                "group": {
                    "id": 20,
                    "bsdgrp_gid": 2020,
                },
            }
        ]


@pytest.mark.asyncio
async def test__inserted_primary_key():
    """insert() returns the auto-generated integer primary key."""
    async with datastore_test() as ds:
        assert await ds.insert("account.bsdgroups", {"bsdgrp_gid": 5}) == 1
        assert await ds.insert("account.bsdgroups", {"bsdgrp_gid": 10}) == 2
@pytest.mark.asyncio
async def test__update_filter__too_much_rows():
    """update() by filter refuses to touch more than one row."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (30, 3030)")

        with pytest.raises(RuntimeError):
            await ds.update("account_bsdgroups", [("bsdgrp_gid", ">", 1000)], {"bsdgrp_gid": 1000})


@pytest.mark.asyncio
async def test__update_fk():
    """Updating a FK field rewrites the underlying *_id column."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (30, 3030)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")

        await ds.update("account_bsdusers", 5, {"bsdusr_uid": 100, "bsdusr_group": 30})
        # The write hook must see the SQL with the *_id column name.
        ds.middleware.call_hook_inline.assert_called_once_with(
            "datastore.post_execute_write",
            "UPDATE account_bsdusers SET bsdusr_uid=?, bsdusr_group_id=? WHERE account_bsdusers.id = ?",
            [100, 30, 5],
            ANY,
        )


@pytest.mark.asyncio
async def test__bad_fk_update():
    """Updating a nonexistent row raises RuntimeError."""
    async with datastore_test() as ds:
        with pytest.raises(RuntimeError):
            ds.execute("INSERT INTO `account_bsdgroups` VALUES (5, 50)")

            assert await ds.update("account.bsdgroups", 1, {"bsdgrp_gid": 5})


@pytest.mark.asyncio
async def test__bad_fk_insert():
    """Inserting with a dangling FK violates integrity."""
    async with datastore_test() as ds:
        with pytest.raises(IntegrityError):
            assert await ds.insert("account.bsdusers", {"bsdusr_uid": 100, "bsdusr_group": 30})
@pytest.mark.asyncio
async def test__bad_fk_load():
    """A dangling FK (inserted with FK checks off) loads as None."""
    async with datastore_test() as ds:
        ds.execute("PRAGMA foreign_keys=OFF")
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 21)")
        assert await ds.query("account.bsdusers", [], {"prefix": "bsdusr_"}) == [
            {
                "id": 5,
                "uid": 55,
                "group": None,
            }
        ]


@pytest.mark.asyncio
async def test__delete_fk():
    """Deleting a referenced row without cascade violates integrity."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers` VALUES (5, 55, 20)")

        with pytest.raises(IntegrityError):
            await ds.delete("account.bsdgroups", 20)


@pytest.mark.asyncio
async def test__delete_fk_cascade():
    """ON DELETE CASCADE allows deleting the referenced group."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `account_bsdgroups` VALUES (20, 2020)")
        ds.execute("INSERT INTO `account_bsdusers_cascade` VALUES (5, 55, 20)")

        await ds.delete("account.bsdgroups", 20)
        assert await ds.query("account.bsdgroups") == []


@pytest.mark.asyncio
async def test__get_backrefs():
    """get_backrefs lists (datastore, field) pairs that reference a table."""
    async with datastore_test() as ds:
        assert await ds.get_backrefs("account.bsdgroups") == [
            ("account.bsdusers", "bsdusr_group"),
        ]
# Row with an optional FK, to exercise NULL relation loading.
class NullableFkModel(Model):
    __tablename__ = 'test_nullablefk'

    id = sa.Column(sa.Integer(), primary_key=True)
    user_id = sa.Column(sa.Integer(), sa.ForeignKey("account_bsdusers.id"), nullable=True)


@pytest.mark.asyncio
async def test__null_fk_load():
    """A NULL FK column loads as None rather than a nested dict."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO `test_nullablefk` VALUES (1, NULL)")
        assert await ds.query("test.nullablefk") == [
            {
                "id": 1,
                "user": None,
            }
        ]


# Minimal table for the string-filter parametrized tests below.
class StringModel(Model):
    __tablename__ = 'test_string'

    id = sa.Column(sa.Integer(), primary_key=True)
    string = sa.Column(sa.String(100), nullable=True)
# Each case: query-filter expression -> ids expected to match
# (regex, in/nin incl. NULL handling, prefix ^ and suffix $ operators).
@pytest.mark.parametrize("filter_,ids", [
    ([("string", "~", "(e|u)m")], [1, 2]),
    ([("string", "~", "L?rem")], [1]),
    ([("string", "in", ["Ipsum", "dolor"])], [2]),
    ([("string", "nin", ["Ipsum", "dolor"])], [1]),
    ([("string", "in", [None, "Ipsum"])], [2, 3]),
    ([("string", "nin", [None, "Ipsum"])], [1]),
    ([("string", "in", [None])], [3]),
    ([("string", "nin", [None])], [1, 2]),
    ([("string", "^", "Lo")], [1]),
    ([("string", "$", "um")], [2]),
])
@pytest.mark.asyncio
async def test__string_filters(filter_, ids):
    """String filter operators match the expected subset of rows."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_string VALUES (1, 'Lorem')")
        ds.execute("INSERT INTO test_string VALUES (2, 'Ipsum')")
        ds.execute("INSERT INTO test_string VALUES (3, NULL)")
        assert [row["id"] for row in await ds.query("test.string", filter_)] == ids


@pytest.mark.asyncio
async def test_delete_not_in():
    """delete() accepts a 'nin' filter and removes all non-matching rows."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_string VALUES (1, 'Lorem')")
        ds.execute("INSERT INTO test_string VALUES (2, 'Ipsum')")
        ds.execute("INSERT INTO test_string VALUES (3, 'dolor')")
        ds.execute("INSERT INTO test_string VALUES (4, 'sit')")
        ds.execute("INSERT INTO test_string VALUES (5, 'amer')")
        await ds.delete("test_string", [["string", "nin", ["Lorem", "dolor"]]])
        assert [row["id"] for row in await ds.query("test.string")] == [1, 3]
# Minimal table for comparison / ordering tests.
class IntegerModel(Model):
    __tablename__ = 'test_integer'

    id = sa.Column(sa.Integer(), primary_key=True)
    integer = sa.Column(sa.Integer())


# AND is implicit between filter terms; OR is an explicit connective.
@pytest.mark.parametrize("filter_,ids", [
    ([("integer", ">", 1), ("integer", "<", 5)], [2, 3, 4]),
    ([("OR", [("integer", ">=", 4), ("integer", "<=", 2)])], [1, 2, 4, 5]),
])
@pytest.mark.asyncio
async def test__logic(filter_, ids):
    """AND/OR filter combinations select the expected rows."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_integer VALUES (1, 1)")
        ds.execute("INSERT INTO test_integer VALUES (2, 2)")
        ds.execute("INSERT INTO test_integer VALUES (3, 3)")
        ds.execute("INSERT INTO test_integer VALUES (4, 4)")
        ds.execute("INSERT INTO test_integer VALUES (5, 5)")
        assert [row["id"] for row in await ds.query("test.integer", filter_)] == ids


# A leading '-' on an order_by field means descending.
@pytest.mark.parametrize("order_by,ids", [
    (["integer", "id"], [1, 2, 3, 4]),
    (["integer", "-id"], [1, 3, 2, 4]),
])
@pytest.mark.asyncio
async def test__order_by(order_by, ids):
    """Multi-key ordering with ascending/descending direction."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_integer VALUES (1, 1)")
        ds.execute("INSERT INTO test_integer VALUES (2, 2)")
        ds.execute("INSERT INTO test_integer VALUES (3, 2)")
        ds.execute("INSERT INTO test_integer VALUES (4, 3)")
        assert [row["id"] for row in await ds.query("test.integer", [], {"order_by": order_by})] == ids
# Table with a JSON-typed column.
class JSONModel(Model):
    __tablename__ = 'test_json'

    id = sa.Column(sa.Integer(), primary_key=True)
    object = sa.Column(JSON())


# Invalid stored JSON must degrade to an empty dict, not raise.
@pytest.mark.parametrize("string,object_", [
    ('{"key": "value"}', {"key": "value"}),
    ('{"key": "value"', {}),
])
@pytest.mark.asyncio
async def test__json_load(string, object_):
    """JSON columns deserialize on load (invalid JSON -> {})."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_json VALUES (1, ?)", string)
        assert (await ds.query("test.json", [], {"get": True}))["object"] == object_


@pytest.mark.asyncio
async def test__json_save():
    """JSON columns serialize to a JSON string on insert."""
    async with datastore_test() as ds:
        await ds.insert("test.json", {"object": {"key": "value"}})
        assert (ds.fetchall("SELECT * FROM test_json"))[0]["object"] == '{"key": "value"}'


# JSON column stored encrypted at rest.
class EncryptedJSONModel(Model):
    __tablename__ = 'test_encryptedjson'

    id = sa.Column(sa.Integer(), primary_key=True)
    object = sa.Column(JSON(encrypted=True))


# Plain text column stored encrypted at rest.
class EncryptedTextModel(Model):
    __tablename__ = 'test_encryptedtext'

    id = sa.Column(sa.Integer(), primary_key=True)
    object = sa.Column(EncryptedText(), nullable=True)
def decrypt(s, _raise=False):
    """Test stand-in for middlewared's decrypt(); '!' marks "encrypted" text.

    The datastore layer is expected to always pass ``_raise=True``.
    """
    assert _raise is True
    if s.startswith("!"):
        return s[1:]
    raise Exception("Decryption failed")
def decrypt_safe(s):
    """Like decrypt(), but returns '' for undecryptable input instead of raising."""
    return s[1:] if s.startswith("!") else ""
def encrypt(s):
    """Test stand-in for middlewared's encrypt(): prefix with the '!' marker."""
    return "!" + s
# Decryption failure or post-decryption JSON corruption -> empty dict.
@pytest.mark.parametrize("string,object_", [
    ('!{"key":"value"}', {"key": "value"}),
    ('!{"key":"value"', {}),
    ('{"key":"value"}', {}),
])
@pytest.mark.asyncio
async def test__encrypted_json_load(string, object_):
    """Encrypted JSON is decrypted then parsed; failures degrade to {}."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_encryptedjson VALUES (1, ?)", string)

        with patch("middlewared.sqlalchemy.decrypt", decrypt):
            assert (await ds.query("test.encryptedjson", [], {"get": True}))["object"] == object_


@pytest.mark.asyncio
async def test__encrypted_json_save():
    """Encrypted JSON is serialized then encrypted before hitting the DB."""
    async with datastore_test() as ds:
        with patch("middlewared.sqlalchemy.encrypt", encrypt):
            await ds.insert("test.encryptedjson", {"object": {"key": "value"}})

        assert (ds.fetchall("SELECT * FROM test_encryptedjson"))[0]["object"] == '!{"key": "value"}'
        # The write hook must receive the ciphertext, never the plaintext.
        ds.middleware.call_hook_inline.assert_called_once_with(
            "datastore.post_execute_write",
            "INSERT INTO test_encryptedjson (object) VALUES (?)",
            ['!{"key": "value"}'],
            ANY,
        )
# Undecryptable encrypted text degrades to an empty string.
@pytest.mark.parametrize("string,object_", [
    ('!Text', 'Text'),
    ('Text', ''),
])
@pytest.mark.asyncio
async def test__encrypted_text_load(string, object_):
    """Encrypted text columns are decrypted on load ('' on failure)."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_encryptedtext VALUES (1, ?)", string)

        with patch("middlewared.sqlalchemy.decrypt", decrypt_safe):
            assert (await ds.query("test.encryptedtext", [], {"get": True}))["object"] == object_


@pytest.mark.asyncio
async def test__encrypted_text_save():
    """Encrypted text columns are encrypted before hitting the DB."""
    async with datastore_test() as ds:
        with patch("middlewared.sqlalchemy.encrypt", encrypt):
            await ds.insert("test.encryptedtext", {"object": 'Text'})

        assert (ds.fetchall("SELECT * FROM test_encryptedtext"))[0]["object"] == '!Text'
        # The write hook must receive the ciphertext, never the plaintext.
        ds.middleware.call_hook_inline.assert_called_once_with(
            "datastore.post_execute_write",
            "INSERT INTO test_encryptedtext (object) VALUES (?)",
            ['!Text'],
            ANY,
        )


@pytest.mark.asyncio
async def test__encrypted_text_load_null():
    """NULL bypasses decryption entirely and loads as None."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_encryptedtext VALUES (1, NULL)")

        with patch("middlewared.sqlalchemy.decrypt", decrypt_safe):
            assert (await ds.query("test.encryptedtext", [], {"get": True}))["object"] is None


@pytest.mark.asyncio
async def test__encrypted_text_save_null():
    """None bypasses encryption entirely and is stored as NULL."""
    async with datastore_test() as ds:
        with patch("middlewared.sqlalchemy.encrypt", encrypt):
            await ds.insert("test.encryptedtext", {"object": None})

        assert (ds.fetchall("SELECT * FROM test_encryptedtext"))[0]["object"] is None
        ds.middleware.call_hook_inline.assert_called_once_with(
            "datastore.post_execute_write",
            "INSERT INTO test_encryptedtext (object) VALUES (?)",
            [None],
            ANY,
        )
# Table whose primary key is a non-'id' string column.
class CustomPkModel(Model):
    __tablename__ = 'test_custompk'

    custom_identifier = sa.Column(sa.String(42), primary_key=True)
    custom_name = sa.Column(sa.String(120))


@pytest.mark.asyncio
async def test__custom_pk_query():
    """Querying by a string primary key with a column prefix."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_custompk VALUES ('ID1', 'Test 1')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID2', 'Test 2')")

        result = await ds.query("test.custompk", [("identifier", "=", "ID1")], {"prefix": "custom_", "get": True})
        assert result == {"identifier": "ID1", "name": "Test 1"}


@pytest.mark.asyncio
async def test__custom_pk_count():
    """count option works on tables with a custom primary key."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_custompk VALUES ('ID1', 'Test 1')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID2', 'Test 2')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID3', 'Other Test')")

        assert await ds.query("test.custompk", [("name", "^", "Test")], {"prefix": "custom_", "count": True}) == 2


@pytest.mark.asyncio
async def test__custom_pk_update():
    """update() accepts a string primary key value."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_custompk VALUES ('ID1', 'Test 1')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID2', 'Test 2')")

        await ds.update("test.custompk", "ID1", {"name": "Updated"}, {"prefix": "custom_"})

        result = await ds.query("test.custompk", [("identifier", "=", "ID1")], {"prefix": "custom_", "get": True})
        assert result == {"identifier": "ID1", "name": "Updated"}


@pytest.mark.asyncio
async def test__custom_pk_delete():
    """delete() accepts a string primary key value."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_custompk VALUES ('ID1', 'Test 1')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID2', 'Test 2')")

        await ds.delete("test.custompk", "ID1")

        assert await ds.query("test.custompk", [], {"count": True}) == 1


@pytest.mark.asyncio
async def test__delete_by_filter():
    """delete() accepts a filter list instead of a primary key."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO test_custompk VALUES ('ID1', 'Test 1')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID2', 'Test 2')")
        ds.execute("INSERT INTO test_custompk VALUES ('ID3', 'Other Test')")

        await ds.delete("test.custompk", [("custom_name", "^", "Test")])

        assert await ds.query("test.custompk", [], {"count": True}) == 1
# Minimal disk table, target of the many-to-many relation below.
class DiskModel(Model):
    __tablename__ = 'storage_disk'

    id = sa.Column(sa.Integer(), primary_key=True)


# Table with a many-to-many relation to disks via a secondary table.
class SMARTTestModel(Model):
    __tablename__ = 'tasks_smarttest'

    id = sa.Column(sa.Integer(), primary_key=True)

    smarttest_disks = relationship('DiskModel', secondary=lambda: SMARTTestDiskModel.__table__)


# Association table backing the many-to-many relation above.
class SMARTTestDiskModel(Model):
    __tablename__ = 'tasks_smarttest_smarttest_disks'

    id = sa.Column(sa.Integer(), primary_key=True)
    smarttest_id = sa.Column(sa.Integer(), sa.ForeignKey('tasks_smarttest.id'))
    disk_id = sa.Column(sa.Integer(), sa.ForeignKey('storage_disk.id'))


@pytest.mark.asyncio
async def test__mtm_loader():
    """Many-to-many relations load as a list of related rows."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO storage_disk VALUES (10)")
        ds.execute("INSERT INTO storage_disk VALUES (20)")
        ds.execute("INSERT INTO storage_disk VALUES (30)")
        ds.execute("INSERT INTO tasks_smarttest VALUES (100)")
        ds.execute("INSERT INTO tasks_smarttest_smarttest_disks VALUES (NULL, 100, 10)")
        ds.execute("INSERT INTO tasks_smarttest_smarttest_disks VALUES (NULL, 100, 30)")
        assert await ds.query("tasks.smarttest", [], {"prefix": "smarttest_"}) == [
            {
                "id": 100,
                "disks": [{"id": 10}, {"id": 30}],
            }
        ]


@pytest.mark.asyncio
async def test__mtm_insert():
    """insert() accepts a list of related ids for a many-to-many field."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO storage_disk VALUES (10)")
        ds.execute("INSERT INTO storage_disk VALUES (20)")
        ds.execute("INSERT INTO storage_disk VALUES (30)")
        await ds.insert("tasks.smarttest", {"disks": [10, 30]}, {"prefix": "smarttest_"})
        assert await ds.query("tasks.smarttest", [], {"prefix": "smarttest_"}) == [
            {
                "id": 1,
                "disks": [{"id": 10}, {"id": 30}],
            }
        ]


@pytest.mark.asyncio
async def test__mtm_update():
    """update() replaces the set of related ids for a many-to-many field."""
    async with datastore_test() as ds:
        ds.execute("INSERT INTO storage_disk VALUES (10)")
        ds.execute("INSERT INTO storage_disk VALUES (20)")
        ds.execute("INSERT INTO storage_disk VALUES (30)")
        ds.execute("INSERT INTO tasks_smarttest VALUES (100)")
        ds.execute("INSERT INTO tasks_smarttest_smarttest_disks VALUES (NULL, 100, 10)")
        ds.execute("INSERT INTO tasks_smarttest_smarttest_disks VALUES (NULL, 100, 30)")
        await ds.update("tasks.smarttest", 100, {"disks": [20, 30]}, {"prefix": "smarttest_"})
        assert await ds.query("tasks.smarttest", [], {"prefix": "smarttest_"}) == [
            {
                "id": 100,
                "disks": [{"id": 20}, {"id": 30}],
            }
        ]
# Table with a column-level default value.
class DefaultModel(Model):
    __tablename__ = "test_default"

    id = sa.Column(sa.Integer(), primary_key=True)
    string = sa.Column(sa.String(100), default="DEFAULT")


@pytest.mark.asyncio
async def test__insert_default():
    """Omitted columns receive their declared default on insert."""
    async with datastore_test() as ds:
        await ds.insert("test.default", {})

        assert (await ds.query("test.default", [], {"get": True}))["string"] == "DEFAULT"


@pytest.mark.asyncio
async def test__insert_default_has_value():
    """An explicit value overrides the declared default."""
    async with datastore_test() as ds:
        await ds.insert("test.default", {"string": "VALUE"})

        assert (await ds.query("test.default", [], {"get": True}))["string"] == "VALUE"


# Table with a caller-supplied string primary key.
class StringPrimaryKeyModel(Model):
    __tablename__ = 'test_stringprimarykey'

    string_id = sa.Column(sa.String(100), primary_key=True)
    value = sa.Column(sa.Integer(), nullable=True)


# Table with a caller-supplied 64-bit integer primary key.
class BigIntegerPrimaryKeyModel(Model):
    __tablename__ = 'test_bigintegerprimarykey'

    integer_id = sa.Column(sa.BigInteger(), primary_key=True)
    value = sa.Column(sa.Integer(), nullable=True)


@pytest.mark.asyncio
async def test__insert_string_pk_record():
    """insert() returns the caller-supplied string primary key."""
    async with datastore_test() as ds:
        payload = {"string_id": "unique_key", "value": 1}
        pk = await ds.insert("test.stringprimarykey", payload)
        assert pk == payload["string_id"]
        assert len(await ds.query("test.stringprimarykey", [["string_id", "=", pk]])) == 1


@pytest.mark.asyncio
async def test__insert_default_integer_pk_value():
    """insert() returns the auto-generated integer primary key."""
    async with datastore_test() as ds:
        pk = await ds.insert("test.default", {"string": "VALUE"})
        assert len(await ds.query("test.default", [["id", "=", pk]])) == 1


@pytest.mark.asyncio
async def test__insert_integer_pk_record():
    """insert() returns the caller-supplied big-integer primary key."""
    async with datastore_test() as ds:
        payload = {"integer_id": 120093877, "value": 1}
        pk = await ds.insert("test.bigintegerprimarykey", payload)
        assert pk == payload["integer_id"]
        assert len(await ds.query("test.bigintegerprimarykey", [["integer_id", "=", pk]])) == 1
# Table mixing prefixed (cifs_srv_*) and unprefixed-style (cifs_SID) columns.
class SMBModel(Model):
    __tablename__ = 'test_smb'

    id = sa.Column(sa.Integer(), primary_key=True)
    cifs_srv_netbiosname = sa.Column(sa.String(120))
    cifs_SID = sa.Column(sa.String(120), nullable=True)


@pytest.mark.asyncio
async def test__already_has_prefix():
    """Keys that already carry the configured prefix are left untouched."""
    async with datastore_test() as ds:
        await ds.insert("test.smb", {"cifs_srv_netbiosname": "", "cifs_SID": None})

        await ds.update("test.smb", 1, {"netbiosname": "localhost", "cifs_SID": "ABCDEF"}, {"prefix": "cifs_srv_"})

        assert (await ds.query("test.smb", [], {"prefix": "cifs_srv_", "get": True})) == {
            "id": 1,
            "netbiosname": "localhost",
            "cifs_SID": "ABCDEF",
        }


# Table with a Time-typed column.
class TimeModel(Model):
    __tablename__ = 'test_time'

    id = sa.Column(sa.Integer(), primary_key=True)
    time = sa.Column(Time())


@pytest.mark.asyncio
async def test__time():
    """Time columns round-trip as datetime.time, stored as HH:MM:SS text."""
    async with datastore_test() as ds:
        await ds.insert("test.time", {"time": datetime.time(21, 30)})

        assert (await ds.query("test.time", [], {"get": True}))["time"] == datetime.time(21, 30)
        assert (await ds.sql("SELECT * FROM test_time"))[0]["time"] == "21:30:00"
# Table with a nullable value for NULL-ordering tests.
class NullModel(Model):
    __tablename__ = 'test_null'

    id = sa.Column(sa.Integer(), primary_key=True)
    value = sa.Column(sa.Integer(), nullable=True)


# nulls_last:/nulls_first: prefixes control where NULLs sort.
@pytest.mark.parametrize("order_by,result", [
    (["value"], [2, 3, 1]),
    (["-value"], [1, 3, 2]),
    (["nulls_last:value"], [3, 1, 2]),
    (["nulls_first:-value"], [2, 1, 3]),
])
@pytest.mark.asyncio
async def test__null_order_by(order_by, result):
    """NULL placement in ordering follows the nulls_* prefix."""
    async with datastore_test() as ds:
        await ds.insert("test.null", {"value": 3})
        await ds.insert("test.null", {"value": None})
        await ds.insert("test.null", {"value": 1})

        assert [row["id"] for row in await ds.query("test.null", [], {"order_by": order_by})] == result
| 26,752 | Python | .py | 570 | 39.04386 | 115 | 0.608984 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,388 | test_ups.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_ups.py | import pytest
from unittest.mock import Mock, patch, mock_open
from middlewared.plugins.ups import UPSService
# Each case: one driver.list line -> expected (choice key, human label).
@pytest.mark.parametrize('config_line,key,value', [
    (
        '"Victron/IMV" "ups" "1" "(various)" "" "victronups"',
        'victronups$(various)', 'Victron/IMV ups 1 (various) (victronups)'
    ),
    (
        '"Various" "ups" "1" "(various)" "Generic RUPS model" "genericups upstype=4"',
        'genericups upstype=4$(various)', 'Various ups 1 (various) Generic RUPS model (genericups)'
    ),
    (
        '"Various" "ups" "1" "(various)" "Generic RUPS 2000 (Megatec M2501 cable)" "genericups upstype=21"',
        'genericups upstype=21$(various)',
        'Various ups 1 (various) Generic RUPS 2000 (Megatec M2501 cable) (genericups)'
    ),
    (
        '"Victron/IMV" "ups" "1" "Lite" "crack cable" "genericups upstype=10"',
        'genericups upstype=10$Lite', 'Victron/IMV ups 1 Lite crack cable (genericups)'
    ),
    (
        '"UNITEK" "ups" "2" "Alpha 1200Sx" "USB" "blazer_usb"',
        'blazer_usb$Alpha 1200Sx', 'UNITEK ups 2 Alpha 1200Sx USB (blazer_usb)'
    ),
    (
        '"Tripp Lite" "ups" "2" "SMART500RT1U" "USB (older; product ID 0001, protocol 3005)" "tripplite_usb"',
        'tripplite_usb$SMART500RT1U',
        'Tripp Lite ups 2 SMART500RT1U USB (older; product ID 0001, protocol 3005) (tripplite_usb)'
    ),
])
@patch('os.path.exists', lambda x: True)
def test__services_ups_service__driver_choices(config_line, key, value):
    """driver.list lines are parsed into driver_choices() entries."""
    with patch('builtins.open', mock_open(read_data=config_line)):
        assert UPSService(Mock()).driver_choices() == {key: value}
# Each case: stored 'driver$model' choice -> ups.conf driver stanza.
@pytest.mark.parametrize('driver_str,normalized', [
    ('victronups$(various)', 'driver = victronups'),
    ('genericups upstype=4$(various)', 'driver = genericups\n\tupstype=4'),
    ('genericups upstype=21$(various)', 'driver = genericups\n\tupstype=21'),
    ('genericups upstype=10$(various)', 'driver = genericups\n\tupstype=10'),
    ('blazer_usb$Alpha 1200Sx', 'driver = blazer_usb'),
    ('tripplite_usb$SMART500RT1U', 'driver = tripplite_usb'),
])
@patch('os.path.exists', lambda x: True)
def test__services_ups_service__driver_string_normalization(driver_str, normalized):
    """Driver choice keys normalize into valid ups.conf driver stanzas."""
    assert UPSService(Mock()).normalize_driver_string(driver_str) == normalized
| 2,286 | Python | .py | 46 | 44.217391 | 110 | 0.662489 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,389 | test_interface.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_interface.py | import copy
from unittest.mock import Mock
import pytest
from middlewared.service import ValidationErrors
from middlewared.plugins.network import InterfaceService
from middlewared.pytest.unit.helpers import create_service
from middlewared.pytest.unit.middleware import Middleware
# Baseline interface.query fixture: two plain physical NICs ('em0', 'em1')
# with no addresses, DHCP, or autoconf enabled. Most tests below start here.
INTERFACES = [
    {
        'id': 'em0',
        'name': 'em0',
        'fake': False,
        'type': 'PHYSICAL',
        'aliases': [],
        'options': '',
        'ipv4_dhcp': False,
        'ipv6_auto': False,
        'state': {
            'cloned': False,
        },
    },
    {
        'id': 'em1',
        'name': 'em1',
        'fake': False,
        'type': 'PHYSICAL',
        'aliases': [],
        'options': '',
        'ipv4_dhcp': False,
        'ipv6_auto': False,
        'state': {
            'cloned': False,
        },
    },
]
# Baseline NICs plus a cloned VLAN (tag 5) on top of 'em0'.
INTERFACES_WITH_VLAN = INTERFACES + [
    {
        'id': 'vlan5',
        'name': 'vlan5',
        'fake': False,
        'type': 'VLAN',
        'aliases': [],
        'options': '',
        'ipv4_dhcp': False,
        'ipv6_auto': False,
        'state': {
            'cloned': True,
        },
        'vlan_tag': 5,
        'vlan_parent_interface': 'em0',
    },
]
# Baseline NICs plus a cloned link aggregation 'bond0' that already owns 'em0'.
INTERFACES_WITH_LAG = INTERFACES + [
    {
        'id': 'bond0',
        'name': 'bond0',
        'fake': False,
        'type': 'LINK_AGGREGATION',
        'aliases': [],
        'options': '',
        'ipv4_dhcp': False,
        'ipv6_auto': False,
        'state': {
            'cloned': True,
        },
        'lag_ports': ['em0'],
    },
]
# Baseline NICs plus a cloned bridge 'br0' that already has 'em0' as a member.
INTERFACES_WITH_BRIDGE = INTERFACES + [
    {
        'id': 'br0',
        'name': 'br0',
        'fake': False,
        'type': 'BRIDGE',
        'aliases': [],
        'options': '',
        'ipv4_dhcp': False,
        'ipv6_auto': False,
        'state': {
            'cloned': True,
        },
        'bridge_members': ['em0'],
    },
]
def mock_datastore_query_side_effect():
    """Return a Mock usable as ``datastore.query`` that serves canned results.

    Every datastore model InterfaceService touches resolves to an empty result
    set, except the global configuration, which reports no IPv4 gateway.
    An unknown model name raises KeyError, surfacing unexpected queries.
    """
    def _dispatch(method, *args):
        handlers = {
            'network.interfaces': Mock(return_value=[]),
            'network.alias': Mock(return_value=[]),
            'network.bridge': Mock(return_value=[]),
            'network.lagginterface': Mock(return_value=[]),
            'network.vlan': Mock(return_value=[]),
            'network.lagginterfacemembers': Mock(return_value=[]),
            'network.globalconfiguration': Mock(return_value={'gc_ipv4gateway': ''}),
        }
        return handlers[method](*args)

    return Mock(side_effect=_dispatch)
@pytest.mark.asyncio
async def test__interfaces_service__create_bridge_invalid_ports():
    """Bridge creation must reject member names that are not known interfaces."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES)
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'BRIDGE',
        'bridge_members': ['em0', 'igb2'],
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    # 'igb2' (index 1 of bridge_members) does not exist.
    assert 'interface_create.bridge_members.1' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_bridge_invalid_ports_used():
    """Bridge creation must reject a port that another bridge already owns."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES_WITH_BRIDGE)
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'BRIDGE',
        'bridge_members': ['em0'],
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    # 'em0' is already a member of the fixture bridge 'br0'.
    assert 'interface_create.bridge_members.0' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_lagg_invalid_ports():
    """Lagg creation must reject port names that are not known interfaces."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES)
    middleware['interface.lag_supported_protocols'] = Mock(return_value=['LACP'])
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'LINK_AGGREGATION',
        'lag_protocol': 'LACP',
        'lag_ports': ['em0', 'igb2'],
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    # 'igb2' (index 1 of lag_ports) does not exist.
    assert 'interface_create.lag_ports.1' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_lagg_invalid_ports_cloned():
    """A cloned (virtual) interface such as a VLAN cannot be used as a lagg port."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES_WITH_VLAN)
    middleware['interface.lag_supported_protocols'] = Mock(return_value=['LACP'])
    middleware['datastore.query'] = mock_datastore_query_side_effect()
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'LINK_AGGREGATION',
        'lag_protocol': 'LACP',
        'lag_ports': ['em1', 'vlan5'],
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    # 'vlan5' (index 1 of lag_ports) is a cloned interface.
    assert 'interface_create.lag_ports.1' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_lagg_invalid_ports_used():
    """Lagg creation must reject a port that another lagg already owns."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES_WITH_LAG)
    middleware['interface.lag_supported_protocols'] = Mock(return_value=['LACP'])
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'LINK_AGGREGATION',
        'lag_protocol': 'LACP',
        'lag_ports': ['em0'],
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    # 'em0' is already a member of the fixture lagg 'bond0'.
    assert 'interface_create.lag_ports.0' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_lagg():
    """A well-formed LACP lagg over two free physical ports must be accepted."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES)
    middleware['interface.lag_supported_protocols'] = Mock(return_value=['LACP'])
    middleware['interface.validate_name'] = Mock()
    middleware['datastore.query'] = mock_datastore_query_side_effect()
    middleware['datastore.insert'] = Mock(return_value=5)
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'name': 'bond0',
        'type': 'LINK_AGGREGATION',
        'lag_protocol': 'LACP',
        'lag_ports': ['em0', 'em1'],
    }
    # Must not raise: both ports exist and are unused.
    await create_service(middleware, InterfaceService).do_create(payload)
@pytest.mark.parametrize('attr_val', [
    ('aliases', [{'address': '192.168.8.2', 'netmask': 24}]),
    ('mtu', 1500),
    ('ipv4_dhcp', True),
    ('ipv6_auto', True),
])
@pytest.mark.asyncio
async def test__interfaces_service__lagg_update_members_invalid(attr_val):
    """No addressing/MTU attribute may be configured directly on a lagg member."""
    attr, value = attr_val
    middleware = Middleware()
    middleware['interface.query'] = middleware._query_filter(INTERFACES_WITH_LAG)
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    with pytest.raises(ValidationErrors) as errors:
        # 'em0' belongs to the fixture lagg 'bond0'.
        await create_service(middleware, InterfaceService).do_update('em0', {attr: value})
    assert f'interface_update.{attr}' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_vlan_invalid_parent():
    """VLAN creation must reject a parent interface that does not exist."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES)
    middleware['interface.validate_name'] = Mock()
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'VLAN',
        'name': 'myvlan1',
        'vlan_tag': 5,
        'vlan_parent_interface': 'igb2',
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    assert 'interface_create.vlan_parent_interface' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_vlan_invalid_parent_used():
    """VLAN creation must reject a parent that is already a lagg member."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES_WITH_LAG)
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'type': 'VLAN',
        'vlan_tag': 5,
        'vlan_parent_interface': 'em0',
    }
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_create(payload)
    # 'em0' belongs to the fixture lagg 'bond0'.
    assert 'interface_create.vlan_parent_interface' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__create_vlan():
    """A VLAN over a free physical parent must be accepted."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES)
    middleware['interface.validate_name'] = Mock()
    middleware['datastore.query'] = mock_datastore_query_side_effect()
    middleware['datastore.insert'] = Mock(return_value=5)
    middleware['network.common.check_failover_disabled'] = Mock()
    payload = {
        'name': 'vlan0',
        'type': 'VLAN',
        'vlan_tag': 5,
        'vlan_parent_interface': 'em0',
    }
    # Must not raise: 'em0' exists and is unused.
    await create_service(middleware, InterfaceService).do_create(payload)
@pytest.mark.asyncio
async def test__interfaces_service__update_vlan_mtu_bigger_parent():
    """A VLAN MTU larger than its parent interface's MTU must be rejected."""
    middleware = Middleware()
    middleware['interface.query'] = middleware._query_filter(INTERFACES_WITH_VLAN)
    middleware['interface.validate_name'] = Mock()
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['network.common.check_failover_disabled'] = Mock()
    vlan_id = INTERFACES_WITH_VLAN[-1]['id']
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_update(vlan_id, {'mtu': 9000})
    assert 'interface_update.mtu' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__update_two_dhcp():
    """Only one interface may have IPv4 DHCP enabled at a time."""
    fixtures = copy.deepcopy(INTERFACES)
    fixtures[0]['ipv4_dhcp'] = True
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=fixtures)
    middleware['datastore.query'] = Mock(return_value=[
        {'int_interface': fixtures[0]['name'], 'int_dhcp': True, 'int_ipv6auto': False}
    ])
    middleware['network.common.check_failover_disabled'] = Mock()
    second_nic = fixtures[1]
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_update(
            second_nic['id'], {
                'ipv4_dhcp': True,
            },
        )
    assert 'interface_update.ipv4_dhcp' in errors.value, list(errors.value)
@pytest.mark.asyncio
async def test__interfaces_service__update_two_same_network():
    """Two interfaces must not be configured with addresses on the same subnet."""
    fixtures = copy.deepcopy(INTERFACES)
    fixtures[0]['aliases'] = [
        {'type': 'INET', 'address': '192.168.5.2', 'netmask': 24},
    ]
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=fixtures)
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['datastore.insert'] = Mock(return_value=5)
    middleware['network.common.check_failover_disabled'] = Mock()
    second_nic = fixtures[1]
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_update(
            second_nic['id'], {
                'aliases': [{'address': '192.168.5.3', 'netmask': 24}],
            },
        )
    # 192.168.5.3/24 overlaps the first NIC's 192.168.5.2/24 network.
    assert 'interface_update.aliases.0' in errors.value
@pytest.mark.asyncio
async def test__interfaces_service__update_mtu_options():
    """Setting the MTU via the raw 'options' string must be rejected."""
    middleware = Middleware()
    middleware['interface.query'] = Mock(return_value=INTERFACES)
    middleware['interface.validate_name'] = Mock()
    middleware['datastore.query'] = Mock(return_value=[])
    middleware['datastore.insert'] = Mock(return_value=5)
    middleware['network.common.check_failover_disabled'] = Mock()
    second_nic = INTERFACES[1]
    with pytest.raises(ValidationErrors) as errors:
        await create_service(middleware, InterfaceService).do_update(
            second_nic['id'], {
                'options': 'mtu 1550',
            },
        )
    assert 'interface_update.options' in errors.value
| 11,451 | Python | .py | 301 | 31.036545 | 103 | 0.619968 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,390 | test_vm.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_vm.py | import pytest
from middlewared.service_exception import ValidationErrors
from middlewared.pytest.unit.helpers import load_compound_service
from middlewared.pytest.unit.middleware import Middleware
# 'vm' is a compound service assembled from several plugin parts; load the
# composed service class once for all tests in this module.
VMService = load_compound_service('vm')
@pytest.mark.parametrize('ha_capable,license_features,should_work', [
    (True, [], False),
    (True, ['VM'], True),
    (False, [], True),
])
@pytest.mark.asyncio
async def test_vm_license_active_response(ha_capable, license_features, should_work):
    """HA-capable systems need the 'VM' license feature; others always allow VMs."""
    middleware = Middleware()
    middleware['system.is_ha_capable'] = lambda *args: ha_capable
    middleware['system.license'] = lambda *args: {'features': license_features}
    service = VMService(middleware)
    assert await service.license_active() is should_work
@pytest.mark.parametrize('license_active', [
    True,
    False,
])
@pytest.mark.asyncio
async def test_vm_creation_for_licensed_and_unlicensed_systems(license_active):
    """common_validation accepts a valid VM payload only when the VM license is active."""
    m = Middleware()
    vm_svc = VMService(m)
    # A complete, otherwise-valid vm_create payload so that the licensing
    # check is the only thing that can produce a validation error.
    vm_payload = {
        'name': 'test_vm',
        'description': '',
        'vcpus': 0,
        'memory': 14336,
        'min_memory': None,
        'autostart': False,
        'time': 'LOCAL',
        'bootloader': 'UEFI',
        'bootloader_ovmf': 'OVMF_CODE.fd',
        'cores': 1,
        'threads': 1,
        'hyperv_enlightenments': False,
        'shutdown_timeout': 90,
        'cpu_mode': 'HOST-PASSTHROUGH',
        'cpu_model': None,
        'cpuset': None,
        'nodeset': None,
        'pin_vcpus': False,
        'hide_from_msr': False,
        'ensure_display_device': True,
        'arch_type': None,
        'machine_type': None,
        'uuid': '64e31dd7-8c76-4dca-8b4b-0126b8853c5b',
        'command_line_args': '',
    }
    m['vm.bootloader_ovmf_choices'] = lambda *args: {'OVMF_CODE.fd': 'OVMF_CODE.fd'}
    m['vm.license_active'] = lambda *args: license_active
    m['vm.query'] = lambda *args: []
    verrors = ValidationErrors()
    await vm_svc.common_validation(verrors, 'vm_create', vm_payload)
    # Unlicensed systems must get exactly the licensing error; licensed, none.
    assert [e.errmsg for e in verrors.errors] == (
        [] if license_active else ['System is not licensed to use VMs']
    )
| 2,113 | Python | .py | 59 | 29.79661 | 85 | 0.636141 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,391 | test_system_dataset.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_system_dataset.py | import jsonschema
import pytest
from middlewared.plugins.system_dataset.hierarchy import get_system_dataset_spec, SYSTEM_DATASET_JSON_SCHEMA
@pytest.mark.parametrize('pool_name,uuid', [
    ('test', '12345678'),
    ('test2', '12345679'),
])
@pytest.mark.asyncio
async def test_system_dataset_spec(pool_name, uuid):
    """The generated system dataset hierarchy must satisfy its JSON schema."""
    spec = get_system_dataset_spec(pool_name, uuid)
    jsonschema.validate(spec, SYSTEM_DATASET_JSON_SCHEMA)
| 413 | Python | .py | 10 | 38.8 | 108 | 0.775 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,392 | test_update.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_update.py | import pytest
from middlewared.plugins.update import CompareTrainsResult, compare_trains
# Train names follow "TrueNAS-SCALE-<release>[-Nightlies]"; the release names
# here are alphabetically ordered (Angelfish < Bluefin).
@pytest.mark.parametrize("t1,t2,result", [
    ("TrueNAS-SCALE-Angelfish", "TrueNAS-SCALE-Bluefin", CompareTrainsResult.MAJOR_UPGRADE),
    ("TrueNAS-SCALE-Bluefin", "TrueNAS-SCALE-Angelfish", CompareTrainsResult.MAJOR_DOWNGRADE),
    ("TrueNAS-SCALE-Angelfish-Nightlies", "TrueNAS-SCALE-Bluefin-Nightlies", CompareTrainsResult.MAJOR_UPGRADE),
    ("TrueNAS-SCALE-Bluefin-Nightlies", "TrueNAS-SCALE-Angelfish-Nightlies", CompareTrainsResult.MAJOR_DOWNGRADE),
    ("TrueNAS-SCALE-Angelfish", "TrueNAS-SCALE-Angelfish-Nightlies", CompareTrainsResult.NIGHTLY_UPGRADE),
    ("TrueNAS-SCALE-Angelfish", "TrueNAS-SCALE-Bluefin-Nightlies", CompareTrainsResult.NIGHTLY_UPGRADE),
    ("TrueNAS-SCALE-Angelfish-Nightlies", "TrueNAS-SCALE-Angelfish", CompareTrainsResult.NIGHTLY_DOWNGRADE),
    ("TrueNAS-SCALE-Angelfish-Nightlies", "TrueNAS-SCALE-Bluefin", CompareTrainsResult.NIGHTLY_DOWNGRADE),
    ("TrueNAS-SCALE-Angelfish", "TrueNAS-SCALE-Angelfish", None),
])
def test__compare_trains(t1, t2, result):
    """compare_trains classifies the move from t1 to t2 (None means same train)."""
    assert compare_trains(t1, t2) == result
| 1,136 | Python | .py | 15 | 71.733333 | 114 | 0.783154 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,393 | test_interface_link_address.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_interface_link_address.py | # -*- coding=utf-8 -*-
import logging
from unittest.mock import ANY, AsyncMock
import pytest
from middlewared.plugins.interface.link_address import InterfaceService, setup as link_address_setup
from middlewared.pytest.unit.plugins.test_datastore import Model, datastore_test
import middlewared.sqlalchemy as sa
# Module-level logger (currently unused within this test module).
logger = logging.getLogger(__name__)
class NetworkBridgeModel(Model):
    """Minimal stand-in for the 'network_bridge' datastore table."""
    __tablename__ = 'network_bridge'
    id = sa.Column(sa.Integer(), primary_key=True)  # noqa
    interface_id = sa.Column(sa.ForeignKey('network_interfaces.id', ondelete='CASCADE'))
    members = sa.Column(sa.JSON(list), default=[])
class NetworkInterfaceModel(Model):
    """Minimal stand-in for the 'network_interfaces' datastore table."""
    __tablename__ = "network_interfaces"
    id = sa.Column(sa.Integer(), primary_key=True)  # noqa
    # NOTE(review): the test below stores interface *names* in int_interface
    # even though the column is declared Integer; SQLite's typelessness makes
    # this work — confirm this is intentional for the fixture.
    int_interface = sa.Column(sa.Integer(), nullable=False)
    int_settings = sa.Column(sa.Integer())
class NetworkInterfaceLinkAddressModel(Model):
    """Minimal stand-in for the 'network_interface_link_address' table.

    link_address / link_address_b hold the MAC for the A and B failover nodes.
    """
    __tablename__ = 'network_interface_link_address'
    id = sa.Column(sa.Integer, primary_key=True)  # noqa
    interface = sa.Column(sa.String(300))
    link_address = sa.Column(sa.String(17), nullable=True)
    link_address_b = sa.Column(sa.String(17), nullable=True)
class NetworkLaggInterfaceModel(Model):
    """Minimal stand-in for the 'network_lagginterface' datastore table."""
    __tablename__ = 'network_lagginterface'
    id = sa.Column(sa.Integer, primary_key=True)  # noqa
    lagg_interface_id = sa.Column(sa.Integer(), sa.ForeignKey('network_interfaces.id'))
class NetworkLaggInterfaceMemberModel(Model):
    """Minimal stand-in for the 'network_lagginterfacemembers' table."""
    __tablename__ = 'network_lagginterfacemembers'
    id = sa.Column(sa.Integer, primary_key=True)  # noqa
    lagg_ordernum = sa.Column(sa.Integer())
    lagg_interfacegroup_id = sa.Column(sa.ForeignKey('network_lagginterface.id', ondelete='CASCADE'), index=True)
    lagg_physnic = sa.Column(sa.String(120), unique=True)
class NetworkVlanModel(Model):
    """Minimal stand-in for the 'network_vlan' table (vint=VLAN, pint=parent)."""
    __tablename__ = 'network_vlan'
    id = sa.Column(sa.Integer(), primary_key=True)  # noqa
    vlan_vint = sa.Column(sa.String(120))
    vlan_pint = sa.Column(sa.String(300))
class VMDeviceModel(Model):
    """Minimal stand-in for the 'vm_device' table (NIC attachments only here)."""
    __tablename__ = 'vm_device'
    id = sa.Column(sa.Integer(), primary_key=True)  # noqa
    dtype = sa.Column(sa.String(50))
    attributes = sa.Column(sa.JSON())
# Each case is (before, after): 'before' seeds the datastore, 'after' is the
# expected datastore contents once link_address_setup has renamed interfaces
# by matching their persisted hardware (MAC) addresses. The optional
# "interface.query" key overrides which interfaces the live system reports;
# when absent, 'after["hw"]' is used as the live view.
@pytest.mark.parametrize("before,after", [
    # BSD -> Linux interface rename
    (
        {
            "hw": {"em0": "08:00:27:1e:9f:d3", "em1": "08:00:27:1e:9f:d4"},
            "interface": {"em0": 0, "em1": 1, "bridge6": 66, "lagg7": 77, "vlan8": 88},
            "bridge": {"bridge6": ["em0", "em1"]},
            "lagg": {"lagg7": ["em1", "em0"]},
            "vlan": {"vlan8": "em1"},
            "vm": ["em1"],
        },
        {
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d4"},
            "interface": {"eth0": 0, "eth1": 1, "br6": 66, "bond7": 77, "vlan8": 88},
            "bridge": {"br6": ["eth0", "eth1"]},
            "lagg": {"bond7": ["eth1", "eth0"]},
            "vlan": {"vlan8": "eth1"},
            "vm": ["eth1"],
        },
    ),
    # Interfaces swapped names
    (
        {
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d4"},
            "interface": {"eth0": 0, "eth1": 1, "lagg0": 10, "lagg1": 11},
            "lagg": {"lagg0": ["eth0"], "lagg1": ["eth1"]},
        },
        {
            "hw": {"eth1": "08:00:27:1e:9f:d3", "eth0": "08:00:27:1e:9f:d4"},
            "interface": {"eth1": 0, "eth0": 1, "bond0": 10, "bond1": 11},
            "lagg": {"bond0": ["eth1"], "bond1": ["eth0"]},
        },
    ),
    # Multiple LAGGs
    (
        {
            "hw": {"eth0": "00:00:00:00:00:00", "eth1": "00:00:00:00:00:01",
                   "eth2": "00:00:00:00:00:02", "eth3": "00:00:00:00:00:03"},
            "interface": {"eth0": 0, "eth1": 1, "eth2": 2, "eth3": 3, "bond0": 10, "bond1": 11},
            "lagg": {"bond0": ["eth0", "eth1"], "bond1": ["eth2", "eth3"]},
        },
        {
            "hw": {"enp131s0": "00:00:00:00:00:00", "enp131s1": "00:00:00:00:00:01",
                   "enp131s2": "00:00:00:00:00:02", "enp131s3": "00:00:00:00:00:03"},
            "interface": {"enp131s0": 0, "enp131s1": 1, "enp131s2": 2, "enp131s3": 3, "bond0": 10, "bond1": 11},
            "lagg": {"bond0": ["enp131s0", "enp131s1"], "bond1": ["enp131s2", "enp131s3"]},
        },
    ),
    # Interface gone
    (
        {
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d4"},
            "interface": {"eth0": 0, "eth1": 1, "br0": 66, "bond0": 77, "vlan0": 88},
            "bridge": {"br0": ["eth0", "eth1"]},
            "lagg": {"bond0": ["eth0", "eth1"]},
            "vlan": {"vlan0": "eth1"},
            "vm": ["eth1"],
        },
        {
            "interface.query": {"eth0": "08:00:27:1e:9f:d3"},
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d4"},
            "interface": {"eth0": 0, "eth1": 1, "br0": 66, "bond0": 77, "vlan0": 88},
            "bridge": {"br0": ["eth0", "eth1"]},
            "lagg": {"bond0": ["eth0", "eth1"]},
            "vlan": {"vlan0": "eth1"},
            "vm": ["eth1"],
        },
    ),
    # New interface
    (
        {
            "hw": {"eth0": "08:00:27:1e:9f:d3"},
            "interface": {"eth0": 0, "eth1": 1, "br0": 66, "bond0": 77, "vlan0": 88},
            "bridge": {"br0": ["eth0", "eth1"]},
            "lagg": {"bond0": ["eth0", "eth1"]},
            "vlan": {"vlan0": "eth1"},
            "vm": ["eth1"],
        },
        {
            "interface.query": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d4"},
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d4"},
            "interface": {"eth0": 0, "eth1": 1, "br0": 66, "bond0": 77, "vlan0": 88},
            "bridge": {"br0": ["eth0", "eth1"]},
            "lagg": {"bond0": ["eth0", "eth1"]},
            "vlan": {"vlan0": "eth1"},
            "vm": ["eth1"],
        },
    ),
    # Duplicate addresses
    (
        {
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d3"},
            "interface": {"eth0": 0, "eth1": 1},
        },
        {
            "interface.query": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d3"},
            "hw": {"eth0": "08:00:27:1e:9f:d3", "eth1": "08:00:27:1e:9f:d3"},
            "interface": {"eth0": 0, "eth1": 1},
        },
    ),
])
@pytest.mark.asyncio
async def test__interface_link_address_setup(before, after):
    """Seed the datastore from 'before', run link_address_setup, assert 'after'."""
    async with datastore_test() as ds:
        ds.middleware["failover.node"] = AsyncMock(return_value="MANUAL")
        ds.middleware["failover.status"] = AsyncMock(return_value="SINGLE")
        ds.middleware["interface.persist_link_addresses"] = InterfaceService(ds.middleware).persist_link_addresses
        # --- seed phase: persist the 'before' picture into the datastore ---
        for interface, link_address in before.get("hw", {}).items():
            await ds.insert("network.interface_link_address", {
                "interface": interface,
                "link_address": link_address,
            })
        interface_id = {}
        for interface, settings in before.get("interface", {}).items():
            interface_id[interface] = await ds.insert("network.interfaces", {
                "interface": interface,
                "settings": settings,
            }, {"prefix": "int_"})
        for interface, members in before.get("bridge", {}).items():
            await ds.insert("network.bridge", {
                "interface": interface_id[interface],
                "members": members,
            })
        for interface, members in before.get("lagg", {}).items():
            lagg_id = await ds.insert("network.lagginterface", {
                "interface": interface_id[interface],
            }, {"prefix": "lagg_"})
            for order, member in enumerate(members):
                await ds.insert("network.lagginterfacemembers", {
                    "interfacegroup": lagg_id,
                    "physnic": member,
                    "ordernum": order,
                }, {"prefix": "lagg_"})
        for vint, pint in before.get("vlan", {}).items():
            await ds.insert("network.vlan", {
                "vint": vint,
                "pint": pint,
            }, {"prefix": "vlan_"})
        for interface in before.get("vm", []):
            await ds.insert("vm.device", {
                "dtype": "NIC",
                "attributes": {"nic_attach": interface},
            })
        # Live system view: 'interface.query' override if present, else after's hw.
        ds.middleware["interface.query"] = AsyncMock(return_value=[
            {
                "name": name,
                "state": {
                    "hardware_link_address": link_address,
                }
            }
            for name, link_address in (
                after["interface.query"] if "interface.query" in after else after.get("hw", {})
            ).items()
        ])
        # --- exercise ---
        await link_address_setup(ds.middleware)
        # --- verify phase: each table must now match the 'after' picture ---
        assert await ds.query("network.interface_link_address", [], {"prefix": "int_"}) == [
            {
                "id": ANY,
                "interface": interface,
                "link_address": link_address,
                "link_address_b": None,
            }
            for interface, link_address in after.get("hw", {}).items()
        ]
        assert await ds.query("network.interfaces", [], {"prefix": "int_"}) == [
            {
                "id": ANY,
                "interface": interface,
                "settings": settings,
            }
            for interface, settings in after.get("interface", {}).items()
        ]
        assert await ds.query("network.bridge") == [
            {
                "id": ANY,
                "interface": {"id": ANY, "int_interface": interface, "int_settings": ANY},
                "members": members,
            }
            for interface, members in after.get("bridge", {}).items()
        ]
        assert await ds.query("network.lagginterface", [], {"prefix": "lagg_"}) == [
            {
                "id": ANY,
                "interface": {"id": ANY, "int_interface": interface, "int_settings": ANY},
            }
            for interface, members in after.get("lagg", {}).items()
        ]
        for lagg in await ds.query("network.lagginterface", [], {"prefix": "lagg_"}):
            members = after["lagg"].get(lagg["interface"]["int_interface"])
            assert await ds.query(
                "network.lagginterfacemembers",
                [["interfacegroup", "=", lagg["id"]]],
                {"prefix": "lagg_", "order_by": ["ordernum"]}
            ) == [
                {
                    "id": ANY,
                    "interfacegroup": ANY,
                    "physnic": member,
                    "ordernum": ANY,
                }
                for member in members
            ]
        assert await ds.query("network.vlan", [], {"prefix": "vlan_"}) == [
            {
                "id": ANY,
                "vint": vint,
                "pint": pint,
            }
            for vint, pint in after.get("vlan", {}).items()
        ]
        assert await ds.query("vm.device") == [
            {
                "id": ANY,
                "dtype": "NIC",
                "attributes": {"nic_attach": interface},
            }
            for interface in after.get("vm", [])
        ]
| 11,206 | Python | .py | 262 | 31.782443 | 114 | 0.496376 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,394 | test_gpu_isolation.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_gpu_isolation.py | import pytest
from middlewared.pytest.unit.helpers import load_compound_service
from middlewared.pytest.unit.middleware import Middleware
from middlewared.service_exception import ValidationErrors
# device.get_gpus fixture: the first GPU is isolatable, the second uses
# system-critical devices and therefore cannot be isolated from the host.
# NOTE(review): the second entry's addr.bus ('09') does not match the bus
# in its pci_slot ('02') — looks like copy-paste in the sample data; the
# tests here do not depend on addr.bus, so it is harmless.
AVAILABLE_GPUS = [
    {
        'addr': {
            'pci_slot': '0000:09:00.0',
            'domain': '0000',
            'bus': '09',
            'slot': '00'
        },
        'description': 'Red Hat, Inc. GPU',
        'devices': [
            {
                'pci_id': '1AF4:1050',
                'pci_slot': '0000:09:00.0',
                'vm_pci_slot': 'pci_0000_09_00_0'
            }
        ],
        'vendor': None,
        'uses_system_critical_devices': False,
        'available_to_host': False
    },
    {
        'addr': {
            'pci_slot': '0000:02:00.0',
            'domain': '0000',
            'bus': '09',
            'slot': '00'
        },
        'description': 'Red Hat, Inc. GPU',
        'devices': [
            {
                'pci_id': '1AF4:1050',
                'pci_slot': '0000:02:00.0',
                'vm_pci_slot': 'pci_0000_02_00_0'
            }
        ],
        'vendor': None,
        'uses_system_critical_devices': True,
        'available_to_host': True
    }
]
# 'system.advanced' is a compound service; load the composed class once.
ADVANCED_SVC = load_compound_service('system.advanced')
# The original parametrization listed (['0000:09:00.0'], []) twice — a
# byte-identical duplicate that ran the same case a second time without
# adding coverage; it has been removed.
@pytest.mark.parametrize('gpu_pci_ids,errors', [
    (['0000:09:00.0'], []),
    (['0000:02:00.0'], ['0000:02:00.0 GPU pci slot(s) consists of devices which cannot be isolated from host.']),
])
@pytest.mark.asyncio
async def test_valid_isolated_gpu(gpu_pci_ids, errors):
    """validate_gpu_pci_ids rejects only GPUs that cannot be isolated from the host."""
    m = Middleware()
    m['device.get_gpus'] = lambda *args: AVAILABLE_GPUS
    system_advance_svc = ADVANCED_SVC(m)
    verrors = ValidationErrors()
    # validate_gpu_pci_ids returns the (possibly extended) ValidationErrors.
    verrors = await system_advance_svc.validate_gpu_pci_ids(gpu_pci_ids, verrors, 'test')
    assert [e.errmsg for e in verrors.errors] == errors
| 1,883 | Python | .py | 58 | 24.12069 | 113 | 0.539307 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,395 | test_smart.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_smart.py | import textwrap
import json
import pytest
from middlewared.plugins.smart import parse_smart_selftest_results, parse_current_smart_selftest
from middlewared.api.current import (
AtaSelfTest, NvmeSelfTest, ScsiSelfTest
)
def test__parse_smart_selftest_results__ataprint__1():
    """ATA self-test log entries are normalized; power_on_hours_ago is derived
    from the device's current power_on_time minus each entry's lifetime_hours."""
    # Shape mirrors `smartctl -a -j` output for an ATA disk.
    data = {
        "power_on_time": {
            "hours": 16590
        },
        "ata_smart_self_test_log": {
            "standard": {
                "revision": 1,
                "table": [
                    {
                        "type": {
                            "value": 1,
                            "string": "Short offline"
                        },
                        "status": {
                            "value": 0,
                            "string": "Completed without error",
                            "passed": True
                        },
                        "lifetime_hours": 16590
                    },
                    {
                        "type": {
                            "value": 1,
                            "string": "Short offline"
                        },
                        "status": {
                            "value": 0,
                            "string": "Completed without error",
                            "passed": True
                        },
                        "lifetime_hours": 16589
                    }
                ],
                "error_count_total": 0,
                "error_count_outdated": 0
            }
        }
    }
    assert parse_smart_selftest_results(data) == [
        {
            "num": 0,
            "description": "Short offline",
            "status": "SUCCESS",
            "status_verbose": "Completed without error",
            "remaining": 0.0,
            "lifetime": 16590,
            "lba_of_first_error": None,
            "power_on_hours_ago": 0
        },
        {
            "num": 1,
            "description": "Short offline",
            "status": "SUCCESS",
            "status_verbose": "Completed without error",
            "remaining": 0.0,
            "lifetime": 16589,
            "lba_of_first_error": None,
            "power_on_hours_ago": 1
        }
    ]
def test__parse_smart_selftest_results__ataprint__2():
    """An in-progress ATA self-test maps to RUNNING, with remaining_percent 100
    converted to the fraction 1.0."""
    data = {
        "power_on_time": {
            "hours": 16590
        },
        "ata_smart_self_test_log": {
            "standard": {
                "revision": 1,
                "table": [
                    {
                        "type": {
                            "value": 1,
                            "string": "Offline"
                        },
                        "status": {
                            "value": 249,
                            "string": "Self-test routine in progress",
                            "remaining_percent": 100,
                            "passed": True
                        },
                        "lifetime_hours": 16590
                    }
                ],
                "error_count_total": 0,
                "error_count_outdated": 0
            }
        }
    }
    assert parse_smart_selftest_results(data) == [
        {
            "num": 0,
            "description": "Offline",
            "status": "RUNNING",
            "status_verbose": "Self-test routine in progress",
            "remaining": 1.0,
            "lifetime": 16590,
            "lba_of_first_error": None,
            "power_on_hours_ago": 0
        }
    ]
def test__parse_smart_selftest_results__nvmeprint__1():
    """NVMe self-test log entries are normalized; fields smartctl did not
    report (failing_lba, nsid, seg) come back as None, sct/code default to 0."""
    assert parse_smart_selftest_results({
        "power_on_time": {
            "hours": 18636
        },
        "nvme_self_test_log": {
            "table": [
                {
                    "self_test_code": {
                        "string": "Short"
                    },
                    "self_test_result": {
                        "string": "Completed without error"
                    },
                    "power_on_hours": 18636
                }
            ],
            "error_count_total": 0,
            "error_count_outdated": 0
        }
    }) == [
        {
            "num": 0,
            "description": "Short",
            "status": "SUCCESS",
            "status_verbose": "Completed without error",
            "power_on_hours": 18636,
            "failing_lba": None,
            "nsid": None,
            "seg": None,
            "sct": 0x0,
            "code": 0x0,
            "power_on_hours_ago": 0
        },
    ]
def test__parse_smart_selftest_results__scsiprint__1():
    """SCSI self-test entries come from numbered 'scsi_self_test_N' keys; a
    'segment failed' result maps to status FAILED."""
    assert parse_smart_selftest_results({
        "power_on_time": {
            "hours": 3943
        },
        "scsi_self_test_0": {
            "code": {
                "string": "Background short"
            },
            "result": {
                "string": "Completed, segment failed"
            },
            "power_on_time": {
                "hours": 3943
            }
        }
    }) == [
        {
            "num": 0,
            "description": "Background short",
            "status": "FAILED",
            "status_verbose": "Completed, segment failed",
            "segment_number": None,
            "lifetime": 3943,
            "lba_of_first_error": None,
            "power_on_hours_ago": 0
        }
    ]
# Cases are labeled with the smartmontools source file whose JSON output they
# mimic. ATA reports remaining_percent (progress = 100 - remaining), NVMe
# reports completion percent directly, SCSI only exposes a boolean
# self_test_in_progress flag (progress 0); unrecognized JSON yields None.
@pytest.mark.parametrize("stdout,result", [
    # ataprint.cpp
    (
        {
            "ata_smart_self_test_log": {
                "standard": {
                    "revision": 1,
                    "table": [
                        {
                            "type": {
                                "value": 1,
                                "string": "Offline"
                            },
                            "status": {
                                "value": 249,
                                "string": "Self-test routine in progress",
                                "remaining_percent": 41,
                                "passed": True
                            },
                            "lifetime_hours": 0
                        }
                    ],
                    "error_count_total": 0,
                    "error_count_outdated": 0
                }
            }
        },
        {"progress": 59},
    ),
    # nvmeprint.cpp
    (
        {
            "nvme_self_test_log": {
                "current_self_test_completion_percent": 3
            }
        },
        {"progress": 3},
    ),
    # scsiprint.spp
    (
        {"junkjson":True},
        None,
    ),
    (
        {"self_test_in_progress":True},
        {"progress": 0}
    )
])
def test__parse_current_smart_selftest(stdout, result):
    """parse_current_smart_selftest extracts in-progress test state, or None."""
    assert parse_current_smart_selftest(stdout) == result
| 6,666 | Python | .py | 221 | 15.696833 | 96 | 0.372299 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,396 | test_ldap.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_ldap.py | import pytest
from unittest.mock import Mock
from middlewared.service_exception import ValidationErrors
from middlewared.schema import (
accepts, LDAP_DN
)
from middlewared.plugins.ldap_ import constants, utils
# Search bases as captured from a FreeIPA-style LDAP configuration.
# NOTE(review): the base_netgroup value carries a leading 'netgroup ' token —
# looks like raw captured sample data; the test below only checks 1:1
# substitution into sssd params, so it is preserved as-is.
FREEIPA_SAMPLE_SEARCH_BASE = {
    "base_user": "cn=users,cn=accounts,dc=tn,dc=ixsystems,dc=net",
    "base_group": "cn=groups,cn=accounts,dc=tn,dc=ixsystems,dc=net",
    "base_netgroup": "netgroup cn=ng,cn=compat,dc=tn,dc=ixsystems,dc=net"
}
# Attribute maps with every entry NULL: a FreeIPA-style setup that relies on
# default attribute names, so no sssd map parameters should be generated.
FREEIPA_SAMPLE_ATTR_MAPS = {
    "passwd": {
        "user_object_class": None,
        "user_name": None,
        "user_uid": None,
        "user_gid": None,
        "user_gecos": None,
        "user_home_directory": None,
        "user_shell": None
    },
    "shadow": {
        "shadow_object_class": None,
        "shadow_last_change": None,
        "shadow_min": None,
        "shadow_max": None,
        "shadow_warning": None,
        "shadow_inactive": None,
        "shadow_expire": None
    },
    "group": {
        "group_object_class": None,
        "group_gid": None,
        "group_member": None
    },
    "netgroup": {
        "netgroup_object_class": None,
        "netgroup_member": None,
        "netgroup_triple": None
    }
}
# All-NULL search bases: must contribute no sssd configuration lines.
NONE_SAMPLE_SEARCH_BASE = {
    "base_user": None,
    "base_group": None,
    "base_netgroup": None
}
@pytest.mark.parametrize('value,expected', [
    ('o=5def63d2b12d4332c706a57f,dc=jumpcloud,dc=com', 'o=5def63d2b12d4332c706a57f,dc=jumpcloud,dc=com'),
    ('canary', ValidationErrors),
    (420, ValidationErrors),
])
def test__schema_ldapdn(value, expected):
    """A valid DN passes through the LDAP_DN schema; junk raises ValidationErrors."""
    @accepts(LDAP_DN('data', null=True))
    def ldapdnnotnull(self, data):
        return data

    fake_self = Mock()
    if expected is not ValidationErrors:
        assert ldapdnnotnull(fake_self, value) == expected
    else:
        with pytest.raises(ValidationErrors):
            ldapdnnotnull(fake_self, value)
def test__schema_ldapdn_null():
    """A nullable LDAP_DN field must pass None through untouched."""
    @accepts(LDAP_DN('data', null=True))
    def ldapdnnull(self, data):
        return data

    assert ldapdnnull(Mock(), None) is None
def test__freeipa_schema_conversion():
    """All-NULL attribute maps emit no params; search bases map 1:1 to sssd params."""
    # NULL entries must not generate unexpected configuration lines.
    assert len(utils.attribute_maps_data_to_params(FREEIPA_SAMPLE_ATTR_MAPS)) == 0

    search_bases = utils.search_base_data_to_params(FREEIPA_SAMPLE_SEARCH_BASE)
    assert len(search_bases) == 3

    param_names = {
        'base_user': 'ldap_user_search_base',
        'base_group': 'ldap_group_search_base',
        'base_netgroup': 'ldap_netgroup_search_base',
    }
    for key, entry in FREEIPA_SAMPLE_SEARCH_BASE.items():
        assert f'{param_names[key]} = {entry}' in search_bases
def test__default_search_base():
    """All-NULL search bases must contribute no sssd configuration lines."""
    params = utils.search_base_data_to_params(NONE_SAMPLE_SEARCH_BASE)
    assert len(params) == 0
def test__attribute_map_keys_passwd():
    """Each passwd attribute-map key renders exactly one sssd parameter line."""
    expected = {
        constants.ATTR_USER_OBJ: "ldap_user_object_class = (objectClass=canary)",
        constants.ATTR_USER_NAME: "ldap_user_name = canary",
        constants.ATTR_USER_UID: "ldap_user_uid_number = canary",
        constants.ATTR_USER_GID: "ldap_user_gid_number = canary",
        constants.ATTR_USER_GECOS: "ldap_user_gecos = canary",
        constants.ATTR_USER_HOMEDIR: "ldap_user_home_directory = canary",
        constants.ATTR_USER_SHELL: "ldap_user_shell = canary",
    }
    for key in constants.LDAP_PASSWD_MAP_KEYS:
        results = utils.attribute_maps_data_to_params({"passwd": {key: "canary"}})
        assert len(results) == 1
        assert key in expected, f"{key}: Unexpected key"
        assert results[0] == expected[key]
def test__attribute_map_keys_shadow():
    """Every shadow attribute-map key translates to exactly one sssd shadow parameter."""
    expected_params = {
        # the shadow object-class key deliberately produces an empty line
        constants.ATTR_SHADOW_OBJ: "",
        constants.ATTR_SHADOW_LAST_CHANGE: "ldap_user_shadow_last_change = canary",
        constants.ATTR_SHADOW_MIN: "ldap_user_shadow_min = canary",
        constants.ATTR_SHADOW_MAX: "ldap_user_shadow_max = canary",
        constants.ATTR_SHADOW_WARNING: "ldap_user_shadow_warning = canary",
        constants.ATTR_SHADOW_INACTIVE: "ldap_user_shadow_inactive = canary",
        constants.ATTR_SHADOW_EXPIRE: "ldap_user_shadow_expire = canary",
    }
    for key in constants.LDAP_SHADOW_MAP_KEYS:
        assert key in expected_params, f"{key}: Unexpected key"
        results = utils.attribute_maps_data_to_params({"shadow": {key: "canary"}})
        assert len(results) == 1
        assert results[0] == expected_params[key]
def test__attribute_map_keys_group():
    """Every group attribute-map key translates to exactly one sssd group parameter."""
    expected_params = {
        constants.ATTR_GROUP_OBJ: "ldap_group_object_class = (objectClass=canary)",
        constants.ATTR_GROUP_GID: "ldap_group_gid_number = canary",
        constants.ATTR_GROUP_MEMBER: "ldap_group_member = canary",
    }
    for key in constants.LDAP_GROUP_MAP_KEYS:
        assert key in expected_params, f"{key}: Unexpected key"
        results = utils.attribute_maps_data_to_params({"group": {key: "canary"}})
        assert len(results) == 1
        assert results[0] == expected_params[key]
def test__attribute_map_keys_netgroup():
    """Every netgroup attribute-map key translates to exactly one sssd netgroup parameter."""
    expected_params = {
        constants.ATTR_NETGROUP_OBJ: "ldap_netgroup_object_class = (objectClass=canary)",
        constants.ATTR_NETGROUP_MEMBER: "ldap_netgroup_member = canary",
        constants.ATTR_NETGROUP_TRIPLE: "ldap_netgroup_triple = canary",
    }
    for key in constants.LDAP_NETGROUP_MAP_KEYS:
        assert key in expected_params, f"{key}: Unexpected key"
        results = utils.attribute_maps_data_to_params({"netgroup": {key: "canary"}})
        assert len(results) == 1
        assert results[0] == expected_params[key]
| 6,512 | Python | .py | 155 | 32.812903 | 105 | 0.610111 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,397 | test_attachment_delegate_is_child_path.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/test_attachment_delegate_is_child_path.py | import pytest
from middlewared.plugins.smb import SMBFSAttachmentDelegate
from middlewared.pytest.unit.middleware import Middleware
@pytest.mark.parametrize('resource, path, check_parent, exact_match, is_child, expected_output', (
    ({'path_local': '/mnt/tank/test'}, '/mnt/tank', False, False, True, True),
    ({'path_local': '/mnt/tank/test'}, '/mnt/tank', False, True, True, False),
    ({'path_local': '/mnt/tank'}, '/mnt/tank', False, False, True, True),
    ({'path_local': '/mnt/test'}, '/mnt/tank', True, False, False, False),
    ({'path_local': '/mnt/tank/test'}, '/mnt/tank', True, False, True, True),
))
@pytest.mark.asyncio
async def test_attachment_is_child(resource, path, check_parent, exact_match, is_child, expected_output):
    """is_child_of_path honors check_parent/exact_match given a stubbed filesystem.is_child."""
    middleware = Middleware()
    # stub filesystem.is_child so the delegate sees the parametrized answer
    middleware['filesystem.is_child'] = lambda *_: is_child
    delegate = SMBFSAttachmentDelegate(middleware)
    result = await delegate.is_child_of_path(resource, path, check_parent, exact_match)
    assert result == expected_output
| 984 | Python | .py | 16 | 58.0625 | 112 | 0.699482 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,398 | test_enclosure2_query.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/enclosure/test_enclosure2_query.py | # Copyright (c) - iXsystems Inc.
#
# Licensed under the terms of the TrueNAS Enterprise License Agreement
# See the file LICENSE.IX for complete terms and conditions
import dataclasses
import json
import os
import pathlib
from unittest.mock import Mock
import pytest
from middlewared.pytest.unit.middleware import Middleware
from middlewared.plugins.enclosure_.enclosure2 import Enclosure2Service
# Directory holding the on-disk test cases; each sub-directory contains a
# mocked.json / expected.json pair consumed by the enc2_data fixture below.
test_case_dir = pathlib.Path(os.path.dirname(os.path.realpath(__file__))) / 'test-cases'
# Each immediate sub-directory is one test case; its name doubles as the pytest id.
test_case_paths = [i for i in test_case_dir.iterdir() if i.is_dir()]
test_case_names = [i.name for i in test_case_paths]
@dataclasses.dataclass
class Enc2Mocked:
    """Mocked middleware inputs for one enclosure2 test case (parsed from mocked.json)."""
    chassis: str  # returned by the 'truenas.get_chassis_hardware' mock
    labels: dict  # returned by the 'datastore.query' mock
    dmi: dict  # returned by the 'system.dmidecode_info' mock
    ses: list[dict]  # returned by the stubbed get_ses_enclosures
    nvme: list[dict]  # returned by the stubbed map_nvme
@dataclasses.dataclass
class Enc2Expected:
    """Expected enclosure2.query output for one test case (parsed from expected.json)."""
    expected: list[dict]  # compared verbatim against Enclosure2Service.query()
def hookit(obj):
    """json.load object hook that casts a dict's keys back to int.

    The JSON spec only permits string keys, so integer keys written out by
    json.dumps come back as strings on load.  The test fixtures expect
    integer keys, so convert them back here.  If any key is not integer-like
    the dict is returned completely unchanged.
    """
    remapped = {}
    try:
        for key, value in obj.items():
            remapped[int(key)] = value
    except ValueError:
        # at least one key isn't an int; hand the dict back untouched
        return obj
    return remapped
@pytest.fixture(params=test_case_paths, ids=test_case_names)
def enc2_data(request):
    """Load one test-case directory into an (Enc2Mocked, Enc2Expected) pair."""
    case_dir: pathlib.Path = request.param
    with open(case_dir / 'mocked.json') as mocked_file:
        mocked = json.load(mocked_file, object_hook=hookit)
    with open(case_dir / 'expected.json') as expected_file:
        expected = json.load(expected_file, object_hook=hookit)
    return Enc2Mocked(**mocked), Enc2Expected(expected)
def test_enclosure2_query(enc2_data):
    """enclosure2.query must return the expected mapping for each mocked test case."""
    mocked, expected = enc2_data

    svc = Enclosure2Service(Mock())
    svc.middleware = Middleware()
    svc.middleware._resolve_methods([Enclosure2Service], [])
    # middleware calls made during query() answer with per-test-case data
    svc.middleware['truenas.get_chassis_hardware'] = Mock(return_value=mocked.chassis)
    svc.middleware['truenas.is_ix_hardware'] = Mock(return_value=True)
    svc.middleware['datastore.query'] = Mock(return_value=mocked.labels)
    svc.middleware['system.dmidecode_info'] = Mock(return_value=mocked.dmi)
    svc.middleware['jbof.query'] = Mock(return_value=[])
    svc.middleware['enclosure2.map_jbof'] = Mock(return_value=[])
    # service-level helpers are stubbed directly on the instance
    svc.get_ses_enclosures = Mock(return_value=mocked.ses)
    svc.map_nvme = Mock(return_value=mocked.nvme)
    svc.map_jbof = Mock(return_value=[])

    assert svc.query() == expected.expected
| 2,741 | Python | .py | 65 | 37.384615 | 89 | 0.71219 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,399 | test_m_series_nvme.py | truenas_middleware/src/middlewared/middlewared/pytest/unit/plugins/enclosure/test_m_series_nvme.py | # Copyright (c) - iXsystems Inc.
#
# Licensed under the terms of the TrueNAS Enterprise License Agreement
# See the file LICENSE.IX for complete terms and conditions
from unittest.mock import Mock, patch
from middlewared.plugins.enclosure_.nvme import EnclosureService
def test__m_series_nvme_enclosures(fs):
    """map_nvme() on an M60 must report the rear NVMe drive behind PCI slot 0-1.

    ``fs`` is a fake-filesystem fixture (presumably pyfakefs — confirm against
    conftest), so the PCI slot address file below lives in memory, not on real
    sysfs.
    """
    # Slot 0-1 resolves to bus address 0000:60:00 — the same address the
    # mocked udev device tree reports below, tying slot -> device together.
    fs.create_file("/sys/bus/pci/slots/0-1/address", contents="0000:60:00\n")
    # Mock the udev Context so device enumeration yields a single ACPI entry
    # whose sys_path matches the key handled by Devices.from_path below.
    with patch("middlewared.plugins.enclosure_.nvme.Context") as Context:
        Context.return_value = Mock(
            list_devices=Mock(
                return_value=[
                    Mock(
                        attributes={"path": b"\\_SB_.PC03.BR3A"},
                        sys_path="/sys/devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:03/device:c5",
                    ),
                ],
            )
        )
        with patch("middlewared.plugins.enclosure_.nvme.Devices") as Devices:
            # Fake block device nvme1n1 whose grandparent carries the PCI bus
            # address written to the fake slot file above.
            child = Mock(properties={"SUBSYSTEM": "block"},
                         sys_name="nvme1n1")
            child.parent = Mock(sys_name="nvmen1")
            child.parent.parent = Mock(sys_name="0000:60:00.0")
            # from_path only answers for the physical_node path of the mocked
            # ACPI device; any other path raises KeyError (test would fail).
            Devices.from_path = Mock(
                side_effect=lambda context, path: {
                    "/sys/devices/LNXSYSTM:00/LNXSYBUS:00/PNP0A08:03/device:c5/physical_node": Mock(
                        children=[child]
                    )
                }[path],
            )
            es = EnclosureService(Mock())
            es.middleware = Mock()
            # product name drives the M60-specific enclosure parameters asserted below
            es.middleware.call_sync = Mock(return_value={'system-product-name': 'TRUENAS-M60-HA'})
            es.fake_nvme_enclosure = Mock()
            es.map_nvme()
            # slot 1 of the synthetic 4-bay rear enclosure maps to nvme1n1
            es.fake_nvme_enclosure.assert_called_once_with(
                "m60_plx_enclosure",
                "Rear NVME U.2 Hotswap Bays",
                "M60 Series",
                4,
                {1: "nvme1n1"},
            )
| 1,894 | Python | .py | 43 | 30.767442 | 100 | 0.546095 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |