id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
24,000 | 2023-07-15_03-07_remove_novnc_support.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-07-15_03-07_remove_novnc_support.py | """
Remove novnc support from display devices
Revision ID: 9a94fb8c0206
Revises: a1917e15f409
Create Date: 2023-07-15 03:07:14.561629+00:00
"""
import json
from alembic import op
from collections import defaultdict
revision = '9a94fb8c0206'
down_revision = 'a1917e15f409'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
to_remove_ids = []
vms_mapping = defaultdict(list)
for row in map(dict, conn.execute("SELECT * FROM vm_device WHERE dtype = 'DISPLAY'").fetchall()):
vms_mapping[row['vm_id']].append(row)
for devices in vms_mapping.values():
if len(devices) == 1:
device = devices[0]
device['attributes'] = json.loads(device['attributes'])
if device['attributes']['type'] == 'VNC':
device['attributes']['type'] = 'SPICE'
conn.execute('UPDATE vm_device SET attributes = ? WHERE id = ?', (
json.dumps(device['attributes']), device['id']
))
else:
for device in devices:
device['attributes'] = json.loads(device['attributes'])
if device['attributes']['type'] == 'VNC':
to_remove_ids.append(device['id'])
for remove_id in to_remove_ids:
conn.execute('DELETE FROM vm_device WHERE id = ?', (remove_id,))
def downgrade():
pass
| 1,387 | Python | .py | 37 | 30.027027 | 101 | 0.612397 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,001 | 2022-12-14_00-45_nfs_protocols.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2022-12-14_00-45_nfs_protocols.py | """Replace v4 with protocols in nfs service configuration.
Revision ID: 60de23d5cd17
Revises: 136adf794fed
Create Date: 2022-12-14 00:45:57.157120+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '60de23d5cd17'
down_revision = '136adf794fed'
branch_labels = None
depends_on = None
def upgrade():
# Replace nfs_srv_v4 (a boolean) with nfs_srv_protocols
# First read the current state, so that we can tweak the default value of
# the new column based upon the values.
conn = op.get_bind()
share_count = conn.execute("SELECT COUNT(id) FROM sharing_nfs_share").first()[0]
nfs_srv_v4 = conn.execute("SELECT nfs_srv_v4 FROM services_nfs").first()[0]
# Want to check whether we can change the default to include
with op.batch_alter_table('services_nfs', schema=None) as batch_op:
if share_count == 0 or nfs_srv_v4:
batch_op.add_column(sa.Column('nfs_srv_protocols', sa.TEXT(), nullable=False, server_default='["NFSV3", "NFSV4"]'))
else:
batch_op.add_column(sa.Column('nfs_srv_protocols', sa.TEXT(), nullable=False, server_default='["NFSV3"]'))
batch_op.drop_column('nfs_srv_v4')
def downgrade():
conn = op.get_bind()
nfs_srv_protocols = conn.execute("SELECT nfs_srv_protocols FROM services_nfs").first()[0]
with op.batch_alter_table('services_nfs', schema=None) as batch_op:
if "NFSV4" in nfs_srv_protocols:
batch_op.add_column(sa.Column('nfs_srv_v4', sa.BOOLEAN(), nullable=False, server_default='1'))
else:
batch_op.add_column(sa.Column('nfs_srv_v4', sa.BOOLEAN(), nullable=False, server_default='0'))
batch_op.drop_column('nfs_srv_protocols')
| 1,750 | Python | .py | 35 | 44.714286 | 127 | 0.691496 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,002 | 2023-05-23_19-45_2fa_ad_support.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-05-23_19-45_2fa_ad_support.py | """
2FA AD Support
Revision ID: cf91fa3d0696
Revises: 2c0646015ca5
Create Date: 2023-05-23 19:45:17.935672+00:00
"""
from alembic import op
import sqlalchemy as sa
revision = 'cf91fa3d0696'
down_revision = '2c0646015ca5'
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table('account_twofactor_user_auth', schema=None) as batch_op:
batch_op.add_column(sa.Column('user_sid', sa.String(length=255), nullable=True))
batch_op.create_index(batch_op.f('ix_account_twofactor_user_auth_user_sid'), ['user_sid'], unique=True)
def downgrade():
pass
| 596 | Python | .py | 18 | 30.388889 | 111 | 0.744308 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,003 | 2023-03-24_22-57_2fa_users.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-03-24_22-57_2fa_users.py | """
2FA support for multiple users
Revision ID: 55836e7dac39
Revises: c55b034b7654
Create Date: 2023-03-24 22:57:01.757983+00:00
"""
from alembic import op
import sqlalchemy as sa
revision = '55836e7dac39'
down_revision = 'c55b034b7654'
branch_labels = None
depends_on = None
def upgrade():
# We want 3 things here:
# 1) Have a model where we can keep user 2fa data
# 2) Have a model where global 2fa configurations can be enforced
# 3) Migrate
op.create_table(
'account_twofactor_user_auth',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('secret', sa.String(length=16), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
['user_id'], ['account_bsdusers.id'], name=op.f('fk_account_twofactor_user_auth_user_id_account_bsdusers'),
ondelete='CASCADE'
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_account_twofactor_user_auth')),
sqlite_autoincrement=True,
)
with op.batch_alter_table('account_twofactor_user_auth', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_account_twofactor_user_auth_user_id'), ['user_id'], unique=False)
conn = op.get_bind()
existing_secret_record = conn.execute('SELECT secret FROM system_twofactorauthentication').fetchone()
existing_secret = existing_secret_record['secret'] if existing_secret_record else None
for row in map(dict, conn.execute('SELECT id,bsdusr_uid FROM account_bsdusers').fetchall()):
row = dict(row)
secret = existing_secret if row['bsdusr_uid'] == 0 else None
conn.execute('INSERT INTO account_twofactor_user_auth (secret,user_id) VALUES (?,?)', (secret, row['id']))
with op.batch_alter_table('system_twofactorauthentication', schema=None) as batch_op:
batch_op.drop_column('secret')
def downgrade():
pass
| 1,913 | Python | .py | 42 | 39.97619 | 119 | 0.692308 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,004 | 2023-12-28_10-16_delete_link_addresses_for_non_physical_interfaces.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-12-28_10-16_delete_link_addresses_for_non_physical_interfaces.py | """Delete link addresses for non-physical interfaces
Revision ID: 7a5ebab17483
Revises: 2eafc0aa58a0
Create Date: 2023-12-28 10:16:14.671575+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7a5ebab17483'
down_revision = '2eafc0aa58a0'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("DELETE FROM network_interface_link_address "
"WHERE (interface LIKE 'bond%' OR interface LIKE 'br%' OR interface LIKE 'vlan%')")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
| 746 | Python | .py | 21 | 32.285714 | 98 | 0.714086 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,005 | 2023-03-10_09-30_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-03-10_09-30_merge.py | """Merge
Revision ID: 91a4e09f5b7a
Revises: 54beb9f4bdf5, b690e5ae986d
Create Date: 2023-03-10 09:30:00.629175+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '91a4e09f5b7a'
down_revision = ('54beb9f4bdf5', 'b690e5ae986d')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,006 | 2023-10-02_21-57_extent_serial.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-10-02_21-57_extent_serial.py | """Ensure iSCSI extents have a non-empty serial number.
Revision ID: fa33f4ae6427
Revises: b06ea181e7dd
Create Date: 2023-10-02 21:57:49.452962+00:00
"""
from alembic import op
import secrets
# revision identifiers, used by Alembic.
revision = 'fa33f4ae6427'
down_revision = 'b06ea181e7dd'
branch_labels = None
depends_on = None
def generate_serial(used_serials, tries=10):
for i in range(tries):
serial = secrets.token_hex()[:15]
if serial not in used_serials:
return serial
def upgrade():
# We wish to ensure that every iSCSI extent has a (unique) serial number
# assigned (but that said we will only change the serial number if
# previously empty)
conn = op.get_bind()
tofix = []
for (ident,) in conn.execute("SELECT id FROM services_iscsitargetextent WHERE iscsi_target_extent_serial == null or iscsi_target_extent_serial == ''"):
tofix.append(ident)
if tofix:
serials = []
for (serial,) in conn.execute("SELECT iscsi_target_extent_serial FROM services_iscsitargetextent"):
if serial not in [None, '']:
serials.append(serial)
for ident in tofix:
serial = generate_serial(serials)
if serial:
conn.execute(
"UPDATE services_iscsitargetextent SET iscsi_target_extent_serial = ? WHERE id = ?",
serial, ident
)
serials.append(serial)
def downgrade():
pass
| 1,501 | Python | .py | 40 | 30.375 | 155 | 0.653343 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,007 | 2023-05-21_12-12_remove_tftp.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-05-21_12-12_remove_tftp.py | """Remove TFTP
Revision ID: 2c0646015ca5
Revises: a70b230c1675
Create Date: 2023-05-21 12:12:50.254773+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2c0646015ca5'
down_revision = 'a70b230c1675'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("DELETE FROM services_services WHERE srv_service = 'tftp'")
op.drop_table('services_tftp')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('services_tftp',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('tftp_directory', sa.VARCHAR(length=255), nullable=False),
sa.Column('tftp_newfiles', sa.BOOLEAN(), nullable=False),
sa.Column('tftp_port', sa.INTEGER(), nullable=False),
sa.Column('tftp_username', sa.VARCHAR(length=120), nullable=False),
sa.Column('tftp_umask', sa.VARCHAR(length=120), nullable=False),
sa.Column('tftp_options', sa.VARCHAR(length=120), nullable=False),
sa.Column('tftp_host', sa.VARCHAR(length=120), nullable=False),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
| 1,250 | Python | .py | 31 | 36.870968 | 74 | 0.70768 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,008 | 2023-01-20_00-15_k3s_metrics_server.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-01-20_00-15_k3s_metrics_server.py | """Add apps metrics server
Revision ID: 9c44b7e06dff
Revises: 2bef686cbf7d
Create Date: 2023-01-20 00:20:00.702138+00:00
"""
from alembic import op
import sqlalchemy as sa
revision = '9c44b7e06dff'
down_revision = '2bef686cbf7d'
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table('services_kubernetes', schema=None) as batch_op:
batch_op.add_column(sa.Column('metrics_server', sa.Boolean(), server_default='0', nullable=False))
def downgrade():
pass
| 502 | Python | .py | 16 | 28.9375 | 106 | 0.759916 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,009 | 2023-06-07_12-24_remove_crash_reporting.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-07_12-24_remove_crash_reporting.py | """remove crash reporting
Revision ID: 3c011188cbe2
Revises: d2a26c29efca
Create Date: 2023-06-07 12:47:24.385652+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3c011188cbe2'
down_revision = 'd2a26c29efca'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('system_settings', schema=None) as batch_op:
batch_op.drop_column('stg_crash_reporting')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('system_settings', schema=None) as batch_op:
batch_op.add_column(sa.Column('stg_crash_reporting', sa.BOOLEAN(), nullable=True))
# ### end Alembic commands ###
| 845 | Python | .py | 22 | 35.136364 | 90 | 0.719557 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,010 | 2023-03-15_21-11_ssh_password_enabled.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-03-15_21-11_ssh_password_enabled.py | """SSH password enabled
Revision ID: 1c060aa856ca
Revises: 91a4e09f5b7a
Create Date: 2023-02-12 10:45:59.865895+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1c060aa856ca'
down_revision = '91a4e09f5b7a'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('account_bsdusers', schema=None) as batch_op:
batch_op.add_column(sa.Column('bsdusr_ssh_password_enabled', sa.Boolean(), nullable=False, server_default='0'))
conn = op.get_bind()
for passwordauth, rootlogin, adminlogin in conn.execute("""
SELECT ssh_passwordauth, ssh_rootlogin, ssh_adminlogin FROM services_ssh
""").fetchall():
if int(passwordauth):
op.execute("UPDATE account_bsdusers SET bsdusr_ssh_password_enabled = IIF(bsdusr_password_disabled, 0, 1) "
"WHERE bsdusr_builtin = 0")
op.execute(f"UPDATE account_bsdusers SET bsdusr_ssh_password_enabled = {int(rootlogin)} WHERE bsdusr_uid = 0")
op.execute(f"UPDATE account_bsdusers SET bsdusr_ssh_password_enabled = {int(adminlogin)} WHERE bsdusr_uid = 950")
with op.batch_alter_table('services_ssh', schema=None) as batch_op:
batch_op.drop_column('ssh_adminlogin')
batch_op.drop_column('ssh_rootlogin')
batch_op.add_column(sa.Column('ssh_password_login_groups', sa.TEXT(), nullable=False, server_default='[]'))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('services_ssh', schema=None) as batch_op:
batch_op.add_column(sa.Column('ssh_rootlogin', sa.BOOLEAN(), nullable=False))
batch_op.add_column(sa.Column('ssh_adminlogin', sa.BOOLEAN(), server_default=sa.text("'1'"), nullable=False))
with op.batch_alter_table('account_bsdusers', schema=None) as batch_op:
batch_op.drop_column('bsdusr_ssh_password_enabled')
# ### end Alembic commands ###
| 2,084 | Python | .py | 38 | 48.868421 | 125 | 0.694882 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,011 | 2023-12-22_11-29_delete_onedrive_cloud_sync_tasks.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-12-22_11-29_delete_onedrive_cloud_sync_tasks.py | """Delete OneDrive cloud sync tasks
Revision ID: 2eafc0aa58a0
Revises: 8f8942557260
Create Date: 2023-12-22 11:29:54.877781+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2eafc0aa58a0'
down_revision = '8f8942557260'
branch_labels = None
depends_on = None
def upgrade():
conn = op.get_bind()
conn.execute("DELETE FROM tasks_cloudsync WHERE credential_id in (SELECT id FROM system_cloudcredentials WHERE "
"provider = 'ONEDRIVE')")
conn.execute("DELETE FROM system_cloudcredentials WHERE provider = 'ONEDRIVE'")
def downgrade():
pass
| 636 | Python | .py | 19 | 30.315789 | 116 | 0.753695 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,012 | 2023-01-16_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-01-16_merge.py | """Merge
Revision ID: 519c9d598091
Revises: a5584351bdb8, 67ee25d22253
Create Date: 2023-01-16 10:51:51.496712+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '519c9d598091'
down_revision = ('a5584351bdb8', '67ee25d22253')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,013 | 2023-02-27_16-02_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-02-27_16-02_merge.py | """Merge
Revision ID: 54beb9f4bdf5
Revises: 60a953d6da3a, 9694ff0f4de6
Create Date: 2023-02-27 16:02:00.629175+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '54beb9f4bdf5'
down_revision = ('60a953d6da3a', '9694ff0f4de6')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,014 | 2023-02-17_12-50_webui_attribute.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-02-17_12-50_webui_attribute.py | """webui_attribute
Revision ID: 60a953d6da3a
Revises: 653ea1a2ba57
Create Date: 2023-02-17 12:50:10.441696+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '60a953d6da3a'
down_revision = '653ea1a2ba57'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_bsdusers_webui_attribute',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uid', sa.Integer(), nullable=False),
sa.Column('attributes', sa.TEXT(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_account_bsdusers_webui_attribute')),
sa.UniqueConstraint('uid', name=op.f('uq_account_bsdusers_webui_attribute_uid')),
sqlite_autoincrement=True
)
op.execute("INSERT INTO account_bsdusers_webui_attribute (uid, attributes) "
"SELECT bsdusr_uid, bsdusr_attributes FROM account_bsdusers WHERE bsdusr_attributes != '{}'")
with op.batch_alter_table('account_bsdusers', schema=None) as batch_op:
batch_op.drop_column('bsdusr_attributes')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('account_bsdusers', schema=None) as batch_op:
batch_op.add_column(sa.Column('bsdusr_attributes', sa.TEXT(), nullable=False))
op.drop_table('account_bsdusers_webui_attribute')
# ### end Alembic commands ###
| 1,500 | Python | .py | 33 | 41.272727 | 108 | 0.713795 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,015 | 2023-06-16_fips.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-16_fips.py | """
FIPS Support
Revision ID: c0b117cde6b8
Revises: 0893833a57be
Create Date: 2023-06-16 22:57:01.757983+00:00
"""
from alembic import op
import sqlalchemy as sa
revision = 'c0b117cde6b8'
down_revision = '0893833a57be'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'system_security',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('enable_fips', sa.Boolean(), nullable=False, server_default='0'),
sa.PrimaryKeyConstraint('id'),
)
def downgrade():
pass
| 539 | Python | .py | 21 | 22.190476 | 83 | 0.707843 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,016 | 2023-01-27_16-53_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-01-27_16-53_merge.py | """Merge
Revision ID: eaadca673130
Revises: e1c7ee02b545, af5efb72c74f
Create Date: 2023-01-27 16:53:39.823376+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'eaadca673130'
down_revision = ('e1c7ee02b545', 'af5efb72c74f')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,017 | 2023-06-23_08-22_ssh_cipher.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-23_08-22_ssh_cipher.py | """Remove SSH credentials cipher
Revision ID: 0e5949153c20
Revises: c0b117cde6b8
Create Date: 2023-06-23 08:22:39.080102+00:00
"""
import json
from alembic import op
import sqlalchemy as sa
from middlewared.plugins.pwenc import encrypt, decrypt
# revision identifiers, used by Alembic.
revision = '0e5949153c20'
down_revision = 'c0b117cde6b8'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
for c in map(dict, conn.execute("SELECT * FROM system_keychaincredential WHERE type = 'SSH_CREDENTIALS'").fetchall()):
if attributes := decrypt(c["attributes"]):
attributes = json.loads(attributes)
del attributes["cipher"]
conn.execute("UPDATE system_keychaincredential SET attributes = ? WHERE id = ?", (
encrypt(json.dumps(attributes)), c["id"]
))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
| 1,086 | Python | .py | 29 | 32.655172 | 122 | 0.685769 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,018 | 2023-05-10_19-22_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-05-10_19-22_merge.py | """Merge
Revision ID: 58a555dd9612
Revises: b2775ae20e88, 08539dfd0500
Create Date: 2023-05-10 19:22:29.283266+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '58a555dd9612'
down_revision = ('b2775ae20e88', '08539dfd0500')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,019 | 2023-02-07_15-44_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-02-07_15-44_merge.py | """Merge
Revision ID: 653ea1a2ba57
Revises: eaadca673130, 1f39ac35aaeb
Create Date: 2023-02-07 15:44:00.629175+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '653ea1a2ba57'
down_revision = ('eaadca673130', '1f39ac35aaeb')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,020 | 2024-01-02_15-15_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2024-01-02_15-15_merge.py | """Merge
Revision ID: bc770461df0d
Revises: 61095406c3a0, 7a5ebab17483
Create Date: 2024-01-02 15:15:55.324871+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bc770461df0d'
down_revision = ('61095406c3a0', '7a5ebab17483')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,021 | 2023-06-04_00-20_remove_geli_keys.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-04_00-20_remove_geli_keys.py | """
Remove geli related keys
Revision ID: d2a26c29efca
Revises: 1519ee5b6e29
Create Date: 2023-06-04 00:20:01.757983+00:00
"""
from alembic import op
revision = 'd2a26c29efca'
down_revision = '1519ee5b6e29'
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table('storage_volume', schema=None) as batch_op:
batch_op.drop_column('vol_encrypt')
batch_op.drop_column('vol_encryptkey')
op.drop_table('storage_encrypteddisk')
def downgrade():
pass
| 504 | Python | .py | 18 | 24.944444 | 73 | 0.744235 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,022 | 2023-10-24_14-28_network_interface_link_address.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-10-24_14-28_network_interface_link_address.py | """network_interface_link_address
Revision ID: 304e43883592
Revises: f7d06a57f8a1
Create Date: 2023-10-24 14:28:25.413126+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '304e43883592'
down_revision = 'f7d06a57f8a1'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('network_interface_link_address',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('interface', sa.String(length=300), nullable=False),
sa.Column('link_address', sa.String(length=17), nullable=True),
sa.Column('link_address_b', sa.String(length=17), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_network_interface_link_address')),
sqlite_autoincrement=True
)
op.execute("INSERT INTO network_interface_link_address (interface, link_address, link_address_b) "
"SELECT int_interface, int_link_address, int_link_address_b FROM network_interfaces "
"WHERE NOT (int_interface LIKE 'bond%' OR int_interface LIKE 'br%' "
"OR int_interface LIKE 'lagg%' OR int_interface LIKE 'vlan%')")
with op.batch_alter_table('network_interfaces', schema=None) as batch_op:
batch_op.drop_column('int_link_address')
batch_op.drop_column('int_link_address_b')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('network_interfaces', schema=None) as batch_op:
batch_op.add_column(sa.Column('int_link_address_b', sa.VARCHAR(length=17), nullable=True))
batch_op.add_column(sa.Column('int_link_address', sa.VARCHAR(length=17), nullable=True))
op.drop_table('network_interface_link_address')
# ### end Alembic commands ###
| 1,860 | Python | .py | 37 | 45.189189 | 102 | 0.702151 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,023 | 2023-07-13_20-35_k8s_host_path_validation.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-07-13_20-35_k8s_host_path_validation.py | """
Remove validate_host_path column from k8s column
Revision ID: a1917e15f409
Revises: 593f8ded154e
Create Date: 2023-07-13 20:35:14.561629+00:00
"""
from alembic import op
import sqlalchemy as sa
revision = 'a1917e15f409'
down_revision = '593f8ded154e'
branch_labels = None
depends_on = None
def upgrade():
with op.batch_alter_table('services_kubernetes', schema=None) as batch_op:
batch_op.drop_column('validate_host_path')
def downgrade():
pass
| 473 | Python | .py | 17 | 25.411765 | 78 | 0.770089 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,024 | 2023-06-14_13-50_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-14_13-50_merge.py | """Merge
Revision ID: 0893833a57be
Revises: 3c011188cbe2, 1490eef1fa8d
Create Date: 2023-06-14 13:50:49.024034+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '0893833a57be'
down_revision = ('3c011188cbe2', '1490eef1fa8d')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,025 | 2023-05-02_08-31_remove_s3.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-05-02_08-31_remove_s3.py | """Remove S3
Revision ID: b2775ae20e88
Revises: 55836e7dac39
Create Date: 2023-05-02 08:31:50.308732+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b2775ae20e88'
down_revision = '55836e7dac39'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("DELETE FROM services_services WHERE srv_service = 's3'")
op.drop_table('services_s3')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('services_s3',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('s3_bindip', sa.VARCHAR(length=128), nullable=False),
sa.Column('s3_bindport', sa.SMALLINT(), nullable=False),
sa.Column('s3_access_key', sa.VARCHAR(length=128), nullable=False),
sa.Column('s3_secret_key', sa.VARCHAR(length=128), nullable=False),
sa.Column('s3_mode', sa.VARCHAR(length=120), nullable=False),
sa.Column('s3_disks', sa.VARCHAR(length=255), nullable=False),
sa.Column('s3_certificate_id', sa.INTEGER(), nullable=True),
sa.Column('s3_browser', sa.BOOLEAN(), nullable=False),
sa.Column('s3_tls_server_uri', sa.VARCHAR(length=128), nullable=True),
sa.Column('s3_console_bindport', sa.SMALLINT(), server_default=sa.text("'9001'"), nullable=False),
sa.ForeignKeyConstraint(['s3_certificate_id'], ['system_certificate.id'], ),
sa.PrimaryKeyConstraint('id')
)
with op.batch_alter_table('services_s3', schema=None) as batch_op:
batch_op.create_index('ix_services_s3_s3_certificate_id', ['s3_certificate_id'], unique=False)
# ### end Alembic commands ###
| 1,733 | Python | .py | 37 | 43 | 102 | 0.701245 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,026 | 2023-07-07_08-46_cloud_backup.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-07-07_08-46_cloud_backup.py | """Cloud Backup
Revision ID: 1cdfe58ae329
Revises: bd5cd1d802c7
Create Date: 2023-06-07 08:46:16.249725+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1cdfe58ae329'
down_revision = 'bd5cd1d802c7'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tasks_cloud_backup',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('description', sa.String(length=150), nullable=False),
sa.Column('path', sa.String(length=255), nullable=False),
sa.Column('attributes', sa.TEXT(), nullable=False),
sa.Column('minute', sa.String(length=100), nullable=False),
sa.Column('hour', sa.String(length=100), nullable=False),
sa.Column('daymonth', sa.String(length=100), nullable=False),
sa.Column('month', sa.String(length=100), nullable=False),
sa.Column('dayweek', sa.String(length=100), nullable=False),
sa.Column('pre_script', sa.Text(), nullable=False),
sa.Column('post_script', sa.Text(), nullable=False),
sa.Column('snapshot', sa.Boolean(), nullable=False),
sa.Column('bwlimit', sa.TEXT(), nullable=False),
sa.Column('include', sa.TEXT(), nullable=False),
sa.Column('exclude', sa.TEXT(), nullable=False),
sa.Column('transfers', sa.Integer(), nullable=True),
sa.Column('args', sa.TEXT(), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.Column('job', sa.TEXT(), nullable=False),
sa.Column('password', sa.TEXT(), nullable=False),
sa.Column('credential_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['credential_id'], ['system_cloudcredentials.id'], name=op.f('fk_tasks_cloud_backup_credential_id_system_cloudcredentials')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_tasks_cloud_backup')),
sqlite_autoincrement=True
)
with op.batch_alter_table('tasks_cloud_backup', schema=None) as batch_op:
batch_op.create_index(batch_op.f('ix_tasks_cloud_backup_credential_id'), ['credential_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
    """Remove the cloud backup task table created by :func:`upgrade`."""
    with op.batch_alter_table('tasks_cloud_backup', schema=None) as backup_table:
        backup_table.drop_index(backup_table.f('ix_tasks_cloud_backup_credential_id'))
    op.drop_table('tasks_cloud_backup')
| 2,449 | Python | .py | 49 | 45.755102 | 153 | 0.697908 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,027 | 2023-01-15_10-51_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-01-15_10-51_merge.py | """Merge
Revision ID: a5584351bdb8
Revises: d81ede53eb14, c86a02e21e9d
Create Date: 2023-01-15 10:51:51.496712+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a5584351bdb8'
down_revision = ('d81ede53eb14', 'c86a02e21e9d')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,028 | 2023-03-22_19-52_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-03-22_19-52_merge.py | """Merge
Revision ID: c55b034b7654
Revises: 7310292225d3, 7035fa70c0c0
Create Date: 2023-03-22 19:52:57.080579+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'c55b034b7654'
down_revision = ('7310292225d3', '7035fa70c0c0')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,029 | 2023-06-27_17-15_no_cert_request.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-27_17-15_no_cert_request.py | """Remove ftp_tls_opt_no_cert_request
Revision ID: bd5cd1d802c7
Revises: bd11aee1c4b7
Create Date: 2023-06-27 17:15:14.561629+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bd5cd1d802c7'
down_revision = 'bd11aee1c4b7'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the obsolete ``ftp_tls_opt_no_cert_request`` flag from the FTP service config."""
    with op.batch_alter_table('services_ftp', schema=None) as ftp_table:
        ftp_table.drop_column('ftp_tls_opt_no_cert_request')
def downgrade():
    """Restore the ``ftp_tls_opt_no_cert_request`` boolean column."""
    restored_column = sa.Column('ftp_tls_opt_no_cert_request', sa.BOOLEAN(), nullable=False)
    with op.batch_alter_table('services_ftp', schema=None) as ftp_table:
        ftp_table.add_column(restored_column)
| 868 | Python | .py | 22 | 36.181818 | 99 | 0.715311 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,030 | 2023-11-10_23-19_add_exporting_table_for_reporting.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-11-10_23-19_add_exporting_table_for_reporting.py | """Add exporting table for reporting
Revision ID: 8f8942557260
Revises: 304e43883592
Create Date: 2023-11-10 23:19:42.678757+00:00
"""
import json
import sqlalchemy as sa
from alembic import op
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.sql import text
revision = '8f8942557260'
down_revision = '304e43883592'
branch_labels = None
depends_on = None
def get_hostname(conn):
    """Best-effort lookup of the configured hostname.

    Falls back to ``'truenas'`` when the network configuration table is
    missing, empty, or unreadable for any reason.
    """
    try:
        row = conn.execute('SELECT * FROM network_globalconfiguration').fetchone()
        return dict(row)['gc_hostname']
    except Exception:
        # broad on purpose: a migration must not fail over a missing hostname
        return 'truenas'
def upgrade():
    """Replace the legacy ``system_reporting`` graphite setting with exporter rows.

    Creates the generic ``reporting_exporters`` table, then converts each
    non-empty graphite destination from ``system_reporting`` into a GRAPHITE
    exporter record before dropping the old table.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    if 'reporting_exporters' in inspector.get_table_names():
        return  # Skip if already migrated
    op.create_table(
        'reporting_exporters',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('type', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        # JSON-encoded, exporter-type-specific configuration
        sa.Column('attributes', sa.TEXT(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_reporting_exports'))
    )
    hostname = get_hostname(conn)
    # only migrate rows whose graphite destination is a non-empty string
    for graphite_ip in filter(lambda ip: bool(ip[0]), conn.execute('SELECT graphite FROM system_reporting').fetchall()):
        attributes = {
            'destination_ip': graphite_ip[0],
            'destination_port': 2003,
            'prefix': 'scale',
            'hostname': hostname,
            'update_every': 1,
            'buffer_on_failures': 10,
            'send_names_instead_of_ids': True,
            'matching_charts': '*'
        }
        # text() is used so the named bind parameters are handled by SQLAlchemy
        query = text(
            'INSERT INTO reporting_exporters (enabled, type, name, attributes) VALUES '
            '(:enabled, :type, :name, :attributes)'
        )
        conn.execute(
            query,
            enabled=True,
            type='GRAPHITE',
            name='netdata',
            attributes=json.dumps(attributes)
        )
    op.drop_table('system_reporting')
def downgrade():
    """Irreversible: the legacy ``system_reporting`` table is not recreated."""
    pass
| 2,082 | Python | .py | 59 | 28.084746 | 120 | 0.634146 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,031 | 2023-05-19_18-00_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-05-19_18-00_merge.py | """Merge
Revision ID: a70b230c1675
Revises: 58a555dd9612, 441144fa08e7
Create Date: 2023-05-19 18:00:29.283266+00:00
"""
# revision identifiers, used by Alembic.
revision = 'a70b230c1675'
down_revision = ('58a555dd9612', '441144fa08e7')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 333 | Python | .py | 14 | 21.714286 | 48 | 0.766026 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,032 | 2024-01-02_15-09_delete_link_addresses_for_non_physical_interfaces.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2024-01-02_15-09_delete_link_addresses_for_non_physical_interfaces.py | """Delete link addresses for non-physical interfaces
Revision ID: 61095406c3a0
Revises: 8f8942557260
Create Date: 2024-01-02 15:09:43.283434+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '61095406c3a0'
down_revision = '8f8942557260'
branch_labels = None
depends_on = None
def upgrade():
    """Delete persisted link-layer addresses recorded for virtual interfaces.

    Only bond, bridge and VLAN entries are removed; physical interfaces keep
    their stored link addresses.
    """
    delete_sql = (
        "DELETE FROM network_interface_link_address "
        "WHERE (interface LIKE 'bond%' OR interface LIKE 'br%' OR interface LIKE 'vlan%')"
    )
    op.execute(delete_sql)
def downgrade():
    """Irreversible: deleted link-address rows cannot be restored."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 746 | Python | .py | 21 | 32.285714 | 98 | 0.714086 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,033 | 2023-09-01_11-11_remove_script_text.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-09-01_11-11_remove_script_text.py | """
Remove unused script_text parameter in init_shutdown_script.py
Revision ID: 22a23dafd7de
Revises: 1edf1b6b04db
Create Date: 2023-09-01 11:11:00.864204+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '22a23dafd7de'
down_revision = '1edf1b6b04db'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the unused ``ini_script_text`` column from init/shutdown script tasks."""
    with op.batch_alter_table('tasks_initshutdown', schema=None) as task_table:
        task_table.drop_column('ini_script_text')
def downgrade():
    """Irreversible: the dropped column's contents are not restored."""
    pass
| 626 | Python | .py | 20 | 28.65 | 77 | 0.745394 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,034 | 2023-05-31_19-37_normalize_2fa_records.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-05-31_19-37_normalize_2fa_records.py | """
Normalize 2FA AD records
Revision ID: 1519ee5b6e29
Revises: a63a2c20632a
Create Date: 2023-05-31 19:37:17.935672+00:00
"""
from alembic import op
revision = '1519ee5b6e29'
down_revision = 'a63a2c20632a'
branch_labels = None
depends_on = None
def upgrade():
    """Ensure every local user account has a row in the per-user 2FA table.

    Users without an existing ``account_twofactor_user_auth`` record get one
    inserted with a NULL secret (i.e. 2FA not yet configured).
    """
    conn = op.get_bind()
    users_with_2fa = {
        entry['user_id']
        for entry in map(dict, conn.execute('SELECT user_id FROM account_twofactor_user_auth').fetchall())
    }
    for user in map(dict, conn.execute('SELECT id FROM account_bsdusers').fetchall()):
        if user['id'] in users_with_2fa:
            continue
        conn.execute(
            'INSERT INTO account_twofactor_user_auth (secret,user_id) VALUES (?,?)',
            (None, user['id'])
        )
def downgrade():
    """No-op: inserted placeholder 2FA rows are harmless to keep."""
    pass
| 942 | Python | .py | 27 | 30.185185 | 116 | 0.683572 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,035 | 2023-08-21_17-11_remove_NFS_quiet.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-08-21_17-11_remove_NFS_quiet.py | """
Remove unused NFS 'quiet' parameter
Revision ID: 1edf1b6b04db
Revises: 9a94fb8c0206
Create Date: 2023-08-21 17:11:31.864204+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1edf1b6b04db'
down_revision = '9a94fb8c0206'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the unused ``nfs_quiet`` option from NFS shares."""
    with op.batch_alter_table('sharing_nfs_share', schema=None) as share_table:
        share_table.drop_column('nfs_quiet')
def downgrade():
    """Irreversible: the dropped column's contents are not restored."""
    pass
| 592 | Python | .py | 20 | 26.95 | 76 | 0.735346 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,036 | 2023-03-16_14-41_merge_migration.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-03-16_14-41_merge_migration.py | """ Merge migration for netbiosname change
Revision ID: 7310292225d3
Revises: 1c060aa856ca, 3df90537bffa
Create Date: 2023-03-16 14:41:49.067099+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7310292225d3'
down_revision = ('1c060aa856ca', '3df90537bffa')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 415 | Python | .py | 16 | 23.9375 | 48 | 0.785166 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,037 | 2023-06-26_20-35_netwait.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/23.10/2023-06-26_20-35_netwait.py | """Remove netwait
Revision ID: bd11aee1c4b7
Revises: 0e5949153c20
Create Date: 2023-06-26 20:35:25.259010+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'bd11aee1c4b7'
down_revision = '0e5949153c20'
branch_labels = None
depends_on = None
def upgrade():
    """Remove the obsolete netwait settings from the global network configuration."""
    with op.batch_alter_table('network_globalconfiguration', schema=None) as netconf_table:
        netconf_table.drop_column('gc_netwait_enabled')
        netconf_table.drop_column('gc_netwait_ip')
def downgrade():
    """Re-create the netwait columns (values are not restored)."""
    with op.batch_alter_table('network_globalconfiguration', schema=None) as netconf_table:
        netconf_table.add_column(sa.Column('gc_netwait_ip', sa.VARCHAR(length=300), nullable=False))
        netconf_table.add_column(sa.Column('gc_netwait_enabled', sa.BOOLEAN(), nullable=False))
| 1,002 | Python | .py | 24 | 38 | 95 | 0.717975 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,038 | 2022-02-11_19-54_increase_nfs_servers.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/13.0/2022-02-11_19-54_increase_nfs_servers.py | """increase minimum number of NFS servers
Revision ID: 99aef90c4cd6
Revises: cd7569a7b973
Create Date: 2022-02-11 19:54:32.149486+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99aef90c4cd6'
down_revision = 'cd7569a7b973'
branch_labels = None
depends_on = None
def upgrade():
    """Raise the NFS server thread count from the old default of 4 to 16.

    Only rows still holding the previous default are touched, so a value the
    administrator changed deliberately is preserved.
    """
    op.get_bind().execute("UPDATE services_nfs SET nfs_srv_servers = 16 WHERE nfs_srv_servers = 4")
| 461 | Python | .py | 15 | 28.8 | 90 | 0.772727 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,039 | 2022-01-04_16-02_disk_bus_lunid.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/13.0/2022-01-04_16-02_disk_bus_lunid.py | """Disk bus
Revision ID: 7132a60093ce
Revises: 37298ef77ee8
Create Date: 2021-12-28 12:04:25.637691+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7132a60093ce'
down_revision = '37298ef77ee8'
branch_labels = None
depends_on = None
def upgrade():
    """Add bus type and LUN id columns to the disk table."""
    with op.batch_alter_table('storage_disk', schema=None) as disk_table:
        # existing rows default to "UNKNOWN" until the bus is detected
        disk_table.add_column(sa.Column('disk_bus', sa.String(length=20), nullable=False, server_default="UNKNOWN"))
        disk_table.add_column(sa.Column('disk_lunid', sa.String(length=30), nullable=True))
def downgrade():
    """Drop the bus and LUN id columns in reverse order of their addition."""
    with op.batch_alter_table('storage_disk', schema=None) as disk_table:
        disk_table.drop_column('disk_lunid')
        disk_table.drop_column('disk_bus')
| 971 | Python | .py | 24 | 36.708333 | 114 | 0.705443 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,040 | 2022-03-23_11-40_remove_recvbuf_inc_tunable.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/13.0/2022-03-23_11-40_remove_recvbuf_inc_tunable.py | """remove net.inet.tcp.recvbuf_inc (was removed in 13)
Revision ID: 88bfe11b5be5
Revises: 99aef90c4cd6
Create Date: 2022-03-23 11:40:32.149486+00:00
"""
from alembic import op
revision = '88bfe11b5be5'
down_revision = '99aef90c4cd6'
branch_labels = None
depends_on = None
def upgrade():
    """Delete the ``net.inet.tcp.recvbuf_inc`` tunable (sysctl removed in FreeBSD 13).

    Bug fix: the original statement matched ``net.inet.tcp.recvbuf_in`` —
    missing the trailing ``c`` — so it never deleted the tunable that this
    revision (per its name and docstring) was written to remove.
    """
    conn = op.get_bind()
    conn.execute('DELETE FROM system_tunable WHERE tun_var = "net.inet.tcp.recvbuf_inc"')
| 407 | Python | .py | 13 | 29.230769 | 88 | 0.757732 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,041 | 2022-02-07_13-36_remove_snmp_iftop.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/13.0/2022-02-07_13-36_remove_snmp_iftop.py | """remove snmp iftop integration (never worked)
Revision ID: cd7569a7b973
Revises: 7132a60093ce
Create Date: 2022-02-07 13:36:26.041217+00:00
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'cd7569a7b973'
down_revision = '7132a60093ce'
branch_labels = None
depends_on = None
def upgrade():
try:
with op.batch_alter_table('services_snmp', schema=None) as batch_op:
batch_op.drop_column('snmp_iftop')
except KeyError:
pass
def downgrade():
pass
| 522 | Python | .py | 19 | 23.947368 | 76 | 0.729293 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,042 | 2023-09-07_19-50_add_exporting_table_for_reporting.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-09-07_19-50_add_exporting_table_for_reporting.py | """Add exporting table for reporting
Revision ID: 3e16b0a74d78
Revises: e915a3b8fff6
Create Date: 2023-09-07 19:50:42.678757+00:00
"""
import json
import sqlalchemy as sa
from alembic import op
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.sql import text
revision = '3e16b0a74d78'
down_revision = 'e915a3b8fff6'
branch_labels = None
depends_on = None
def get_hostname(conn):
    """Return the configured hostname, or ``'truenas'`` when it cannot be read."""
    try:
        config_row = conn.execute('SELECT * FROM network_globalconfiguration').fetchone()
        return dict(config_row)['gc_hostname']
    except Exception:
        # deliberately broad: never let a hostname lookup abort the migration
        return 'truenas'
def upgrade():
    """Replace the legacy ``system_reporting`` graphite setting with exporter rows.

    Creates the generic ``reporting_exporters`` table, then converts each
    non-empty graphite destination from ``system_reporting`` into a GRAPHITE
    exporter record before dropping the old table.
    """
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    if 'reporting_exporters' in inspector.get_table_names():
        return  # Skip if already migrated
    op.create_table(
        'reporting_exporters',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('type', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        # JSON-encoded, exporter-type-specific configuration
        sa.Column('attributes', sa.TEXT(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_reporting_exports'))
    )
    hostname = get_hostname(conn)
    # only migrate rows whose graphite destination is a non-empty string
    for graphite_ip in filter(lambda ip: bool(ip[0]), conn.execute('SELECT graphite FROM system_reporting').fetchall()):
        attributes = {
            'destination_ip': graphite_ip[0],
            'destination_port': 2003,
            'prefix': 'scale',
            'hostname': hostname,
            'update_every': 1,
            'buffer_on_failures': 10,
            'send_names_instead_of_ids': True,
            'matching_charts': '*'
        }
        # text() is used so the named bind parameters are handled by SQLAlchemy
        query = text(
            'INSERT INTO reporting_exporters (enabled, type, name, attributes) VALUES '
            '(:enabled, :type, :name, :attributes)'
        )
        conn.execute(
            query,
            enabled=True,
            type='GRAPHITE',
            name='netdata',
            attributes=json.dumps(attributes)
        )
    op.drop_table('system_reporting')
def downgrade():
    """Irreversible: the legacy ``system_reporting`` table is not recreated."""
    pass
| 2,083 | Python | .py | 59 | 28.084746 | 120 | 0.634146 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,043 | 2023-10-03_17-08_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-10-03_17-08_merge.py | """Merge
Revision ID: 3df553b07a99
Revises: 2b9a98464a33, fa33f4ae6427
Create Date: 2023-10-03 17:08:38.778513+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3df553b07a99'
down_revision = ('2b9a98464a33', 'fa33f4ae6427')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,044 | 2023-11-13_17-14_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-11-13_17-14_merge.py | """Merge
Revision ID: 08483670ae15
Revises: 22e5e6881fda, 8f8942557260
Create Date: 2023-11-13 17:14:43.019010+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '08483670ae15'
down_revision = ('22e5e6881fda', '8f8942557260')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,045 | 2023-10-10_19-31_add-audit-backend.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-10-10_19-31_add-audit-backend.py | """Add auditing backend tables
Revision ID: 6f338216a965
Revises: 3df553b07a99
Create Date: 2023-10-02 19:31:49.067706+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6f338216a965'
down_revision = '3df553b07a99'
branch_labels = None
depends_on = None
def upgrade():
    """Create the auditing config table and add the syslog-audit toggle.

    The new ``adv_syslog_audit`` column is added in three steps so SQLite can
    cope with a NOT NULL column on a populated table: add it nullable,
    backfill False for every row, then tighten it to NOT NULL.
    """
    conn = op.get_bind()
    op.create_table('system_audit',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('retention', sa.Integer(), nullable=False),
        sa.Column('reservation', sa.Integer(), nullable=False),
        sa.Column('quota', sa.Integer(), nullable=False),
        sa.Column('quota_fill_warning', sa.Integer(), nullable=False),
        sa.Column('quota_fill_critical', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_system_audit')),
        sqlite_autoincrement=True
    )
    with op.batch_alter_table('system_advanced', schema=None) as batch_op:
        batch_op.add_column(sa.Column('adv_syslog_audit', sa.Boolean(), nullable=True))
    # NOTE(review): the bind parameter is passed as a bare scalar rather than a
    # tuple/list — relies on SQLAlchemy 1.x parameter distillation; verify.
    conn.execute('UPDATE system_advanced SET adv_syslog_audit = ?', False)
    with op.batch_alter_table('system_advanced', schema=None) as batch_op:
        batch_op.alter_column('adv_syslog_audit', existing_type=sa.Boolean(), nullable=False)
| 1,262 | Python | .py | 29 | 39.793103 | 93 | 0.720228 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,046 | 2024-04-03_14-10_cron.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-04-03_14-10_cron.py | """Fix cron fields values, follow-up to NAS-125384
Revision ID: ea024b5dff95
Revises: 6a7c2281f48e
Create Date: 2024-04-03 14:10:41.168534+00:00
"""
from alembic import op
import re
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ea024b5dff95'
down_revision = '6a7c2281f48e'
branch_labels = None
depends_on = None
def upgrade():
    """Normalize malformed step values in every cron-schedule field.

    For each table that stores cron fields, any value containing ``/`` that is
    not already of the valid form ``*/N`` or ``A-B/N`` is rewritten: a value
    ending in ``/N`` (e.g. ``1/5``) becomes ``*/N``; anything else falls back
    to the literal ``'1'``.
    """
    conn = op.get_bind()
    # (table, column-name prefix) pairs holding cron fields; note
    # storage_replication carries two independent schedules.
    for table, prefix in [
        ("tasks_cronjob", "cron_"),
        ("storage_replication", "repl_schedule_"),
        ("storage_replication", "repl_restrict_schedule_"),
        ("tasks_rsync", "rsync_"),
        ("storage_task", "task_"),
        ("storage_scrub", "scrub_"),
    ]:
        fields = [prefix + field for field in ["minute", "hour", "daymonth", "month", "dayweek"]]
        for row in map(dict, conn.execute(f"SELECT id, {', '.join(fields)} FROM {table}").fetchall()):
            for k in fields:
                value = row[k]
                # only rewrite values with '/' that are not valid "*/N" or "A-B/N"
                if value is not None and '/' in value and not re.match(r'^(\*|[0-9]+-[0-9]+)/([0-9]+)$', value):
                    if m := re.search(r'/([0-9]+)$', value):
                        value = f'*/{m.group(1)}'
                    else:
                        value = '1'  # No luck in guessing the correct value, here is our best guess
                    conn.execute(f"UPDATE {table} SET {k} = ? WHERE id = ?", [value, row["id"]])
def downgrade():
    """Irreversible: the original malformed cron values are not preserved."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 1,517 | Python | .py | 37 | 33.243243 | 112 | 0.569388 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,047 | 2024-01-02_15-19_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-01-02_15-19_merge.py | """Merge
Revision ID: a33672445bed
Revises: 69789458866a, bc770461df0d
Create Date: 2024-01-02 15:19:53.113060+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a33672445bed'
down_revision = ('69789458866a', 'bc770461df0d')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,048 | 2023-12-22_19-34_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-22_19-34_merge.py | """Merge
Revision ID: e2e0b53cb627
Revises: 5d9fd5c15c6d, 2eafc0aa58a0
Create Date: 2023-12-22 19:34:04.914357+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e2e0b53cb627'
down_revision = ('5d9fd5c15c6d', '2eafc0aa58a0')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,049 | 2023-12-14_19-18_network_interface_name_unique.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-14_19-18_network_interface_name_unique.py | """Make network interface names unique
Revision ID: 058f00440129
Revises: df1a322df40d
Create Date: 2023-12-14 19:18:38.014275+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '058f00440129'
down_revision = 'df1a322df40d'
branch_labels = None
depends_on = None
def upgrade():
    """Deduplicate interface names, then enforce uniqueness with a constraint.

    For each name, the lowest-id row keeps it; later duplicates are renamed
    ``name_1``, ``name_2``, ... in id order.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    # ids already renamed as duplicates; skip them when they come up as the
    # "first" row of the outer scan
    ignorelist = set()
    for id1, name1 in conn.execute('SELECT id, int_interface FROM network_interfaces ORDER BY id ASC').fetchall():
        if id1 in ignorelist:
            continue
        for i, (id2, name2) in enumerate(
            conn.execute('''
                SELECT id, int_interface
                FROM network_interfaces
                WHERE id > ? AND int_interface = ?
                ORDER BY id ASC
            ''', (id1, name1)).fetchall()
        ):
            # NOTE(review): a generated name like 'eth0_1' could itself collide
            # with a pre-existing row of that name — confirm this cannot occur.
            conn.execute(
                'UPDATE network_interfaces SET int_interface = ? WHERE id = ?', (f'{name2}_{i + 1}', id2)
            )
            ignorelist.add(id2)
    with op.batch_alter_table('network_interfaces', schema=None) as batch_op:
        batch_op.create_unique_constraint(batch_op.f('uq_network_interfaces_int_interface'), ['int_interface'])
    # ### end Alembic commands ###
def downgrade():
    """Drop the uniqueness constraint on interface names (renames are kept)."""
    with op.batch_alter_table('network_interfaces', schema=None) as iface_table:
        iface_table.drop_constraint(iface_table.f('uq_network_interfaces_int_interface'), type_='unique')
| 1,615 | Python | .py | 39 | 34.25641 | 114 | 0.639386 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,050 | 2023-09-06_14-45_cleanup_reporting_api.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-09-06_14-45_cleanup_reporting_api.py | """
Cleanup Reporting API
Revision ID: e915a3b8fff6
Revises: 3f39fe7d911d
Create Date: 2023-09-06 14:45:13.261715+00:00
"""
from alembic import op
from sqlalchemy.engine.reflection import Inspector
revision = 'e915a3b8fff6'
down_revision = '3f39fe7d911d'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the graph_age/graph_points settings from ``system_reporting``.

    Skipped entirely when the table is already gone, which happens when the
    backported Cobia migration removed ``system_reporting`` beforehand.
    """
    conn = op.get_bind()
    existing_tables = Inspector.from_engine(conn).get_table_names()
    if 'system_reporting' not in existing_tables:
        return
    with op.batch_alter_table('system_reporting', schema=None) as reporting_table:
        reporting_table.drop_column('graph_age')
        reporting_table.drop_column('graph_points')
def downgrade():
    """Irreversible: the dropped reporting columns are not restored."""
    pass
| 730 | Python | .py | 22 | 29.772727 | 91 | 0.749642 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,051 | 2023-12-04_15-22_ldap-extend-schema.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-04_15-22_ldap-extend-schema.py | """Expand LDAP configuration options
Revision ID: abac40d29fc5
Revises: 08483670ae15
Create Date: 2023-12-04 15:22:07.122129+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'abac40d29fc5'
down_revision = '08483670ae15'
branch_labels = None
depends_on = None
def upgrade():
    """Add the LDAP schema-mapping override columns.

    Every new column is a nullable ``String(256)``; NULL means "use the
    default attribute name for the configured server type".
    """
    # order matters: downgrade() drops these in reverse
    new_columns = [
        'ldap_base_user',
        'ldap_base_group',
        'ldap_base_netgroup',
        'ldap_user_object_class',
        'ldap_user_name',
        'ldap_user_uid',
        'ldap_user_gid',
        'ldap_user_gecos',
        'ldap_user_home_directory',
        'ldap_user_shell',
        'ldap_shadow_object_class',
        'ldap_shadow_last_change',
        'ldap_shadow_min',
        'ldap_shadow_max',
        'ldap_shadow_warning',
        'ldap_shadow_inactive',
        'ldap_shadow_expire',
        'ldap_group_object_class',
        'ldap_group_gid',
        'ldap_group_member',
        'ldap_netgroup_object_class',
        'ldap_netgroup_member',
        'ldap_netgroup_triple',
        'ldap_server_type',
    ]
    with op.batch_alter_table('directoryservice_ldap', schema=None) as ldap_table:
        for column_name in new_columns:
            ldap_table.add_column(sa.Column(column_name, sa.String(length=256), nullable=True))
def downgrade():
    """Drop the LDAP schema-mapping override columns in reverse order of addition."""
    added_columns = [
        'ldap_base_user',
        'ldap_base_group',
        'ldap_base_netgroup',
        'ldap_user_object_class',
        'ldap_user_name',
        'ldap_user_uid',
        'ldap_user_gid',
        'ldap_user_gecos',
        'ldap_user_home_directory',
        'ldap_user_shell',
        'ldap_shadow_object_class',
        'ldap_shadow_last_change',
        'ldap_shadow_min',
        'ldap_shadow_max',
        'ldap_shadow_warning',
        'ldap_shadow_inactive',
        'ldap_shadow_expire',
        'ldap_group_object_class',
        'ldap_group_gid',
        'ldap_group_member',
        'ldap_netgroup_object_class',
        'ldap_netgroup_member',
        'ldap_netgroup_triple',
        'ldap_server_type',
    ]
    with op.batch_alter_table('directoryservice_ldap', schema=None) as ldap_table:
        for column_name in reversed(added_columns):
            ldap_table.drop_column(column_name)
| 4,327 | Python | .py | 68 | 56.485294 | 106 | 0.694046 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,052 | 2023-10-26_08-44_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-10-26_08-44_merge.py | """Merge
Revision ID: e3436f15aa9c
Revises: e48b983ea0a0, 304e43883592
Create Date: 2023-10-26 08:44:43.019010+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e3436f15aa9c'
down_revision = ('e48b983ea0a0', '304e43883592')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,053 | 2023-08-23_18-35_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-08-23_18-35_merge.py | """Merge
Revision ID: 80c01d290a1d
Revises: 1edf1b6b04db
Create Date: 2023-08-23 18:35:29.133731+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '80c01d290a1d'
down_revision = '1edf1b6b04db'
branch_labels = None
depends_on = None
def upgrade():
    """No-op: merge revisions only reconcile two migration heads."""
    pass


def downgrade():
    """No-op: nothing to revert for a merge revision."""
    pass
| 349 | Python | .py | 16 | 19.8125 | 45 | 0.781538 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,054 | 2024-04-08_21-43_netdata_update_storage_tier.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-04-08_21-43_netdata_update_storage_tier.py | """netdata update storage tier 1
Revision ID: d774066c6c0c
Revises: 423e4c21c28d
Create Date: 2024-04-08 21:43:33.622817+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd774066c6c0c'
down_revision = '423e4c21c28d'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``reporting`` table holding netdata storage-tier retention settings."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'reporting',
        sa.Column('id', sa.Integer(), nullable=False),
        # retention in days for tier 0 (high resolution) and tier 1 data
        sa.Column('tier0_days', sa.Integer(), nullable=False),
        sa.Column('tier1_days', sa.Integer(), nullable=False),
        sa.Column('tier1_update_interval', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_reporting')),
        sqlite_autoincrement=True
    )
    # ### end Alembic commands ###
| 841 | Python | .py | 24 | 30.75 | 73 | 0.691358 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,055 | 2023-10-11_16-22_2fa_per_user_settings.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-10-11_16-22_2fa_per_user_settings.py | """
2FA Per user settings
Revision ID: 0a95d753f6d3
Revises: 6f338216a965
Create Date: 2023-10-11 16:22:17.935672+00:00
"""
import sqlalchemy as sa
from alembic import op
revision = '0a95d753f6d3'
down_revision = '6f338216a965'
branch_labels = None
depends_on = None
def upgrade():
    """Move TOTP interval/otp_digits from the global 2FA config onto each user row."""
    # Add the per-user columns, seeded with the historical global defaults.
    with op.batch_alter_table('account_twofactor_user_auth', schema=None) as batch_op:
        batch_op.add_column(sa.Column('interval', sa.INTEGER(), nullable=False, server_default='30'))
        batch_op.add_column(sa.Column('otp_digits', sa.INTEGER(), nullable=False, server_default='6'))
    conn = op.get_bind()
    config_rows = conn.execute('SELECT * FROM system_twofactorauthentication').fetchall()
    if config_rows:
        # Copy the single global config's values onto every existing user row.
        global_config = dict(config_rows[0])
        for user_row in map(dict, conn.execute('SELECT id FROM account_twofactor_user_auth').fetchall()):
            conn.execute(
                'UPDATE account_twofactor_user_auth SET interval = ?, otp_digits = ? WHERE id = ?', [
                    global_config['interval'], global_config['otp_digits'], user_row['id']
                ]
            )
    # The global settings are now redundant; drop them.
    with op.batch_alter_table('system_twofactorauthentication', schema=None) as batch_op:
        batch_op.drop_column('interval')
        batch_op.drop_column('otp_digits')
def downgrade():
    """No-op: moving 2FA settings back to the global table is not supported."""
    pass
| 1,341 | Python | .py | 32 | 35.6875 | 102 | 0.672573 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,056 | 2024-04-04_20-18_add_iscsi_rel_tgt_id.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-04-04_20-18_add_iscsi_rel_tgt_id.py | """Add iSCSI target rel_tgt_id
Revision ID: 423e4c21c28d
Revises: ea024b5dff95
Create Date: 2024-04-04 20:18:24.354103+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '423e4c21c28d'
down_revision = 'ea024b5dff95'
branch_labels = None
depends_on = None
def upgrade():
    """Add a unique, sequentially assigned rel_tgt_id to every iSCSI target."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Add the column as nullable first so existing rows can be backfilled.
    with op.batch_alter_table('services_iscsitarget', schema=None) as batch_op:
        batch_op.add_column(sa.Column('iscsi_target_rel_tgt_id', sa.Integer(), nullable=True))
        batch_op.create_unique_constraint(batch_op.f('uq_services_iscsitarget_iscsi_target_rel_tgt_id'), ['iscsi_target_rel_tgt_id'])
    conn = op.get_bind()
    # Number existing targets 1..N in query order.
    for rel_tgt_id, target in enumerate(conn.execute("SELECT * FROM services_iscsitarget").fetchall(), start=1):
        conn.execute("UPDATE services_iscsitarget SET iscsi_target_rel_tgt_id = ? WHERE id = ?", (rel_tgt_id, target['id']))
    # Every row now has a value; tighten the column to NOT NULL.
    with op.batch_alter_table('services_iscsitarget', schema=None) as batch_op:
        batch_op.alter_column('iscsi_target_rel_tgt_id', nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Remove the rel_tgt_id column and its unique constraint."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('services_iscsitarget', schema=None) as batch_op:
        batch_op.drop_constraint(batch_op.f('uq_services_iscsitarget_iscsi_target_rel_tgt_id'), type_='unique')
        batch_op.drop_column('iscsi_target_rel_tgt_id')
    # ### end Alembic commands ###
| 1,543 | Python | .py | 31 | 45.290323 | 133 | 0.704 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,057 | 2023-10-18_12-16_merge_migration_NAS-124687.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-10-18_12-16_merge_migration_NAS-124687.py | """ Merge migration for NAS-124687
Revision ID: e48b983ea0a0
Revises: 0a95d753f6d3, f7d06a57f8a1
Create Date: 2023-10-18 12:16:35.860672+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e48b983ea0a0'
down_revision = ('0a95d753f6d3', 'f7d06a57f8a1')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: this is an Alembic merge revision that only joins two heads."""
    pass
def downgrade():
    """No-op: merge revisions have nothing to revert."""
    pass
| 407 | Python | .py | 16 | 23.4375 | 48 | 0.780679 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,058 | 2023-12-08_01-55_make_nfs_servers_nullable.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-08_01-55_make_nfs_servers_nullable.py | """Convert the 'servers' field to nullable
Revision ID: df1a322df40d
Revises: abac40d29fc5
Create Date: 2023-12-08 01:55:21.782521+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'df1a322df40d'
down_revision = 'abac40d29fc5'
branch_labels = None
depends_on = None
def upgrade():
    """Allow NULL in nfs_srv_servers, converting the old default of 16 to NULL.

    NULL is the new "auto-tune server count" sentinel; 16 was the previous
    fixed default that it replaces.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('services_nfs', schema=None) as batch_op:
        batch_op.alter_column('nfs_srv_servers',
               existing_type=sa.INTEGER(),
               nullable=True)
    conn = op.get_bind()
    conn.execute("UPDATE services_nfs SET nfs_srv_servers = NULL WHERE nfs_srv_servers = 16")
    # ### end Alembic commands ###
def downgrade():
    """Restore NOT NULL on nfs_srv_servers, mapping NULL back to the old default 16."""
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    # BUGFIX: the original used 'WHERE nfs_srv_servers = NULL', which is never
    # true under SQL three-valued logic, so no rows were backfilled and NULLs
    # would survive into (or break) the NOT NULL alteration below. 'IS NULL'
    # is the correct predicate.
    conn.execute("UPDATE services_nfs SET nfs_srv_servers = 16 WHERE nfs_srv_servers IS NULL")
    with op.batch_alter_table('services_nfs', schema=None) as batch_op:
        batch_op.alter_column('nfs_srv_servers',
               existing_type=sa.INTEGER(),
               nullable=False)
    # ### end Alembic commands ###
| 1,196 | Python | .py | 30 | 34.666667 | 93 | 0.679931 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,059 | 2023-12-21_18-34_jbof_rdma.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-21_18-34_jbof_rdma.py | """Add tables to support jbof and rdma.interface APIs
Revision ID: 5d9fd5c15c6d
Revises: 4c51b6b085a3
Create Date: 2023-12-21 18:34:24.980038+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5d9fd5c15c6d'
down_revision = '4c51b6b085a3'
branch_labels = None
depends_on = None
def upgrade():
    """Create the rdma_interface and storage_jbof tables for JBOF/RDMA support."""
    # ### commands auto generated by Alembic - please adjust! ###
    # One row per (node, ifname) RDMA interface; the pair is unique so each
    # HA node configures a given interface at most once.
    op.create_table('rdma_interface',
    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
    sa.Column('rdmaif_node', sa.String(length=120), nullable=False),
    sa.Column('rdmaif_ifname', sa.String(length=120), nullable=False),
    sa.Column('rdmaif_address', sa.String(length=45), nullable=False),
    sa.Column('rdmaif_prefixlen', sa.Integer(), nullable=False),
    sa.Column('rdmaif_mtu', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_rdma_interface')),
    sa.UniqueConstraint('rdmaif_node', 'rdmaif_ifname', name=op.f('uq_rdma_interface_rdmaif_node'))
    )
    # JBOF enclosures: index and uuid are both unique identifiers; two mgmt
    # IPs plus credentials for out-of-band management.
    op.create_table('storage_jbof',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('jbof_description', sa.String(length=120), nullable=True),
    sa.Column('jbof_index', sa.Integer(), nullable=False),
    sa.Column('jbof_uuid', sa.Text(), nullable=False),
    sa.Column('jbof_mgmt_ip1', sa.String(length=45), nullable=False),
    sa.Column('jbof_mgmt_ip2', sa.String(length=45), nullable=False),
    sa.Column('jbof_mgmt_username', sa.String(length=120), nullable=False),
    sa.Column('jbof_mgmt_password', sa.Text(), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_storage_jbof')),
    sa.UniqueConstraint('jbof_index', name=op.f('uq_storage_jbof_jbof_index')),
    sa.UniqueConstraint('jbof_uuid', name=op.f('uq_storage_jbof_jbof_uuid')),
    sqlite_autoincrement=True
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the tables created by upgrade(), newest first."""
    # ### commands auto generated by Alembic - please adjust! ###
    for table_name in ('storage_jbof', 'rdma_interface'):
        op.drop_table(table_name)
    # ### end Alembic commands ###
| 2,078 | Python | .py | 44 | 43.318182 | 99 | 0.70385 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,060 | 2023-09-05_12-53_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-09-05_12-53_merge.py | """Merge
Revision ID: 3f39fe7d911d
Revises: 50873c0db61b, b06ea181e7dd
Create Date: 2023-09-05 12:53:41.984207+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '3f39fe7d911d'
down_revision = ('50873c0db61b', 'b06ea181e7dd')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: this is an Alembic merge revision that only joins two heads."""
    pass
def downgrade():
    """No-op: merge revisions have nothing to revert."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,061 | 2023-12-16_15-01_drop_system_filesystem.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-16_15-01_drop_system_filesystem.py | """drop system_filesystem table
Revision ID: 334c69c59196
Revises: 058f00440129
Create Date: 2023-12-16 15:01:53.612472+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '334c69c59196'
down_revision = '058f00440129'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the unused system_filesystem table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('system_filesystem')
    # ### end Alembic commands ###
def downgrade():
    """Recreate the system_filesystem table dropped by upgrade() (schema only)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('system_filesystem',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('identifier', sa.VARCHAR(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('identifier', name='uq_system_filesystem_identifier')
    )
    # ### end Alembic commands ###
| 875 | Python | .py | 25 | 31.92 | 77 | 0.720903 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,062 | 2023-12-18_17-48_remove_nfs_udp.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-18_17-48_remove_nfs_udp.py | """Remove UDP configuration option for NFS
Revision ID: 4c51b6b085a3
Revises: 334c69c59196
Create Date: 2023-12-18 17:48:41.974789+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4c51b6b085a3'
down_revision = '334c69c59196'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the nfs_srv_udp column: UDP is no longer a supported NFS transport."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('services_nfs', schema=None) as batch_op:
        batch_op.drop_column('nfs_srv_udp')
    # ### end Alembic commands ###
def downgrade():
    """Re-add the nfs_srv_udp column (prior values are not recoverable)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('services_nfs', schema=None) as batch_op:
        batch_op.add_column(sa.Column('nfs_srv_udp', sa.BOOLEAN(), nullable=False))
    # ### end Alembic commands ###
| 841 | Python | .py | 22 | 34.954545 | 83 | 0.714462 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,063 | 2024-01-23_10-38_remove_system_birthday.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-01-23_10-38_remove_system_birthday.py | """Remove system birthday
Revision ID: 7c456c2a4926
Revises: 809e46a452e0
Create Date: 2024-01-23 10:38:31.580418+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '7c456c2a4926'
down_revision = '809e46a452e0'
branch_labels = None
depends_on = None
def upgrade():
    """Drop the unused stg_birthday column from system_settings."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('system_settings', schema=None) as batch_op:
        batch_op.drop_column('stg_birthday')
    # ### end Alembic commands ###
def downgrade():
    """Re-add the stg_birthday column (prior values are not recoverable)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('system_settings', schema=None) as batch_op:
        batch_op.add_column(sa.Column('stg_birthday', sa.DATETIME(), nullable=True))
    # ### end Alembic commands ###
| 832 | Python | .py | 22 | 34.545455 | 84 | 0.7175 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,064 | 2024-01-29_10-05_update_graphite_hostname_attribute.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-01-29_10-05_update_graphite_hostname_attribute.py | """Update graphite hostname attribute
Revision ID: 6a7c2281f48e
Revises: 7c456c2a4926
Create Date: 2024-01-29 10:05:47.440945+00:00
"""
import json
from alembic import op
from sqlalchemy.sql import text
# revision identifiers, used by Alembic.
revision = '6a7c2281f48e'
down_revision = '7c456c2a4926'
branch_labels = None
depends_on = None
def upgrade():
    """Rename the 'hostname' key to 'namespace' in each exporter's JSON attributes."""
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    # BUGFIX: the original UPDATE had no WHERE clause (and the SELECT omitted
    # 'id'), so with multiple reporting_exporters rows every row was
    # overwritten with the last row's attributes. Update each row by id.
    for row in conn.execute('SELECT id, attributes FROM reporting_exporters').fetchall():
        attributes = json.loads(row['attributes'])
        if 'hostname' not in attributes:
            # Nothing to rename (e.g. row already migrated) -- skip rather
            # than raise KeyError from pop().
            continue
        attributes['namespace'] = attributes.pop('hostname')
        conn.execute(
            text('UPDATE reporting_exporters SET attributes=:attributes WHERE id=:id'),
            attributes=json.dumps(attributes), id=row['id']
        )
    # ### end Alembic commands ###
def downgrade():
    """No-op: the hostname -> namespace attribute rename is not reverted."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 971 | Python | .py | 28 | 30.321429 | 86 | 0.703108 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,065 | 2023-09-19_16-05_smb_audit.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-09-19_16-05_smb_audit.py | """Add SMB auditing parameters
Revision ID: 2b9a98464a33
Revises: 3e16b0a74d78
Create Date: 2023-09-19 16:05:32.676987+00:00
"""
from alembic import op
import sqlalchemy as sa
import json
# revision identifiers, used by Alembic.
revision = '2b9a98464a33'
down_revision = '3e16b0a74d78'
branch_labels = None
depends_on = None
AUDIT_CONFIG = json.dumps({'enable': False, 'watch_list': [], 'ignore_list': []})
def upgrade():
    """Add the per-share cifs_audit JSON column, defaulted to AUDIT_CONFIG.

    Three-step NOT NULL addition: add as nullable, backfill every existing
    row with the disabled-audit default, then tighten to NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    with op.batch_alter_table('sharing_cifs_share', schema=None) as batch_op:
        batch_op.add_column(sa.Column('cifs_audit', sa.TEXT(), nullable=True))
    conn.execute('UPDATE sharing_cifs_share SET cifs_audit = ?', AUDIT_CONFIG)
    with op.batch_alter_table('sharing_cifs_share', schema=None) as batch_op:
        batch_op.alter_column('cifs_audit', existing_type=sa.TEXT(), nullable=False)
def downgrade():
    """Drop the cifs_audit column again."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('sharing_cifs_share', schema=None) as batch_op:
        batch_op.drop_column('cifs_audit')
| 1,126 | Python | .py | 26 | 39.923077 | 84 | 0.716514 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,066 | 2023-11-03_11-35_sharing_manager_privilege.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-11-03_11-35_sharing_manager_privilege.py | """Add sharing administrators privilege
Revision ID: 22e5e6881fda
Revises: 649fe0f7f0ba
Create Date: 2023-11-03 11:35:38.694684+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '22e5e6881fda'
down_revision = '649fe0f7f0ba'
branch_labels = None
depends_on = None
def upgrade():
    """Seed a builtin 'SHARING_MANAGER' privilege granting the SHARING_MANAGER role.

    NOTE(review): a later migration (809e46a452e0) renames this row to
    'Sharing Administrator' and attaches a local group; do not edit this
    seed data retroactively -- applied migrations are immutable.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute(
        "INSERT INTO account_privilege (builtin_name, name, local_groups, ds_groups, allowlist, web_shell, roles) "
        "VALUES ('SHARING_MANAGER', 'Sharing Manager', '[]', '[]', '[]', 0, '[\"SHARING_MANAGER\"]')"
    )
    # ### end Alembic commands ###
def downgrade():
    """No-op: the seeded privilege row is not removed on downgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 813 | Python | .py | 23 | 32.086957 | 115 | 0.68798 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,067 | 2024-05-01_15-55_smb_migrate_values.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-05-01_15-55_smb_migrate_values.py | """Fix up SMB paramters and users
Revision ID: f38c2bbe776a
Revises: d774066c6c0c
Create Date: 2024-05-01 15:55:42.754331+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f38c2bbe776a'
down_revision = 'd774066c6c0c'
branch_labels = None
depends_on = None
SHARE_TABLE = "sharing_cifs_share"
PURPOSE_KEY = "cifs_purpose"
READONLY_KEY = "cifs_ro"
USER_TABLE = "account_bsdusers"
SMB_KEY = "bsdusr_smb"
HOME_KEY = "bsdusr_home"
LEGACY_HOME = "/nonexistent"
EMPTY_DIR = "/var/empty"
def upgrade():
    """One-shot SMB data fixups:

    * cluster READ_ONLY shares become DEFAULT_SHARE with the readonly flag set,
    * cluster DEFAULT_CLUSTER_SHARE shares become plain DEFAULT_SHARE,
    * SMB users homed at the legacy /nonexistent are re-homed to /var/empty.
    """
    conn = op.get_bind()
    fixups = (
        # READ_ONLY -> DEFAULT_SHARE with readonly checked
        (
            f"UPDATE {SHARE_TABLE} "
            f"SET {PURPOSE_KEY} = ?, {READONLY_KEY} = ? "
            f"WHERE {PURPOSE_KEY} = ?",
            ['DEFAULT_SHARE', 1, 'READ_ONLY'],
        ),
        # DEFAULT_CLUSTER_SHARE -> DEFAULT_SHARE
        (
            f"UPDATE {SHARE_TABLE} "
            f"SET {PURPOSE_KEY} = ? "
            f"WHERE {PURPOSE_KEY} = ?",
            ['DEFAULT_SHARE', 'DEFAULT_CLUSTER_SHARE'],
        ),
        # legacy /nonexistent home directory for SMB users -> /var/empty
        (
            f"UPDATE {USER_TABLE} "
            f"SET {HOME_KEY} = ? "
            f"WHERE {HOME_KEY} = ? AND {SMB_KEY} = ?",
            [EMPTY_DIR, LEGACY_HOME, 1],
        ),
    )
    for stmnt, params in fixups:
        conn.execute(stmnt, params)
def downgrade():
    """No-op: the data fixups are not reversible (original values are lost)."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 1,578 | Python | .py | 50 | 27.4 | 70 | 0.653465 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,068 | 2023-12-28_12-48_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-28_12-48_merge.py | """Merge
Revision ID: a598a2c81461
Revises: e2e0b53cb627, 7a5ebab17483
Create Date: 2023-12-28 12:48:12.099465+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a598a2c81461'
down_revision = ('e2e0b53cb627', '7a5ebab17483')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: this is an Alembic merge revision that only joins two heads."""
    pass
def downgrade():
    """No-op: merge revisions have nothing to revert."""
    pass
| 381 | Python | .py | 16 | 21.8125 | 48 | 0.778711 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,069 | 2023-09-05_11-30_merge.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-09-05_11-30_merge.py | """merge migration
Revision ID: 50873c0db61b
Revises: 80c01d290a1d, 22a23dafd7de
Create Date: 2023-09-05 11:30:59.733983+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '50873c0db61b'
down_revision = ('80c01d290a1d', '22a23dafd7de')
branch_labels = None
depends_on = None
def upgrade():
    """No-op: this is an Alembic merge revision that only joins two heads."""
    pass
def downgrade():
    """No-op: merge revisions have nothing to revert."""
    pass
| 391 | Python | .py | 16 | 22.4375 | 48 | 0.782016 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,070 | 2024-01-15_20-10_builtin_roles_groups.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2024-01-15_20-10_builtin_roles_groups.py | """Built-in roles groups
Revision ID: 809e46a452e0
Revises: a33672445bed
Create Date: 2024-01-15 20:10:22.012464+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '809e46a452e0'
down_revision = 'a33672445bed'
branch_labels = None
depends_on = None
def upgrade():
    """Create builtin groups for the Read-Only / Sharing administrator
    privileges and repoint the existing privilege rows at them.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    # New builtin, SMB-enabled groups with fixed GIDs 951/952 and no sudo rights.
    conn.execute("INSERT INTO account_bsdgroups (bsdgrp_gid, bsdgrp_group, bsdgrp_builtin, bsdgrp_smb,"
                 "bsdgrp_sudo_commands, bsdgrp_sudo_commands_nopasswd) VALUES (951, 'truenas_readonly_administrators', "
                 "1, 1, '[]', '[]')")
    conn.execute("INSERT INTO account_bsdgroups (bsdgrp_gid, bsdgrp_group, bsdgrp_builtin, bsdgrp_smb,"
                 "bsdgrp_sudo_commands, bsdgrp_sudo_commands_nopasswd) VALUES (952, 'truenas_sharing_administrators', "
                 "1, 1, '[]', '[]')")
    # Rebrand the READONLY privilege and attach it to group 951.
    op.execute(
        "UPDATE account_privilege SET builtin_name = 'READONLY_ADMINISTRATOR', name = 'Read-Only Administrator', "
        "local_groups = '[951]', roles = '[\"READONLY_ADMIN\"]' "
        "WHERE builtin_name = 'READONLY'"
    )
    # Likewise SHARING_MANAGER -> SHARING_ADMINISTRATOR with group 952.
    op.execute(
        "UPDATE account_privilege SET builtin_name = 'SHARING_ADMINISTRATOR', name = 'Sharing Administrator', "
        "local_groups = '[952]', roles = '[\"SHARING_ADMIN\"]' "
        "WHERE builtin_name = 'SHARING_MANAGER'"
    )
    # ### end Alembic commands ###
def downgrade():
    """No-op: the group creation and privilege renames are not reverted."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 1,604 | Python | .py | 36 | 38.694444 | 120 | 0.653821 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,071 | 2023-10-30_21-02_readonly_privilege.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-10-30_21-02_readonly_privilege.py | """Readonly privilege
Revision ID: 649fe0f7f0ba
Revises: e3436f15aa9c
Create Date: 2023-10-30 21:02:00.757892+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '649fe0f7f0ba'
down_revision = 'e3436f15aa9c'
branch_labels = None
depends_on = None
def upgrade():
    """Seed a builtin 'READONLY' privilege granting the READONLY role.

    NOTE(review): 'Administators' is a typo in the seeded display name. It is
    left as-is because applied migrations are immutable, and a later migration
    (809e46a452e0) renames this row to 'Read-Only Administrator' anyway.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.execute(
        "INSERT INTO account_privilege (builtin_name, name, local_groups, ds_groups, allowlist, web_shell, roles) "
        "VALUES ('READONLY', 'Readonly Administators', '[]', '[]', '[]', 0, '[\"READONLY\"]')"
    )
    # ### end Alembic commands ###
def downgrade():
    """No-op: the seeded privilege row is not removed on downgrade."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
| 788 | Python | .py | 23 | 31 | 115 | 0.682959 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,072 | 2023-12-28_18-12_remove_gluster.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/24.04/2023-12-28_18-12_remove_gluster.py | """ remove gluster service
Revision ID: 69789458866a
Revises: a598a2c81461
Create Date: 2023-12-23 18:12:00.848725+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '69789458866a'
down_revision = 'a598a2c81461'
branch_labels = None
depends_on = None
def upgrade():
    """Remove glusterd service registration and the per-share cluster volume column."""
    op.execute("DELETE FROM services_services WHERE srv_service = 'glusterd'")
    with op.batch_alter_table('sharing_cifs_share', schema=None) as batch_op:
        batch_op.drop_column('cifs_cluster_volname')
def downgrade():
    """No-op: gluster support removal is not reversible."""
    pass
| 567 | Python | .py | 18 | 29 | 78 | 0.763838 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,073 | 2023-06-13_12-04_link_address_b.py | truenas_middleware/src/middlewared/middlewared/alembic/versions/13.1/2023-06-13_12-04_link_address_b.py | """Network interface link_address_b
Revision ID: b412304844e1
Revises: 88bfe11b5be5
Create Date: 2023-06-13 12:04:07.420120+00:00
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b412304844e1'
down_revision = '88bfe11b5be5'
branch_labels = None
depends_on = None
def upgrade():
    """Add int_link_address_b: the standby controller's MAC for an interface."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('network_interfaces', schema=None) as batch_op:
        batch_op.add_column(sa.Column('int_link_address_b', sa.String(length=17), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Drop the int_link_address_b column again."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('network_interfaces', schema=None) as batch_op:
        batch_op.drop_column('int_link_address_b')
    # ### end Alembic commands ###
| 867 | Python | .py | 22 | 36.136364 | 97 | 0.720958 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,074 | __init__.py | truenas_middleware/src/middlewared/middlewared/apidocs/__init__.py | import aiohttp.web
from middlewared.utils.mako import get_template
async def render_template(request, name, **kwargs):
    """Render the Mako template ``apidocs/templates/<name>`` off the event loop.

    Rendering is pushed into a worker thread via middleware.run_in_thread so
    the (synchronous) template engine does not block the loop.
    """
    middleware = request.app['middleware']

    def _render():
        return get_template(f'apidocs/templates/{name}').render(**kwargs)

    return await middleware.run_in_thread(_render)
async def render_to_response(request, name, **kwargs):
    """Render the named template and wrap it in an HTML aiohttp response."""
    html = await render_template(request, name, **kwargs)
    return aiohttp.web.Response(text=html, content_type='text/html')
routes = aiohttp.web.RouteTableDef()
@routes.get('/api/docs/')
async def index(request):
    """Serve the API documentation landing page."""
    return await render_to_response(request, 'index.html')
@routes.get('/api/docs/restful/')
async def restful(request):
    """Serve the REST API documentation page."""
    return await render_to_response(request, 'restful.html')
@routes.get('/api/docs/websocket/')
async def websocket(request):
    """Serve the websocket API documentation page.

    Collects every registered service with its methods, pre-renders the
    markdown fragments, and hands everything to the websocket.html template.
    """
    middleware = request.app['middleware']
    services = [
        {
            'name': name,
            'methods': await middleware.call('core.get_methods', name),
        }
        for name in sorted(await middleware.call('core.get_services'))
    ]
    context = {
        'services': services,
        'events': await render_template(
            request, 'websocket/events.md', events=await middleware.call('core.get_events')
        ),
        'query_filters': await render_template(request, 'websocket/query.md'),
        'protocol': await render_template(request, 'websocket/protocol.md'),
        'jobs': await render_template(request, 'websocket/jobs.md'),
    }
    return await render_to_response(request, 'websocket.html', **context)
| 1,605 | Python | .py | 37 | 37.783784 | 110 | 0.69112 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,075 | convert_schema.py | truenas_middleware/src/middlewared/middlewared/schema/convert_schema.py | from .adaptable_schemas import Bool
from .dict_schema import Dict
from .integer_schema import Int
from .string_schema import Str
def convert_schema(spec):
    """Instantiate a schema Attribute from a plain-dict specification.

    ``spec`` must contain 'type' and 'name' (both are popped); the remaining
    keys are forwarded to the attribute constructor. For 'dict' specs the
    optional 'args'/'kwargs' entries feed Dict() directly. Raises ValueError
    for an unrecognized type.
    """
    schema_type = spec.pop('type')
    schema_name = spec.pop('name')
    # Factories are lambdas so class names are only resolved on a match.
    factories = (
        (('int', 'integer'), lambda: Int(schema_name, **spec)),
        (('str', 'string'), lambda: Str(schema_name, **spec)),
        (('bool', 'boolean'), lambda: Bool(schema_name, **spec)),
        (('dict',), lambda: Dict(schema_name, *spec.get('args', []), **spec.get('kwargs', {}))),
    )
    for aliases, factory in factories:
        if schema_type in aliases:
            return factory()
    raise ValueError(f'Unknown type: {schema_type}')
| 550 | Python | .py | 16 | 29.5 | 74 | 0.616541 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,076 | dict_schema.py | truenas_middleware/src/middlewared/middlewared/schema/dict_schema.py | import copy
import collections
from datetime import datetime, time
from middlewared.service_exception import ValidationErrors
from middlewared.utils import filter_list
from middlewared.utils.cron import CRON_FIELDS, croniter_for_schedule
from .attribute import Attribute
from .exceptions import Error
from .string_schema import Str, Time
from .utils import NOT_PROVIDED, REDACTED_VALUE
class Dict(Attribute):
    """Schema attribute describing a dict (JSON object) of named child attributes.

    Positional ``attrs`` are child ``Attribute`` instances, optionally preceded
    by a name string. Keyword options:

    * ``additional_attrs`` -- allow keys not declared in ``attrs``
    * ``conditional_defaults`` -- mapping of attr name to
      ``{'filters': ..., 'attrs': [...]}``; when the filters do NOT match the
      input, defaults are skipped for the listed attrs (see get_attrs_to_skip)
    * ``private_keys`` -- keys redacted by :meth:`dump`
    * ``strict`` -- every attr must be either required (without a default) or
      have a default (without being required)
    * ``update`` -- treat input as a partial update: no required-field
      enforcement and no default population
    """
    def __init__(self, *attrs, **kwargs):
        # TODO: Let's please perhaps have name as a keyword argument when we add support for
        # optional name argument in accepts decorator
        if list(attrs) and isinstance(attrs[0], str):
            name = attrs[0]
            attrs = list(attrs[1:])
        else:
            name = ''
        self.additional_attrs = kwargs.pop('additional_attrs', False)
        self.conditional_defaults = kwargs.pop('conditional_defaults', {})
        self.private_keys = kwargs.pop('private_keys', [])
        self.strict = kwargs.pop('strict', False)
        # Update property is used to disable requirement on all attributes
        # as well to not populate default values for not specified attributes
        self.update = kwargs.pop('update', False)
        if 'default' not in kwargs:
            kwargs['default'] = {}
        super(Dict, self).__init__(name, **kwargs)
        # Children keyed by attribute name, in declaration order.
        self.attrs = {}
        for i in attrs:
            self.attrs[i.name] = i
        # Validate conditional_defaults: every referenced attribute must exist
        # and each entry must carry both 'filters' and 'attrs'.
        for k, v in self.conditional_defaults.items():
            if k not in self.attrs:
                raise ValueError(f'Specified attribute {k!r} not found.')
            for k_v in ('filters', 'attrs'):
                if k_v not in v:
                    raise ValueError(f'Conditional defaults must have {k_v} specified.')
            for attr in v['attrs']:
                if attr not in self.attrs:
                    raise ValueError(f'Specified attribute {attr} not found.')
        if self.strict:
            # Strict mode: required XOR has_default must hold for every child.
            for attr in self.attrs.values():
                if attr.required:
                    if attr.has_default:
                        raise ValueError(
                            f'Attribute {attr.name} is required and has default value at the same time, '
                            'this is forbidden in strict mode'
                        )
                else:
                    if not attr.has_default:
                        raise ValueError(
                            f'Attribute {attr.name} is not required and does not have default value, '
                            'this is forbidden in strict mode'
                        )
    def has_private(self):
        """True if this dict or any child attribute holds private data."""
        return self.private or any(i.has_private() for i in self.attrs.values())
    def get_attrs_to_skip(self, data):
        """Map attr name -> set of conditional_defaults keys whose filters did
        not match ``data``; those attrs should not receive defaults."""
        skip_attrs = collections.defaultdict(set)
        # Evaluate filters against data with defaults applied (unless in
        # update mode, where the raw partial payload is used as-is).
        check_data = self.get_defaults(data, {}, ValidationErrors(), False) if not self.update else data
        for attr, attr_data in filter(
            lambda k: not filter_list([check_data], k[1]['filters']), self.conditional_defaults.items()
        ):
            for k in attr_data['attrs']:
                skip_attrs[k].update({attr})
        return skip_attrs
    def clean(self, data):
        """Normalize ``data``: clean each child value, reject unexpected keys
        (unless additional_attrs), and fill defaults (unless update mode).

        Raises ValidationErrors aggregating all per-key failures.
        """
        data = super().clean(data)
        if data is None:
            if self.null:
                return None
            return copy.deepcopy(self.default)
        if not isinstance(data, dict):
            raise Error(self.name, 'A dict was expected')
        verrors = ValidationErrors()
        for key, value in list(data.items()):
            if not self.additional_attrs:
                if key not in self.attrs:
                    verrors.add(f'{self.name}.{key}', 'Field was not expected')
                    continue
            attr = self.attrs.get(key)
            if not attr:
                # Undeclared key allowed through by additional_attrs.
                continue
            data[key] = self._clean_attr(attr, value, verrors)
        # In update mode we neither enforce required fields nor populate
        # defaults for unspecified attributes.
        if not self.update:
            data.update(self.get_defaults(data, self.get_attrs_to_skip(data), verrors))
        verrors.check()
        return data
    def get_defaults(self, orig_data, skip_attrs, verrors, check_required=True):
        """Return a copy of ``orig_data`` with missing required/defaulted
        attributes filled in (except those listed in ``skip_attrs``)."""
        data = copy.deepcopy(orig_data)
        for attr in list(self.attrs.values()):
            if attr.name not in data and attr.name not in skip_attrs and (
                (check_required and attr.required) or attr.has_default
            ):
                data[attr.name] = self._clean_attr(attr, NOT_PROVIDED, verrors)
        return data
    def _clean_attr(self, attr, value, verrors):
        # Returns the cleaned value, or None when cleaning failed (the error
        # is recorded in verrors instead of propagating).
        try:
            return attr.clean(value)
        except Error as e:
            verrors.add(f'{self.name}.{e.attribute}', e.errmsg, e.errno)
        except ValidationErrors as e:
            verrors.add_child(self.name, e)
    def dump(self, value):
        """Return a loggable copy of ``value`` with private data redacted."""
        if self.private:
            return REDACTED_VALUE
        if not isinstance(value, dict):
            return value
        value = value.copy()
        for key in value:
            if key in self.private_keys:
                value[key] = REDACTED_VALUE
                continue
            attr = self.attrs.get(key)
            if not attr:
                continue
            value[key] = attr.dump(value[key])
        return value
    def validate(self, value):
        """Run each child attribute's validate(); aggregate failures."""
        if value is None:
            return
        super().validate(value)
        verrors = ValidationErrors()
        for attr in self.attrs.values():
            if attr.name in value:
                try:
                    attr.validate(value[attr.name])
                except ValidationErrors as e:
                    verrors.add_child(self.name, e)
        verrors.check()
    def to_json_schema(self, parent=None):
        """Emit a JSON-schema object description, including the private
        '_attrs_order_' key that preserves declaration order."""
        schema = {
            'type': 'object',
            'properties': {},
            'additionalProperties': self.additional_attrs,
            **self._to_json_schema_common(parent),
        }
        for name, attr in list(self.attrs.items()):
            schema['properties'][name] = attr.to_json_schema(parent=self)
        schema['_attrs_order_'] = list(self.attrs.keys())
        return schema
    def resolve(self, schemas):
        """Resolve any unresolved child attributes, register self if asked."""
        for name, attr in list(self.attrs.items()):
            if not attr.resolved:
                new_name = name
                self.attrs[new_name] = attr.resolve(schemas)
        if self.register:
            schemas.add(self)
        self.resolved = True
        return self
    def copy(self):
        """Deep-ish copy: base attribute copy plus per-child copies."""
        cp = super().copy()
        cp.attrs = {}
        for name, attr in self.attrs.items():
            cp.attrs[name] = attr.copy()
        return cp
class Cron(Dict):
    """Dict schema specialized for cron schedules.

    Children are the five cron fields (CRON_FIELDS: minute, hour, day-of-month,
    month, day-of-week) as strings defaulting to '*', optionally plus
    'begin'/'end' Time bounds when ``begin_end`` is enabled.
    """
    FIELDS = CRON_FIELDS
    def __init__(self, name='', **kwargs):
        self.additional_attrs = kwargs.pop('additional_attrs', False)
        exclude = kwargs.pop('exclude', [])
        defaults = kwargs.pop('defaults', {})
        self.begin_end = kwargs.pop('begin_end', False)
        # Update property is used to disable requirement on all attributes
        # as well to not populate default values for not specified attributes
        self.update = kwargs.pop('update', False)
        super(Cron, self).__init__(name, **kwargs)
        # Build the cron-field children, minus any excluded fields.
        self.attrs = {}
        for i in filter(lambda f: f not in exclude, Cron.FIELDS):
            self.attrs[i] = Str(i, default=defaults.get(i, '*'))
        if self.begin_end:
            self.attrs['begin'] = Time('begin', default=defaults.get('begin', '00:00'))
            self.attrs['end'] = Time('end', default=defaults.get('end', '23:59'))
    @staticmethod
    def convert_schedule_to_db_format(data_dict, schedule_name='schedule', key_prefix='', begin_end=False):
        """In place, explode data_dict[schedule_name] into flat DB columns.

        A None schedule nulls out every flat column; a dict schedule copies
        only the fields it contains. The schedule key itself is popped.
        """
        if schedule_name in data_dict:
            schedule = data_dict.pop(schedule_name)
            db_fields = ['minute', 'hour', 'daymonth', 'month', 'dayweek']
            if schedule is not None:
                for index, field in enumerate(Cron.FIELDS):
                    if field in schedule:
                        data_dict[key_prefix + db_fields[index]] = schedule[field]
                if begin_end:
                    for field in ['begin', 'end']:
                        if field in schedule:
                            data_dict[key_prefix + field] = schedule[field]
            else:
                for index, field in enumerate(Cron.FIELDS):
                    data_dict[key_prefix + db_fields[index]] = None
                if begin_end:
                    for field in ['begin', 'end']:
                        data_dict[key_prefix + field] = None
    @staticmethod
    def convert_db_format_to_schedule(data_dict, schedule_name='schedule', key_prefix='', begin_end=False):
        """In place, fold flat DB columns back into data_dict[schedule_name].

        Any NULL column collapses the whole schedule to None; begin/end values
        are truncated to 'HH:MM'. The flat column keys are popped.
        """
        db_fields = ['minute', 'hour', 'daymonth', 'month', 'dayweek']
        data_dict[schedule_name] = {}
        for index, field in enumerate(db_fields):
            key = key_prefix + field
            if key in data_dict:
                value = data_dict.pop(key)
                if value is None:
                    data_dict[schedule_name] = None
                else:
                    if data_dict[schedule_name] is not None:
                        data_dict[schedule_name][Cron.FIELDS[index]] = value
        if begin_end:
            for field in ['begin', 'end']:
                key = key_prefix + field
                if key in data_dict:
                    value = data_dict.pop(key)
                    if value is None:
                        data_dict[schedule_name] = None
                    else:
                        if data_dict[schedule_name] is not None:
                            data_dict[schedule_name][field] = str(value)[:5]
    def validate(self, value):
        """Validate field values, cron syntax, and the begin/end time window.

        Raises ValidationErrors; structural errors are checked (and raised)
        before the cron-syntax / time-window checks run.
        """
        if value is None:
            return
        verrors = ValidationErrors()
        for attr in self.attrs.values():
            if attr.name in value:
                try:
                    attr.validate(value[attr.name])
                except ValidationErrors as e:
                    verrors.add_child(self.name, e)
        for v in value:
            if self.begin_end and v in ['begin', 'end']:
                continue
            if v not in Cron.FIELDS:
                verrors.add(self.name, f'Unexpected {v} value')
        verrors.check()
        try:
            iter_ = croniter_for_schedule(value)
        except Exception as e:
            iter_ = None
            verrors.add(self.name, 'Please ensure fields match cron syntax - ' + str(e))
        if value.get('begin') and value.get('end') and not (value.get('begin') <= value.get('end')):
            verrors.add(self.name, 'Begin time should be less or equal than end time')
        if iter_ is not None and (value.get('begin') or value.get('end')):
            begin = value.get('begin') or time(0, 0)
            end = value.get('end') or time(23, 59)
            # Walk the next 24h worth of minutes of occurrences; at least one
            # must land inside the [begin, end] window.
            for i in range(24 * 60):
                d = iter_.get_next(datetime)
                if begin <= d.time() <= end:
                    break
            else:
                verrors.add(self.name, 'Specified schedule does not match specified time interval')
        verrors.check()
| 11,163 | Python | .py | 249 | 32.120482 | 107 | 0.554778 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,077 | adaptable_schemas.py | truenas_middleware/src/middlewared/middlewared/schema/adaptable_schemas.py | import copy
from middlewared.service_exception import ValidationErrors
from .attribute import Attribute
from .exceptions import Error, ResolverError
from .utils import NOT_PROVIDED
class Any(Attribute):
    """Schema attribute that accepts a value of any basic JSON type."""
    def to_json_schema(self, parent=None):
        """Return a JSON-schema fragment permitting every basic JSON type."""
        schema = {
            'anyOf': [{'type': t} for t in ('string', 'integer', 'boolean', 'object', 'array')],
            'nullable': self.null,
        }
        schema.update(self._to_json_schema_common(parent))
        return schema
class Bool(Attribute):
    """Schema attribute accepting only ``True``/``False`` (or ``None`` when nullable)."""
    def clean(self, value):
        """Run base cleaning, then reject any non-boolean value."""
        cleaned = super().clean(value)
        if cleaned is None or isinstance(cleaned, bool):
            return cleaned
        raise Error(self.name, 'Not a boolean')
    def to_json_schema(self, parent=None):
        """Return the JSON-schema fragment for a boolean attribute."""
        json_type = ['boolean', 'null'] if self.null else 'boolean'
        return {'type': json_type, **self._to_json_schema_common(parent)}
class Ref:
    """Lazy reference to a registered schema, optionally renamed on resolution."""
    def __init__(self, name, new_name=None):
        self.schema_name = name
        # Fall back to the referenced schema's own name when no alias is given.
        self.name = new_name if new_name else name
        self.resolved = False
    def resolve(self, schemas):
        """Look up the referenced schema; return a renamed, unregistered copy."""
        target = schemas.get(self.schema_name)
        if not target:
            raise ResolverError('Schema {0} does not exist'.format(self.schema_name))
        resolved_schema = target.copy()
        resolved_schema.name = self.name
        resolved_schema.register = False
        resolved_schema.resolved = True
        self.resolved = True
        return resolved_schema
    def copy(self):
        """Return an independent deep copy of this reference."""
        return copy.deepcopy(self)
class OROperator:
    """Composite schema that accepts a value matching any one of its member schemas."""
    def __init__(self, *schemas, **kwargs):
        self.name = kwargs.get('name', '')
        self.title = kwargs.get('title') or self.name
        self.schemas = list(schemas)
        self.description = kwargs.get('description')
        self.resolved = False
        self.default = kwargs.get('default', None)
        # Distinguish "no default supplied" from "default is None".
        self.has_default = 'default' in kwargs and kwargs['default'] is not NOT_PROVIDED
        self.private = kwargs.get('private', False)
    @property
    def required(self):
        # Required when any member schema declares itself required.
        for schema in filter(lambda s: hasattr(s, 'required'), self.schemas):
            if schema.required:
                return True
        return False
    def clean(self, value):
        """Return the cleaned value from the first member schema that accepts it.

        Raises ``Error`` carrying all collected failures when none accepts.
        """
        if self.has_default and value == self.default:
            return copy.deepcopy(self.default)
        found = False
        final_value = value
        verrors = ValidationErrors()
        for index, i in enumerate(self.schemas):
            try:
                # Deep-copy so one schema's partial clean cannot leak into the
                # next candidate's attempt.
                tmpval = copy.deepcopy(value)
                final_value = i.clean(tmpval)
            except (Error, ValidationErrors) as e:
                if isinstance(e, Error):
                    verrors.add(e.attribute, e.errmsg, e.errno)
                else:
                    verrors.extend(e)
            else:
                found = True
                break
        if found is not True:
            raise Error(self.name, f'Result does not match specified schema: {verrors}')
        return final_value
    def validate(self, value):
        """Pass when any member schema validates *value*; otherwise raise the
        accumulated validation errors."""
        verrors = ValidationErrors()
        attr_verrors = ValidationErrors()
        for attr in self.schemas:
            try:
                attr.validate(value)
            except TypeError:
                # Schema cannot handle a value of this type at all; try the next.
                pass
            except ValidationErrors as e:
                attr_verrors.extend(e)
            else:
                break
        else:
            # No schema accepted the value: surface everything collected.
            verrors.extend(attr_verrors)
        verrors.check()
    def to_json_schema(self, parent=None):
        """Return an ``anyOf`` JSON-schema fragment over the member schemas."""
        return {
            'anyOf': [i.to_json_schema() for i in self.schemas],
            'nullable': False,
            '_name_': self.name,
            'description': self.description,
            '_required_': self.required,
        }
    def resolve(self, schemas):
        """Resolve each unresolved member schema in place."""
        for index, i in enumerate(self.schemas):
            if not i.resolved:
                self.schemas[index] = i.resolve(schemas)
        self.resolved = True
        return self
    def copy(self):
        """Return a deep copy that will not be re-registered."""
        cp = copy.deepcopy(self)
        cp.register = False
        return cp
    def dump(self, value, fallback=True):
        """Dump *value* via the first schema that accepts it.

        With ``fallback=True`` the raw value is returned when none matches;
        otherwise a RuntimeError is raised with the collected errors.
        """
        value = copy.deepcopy(value)
        errors = []
        for schema in self.schemas:
            try:
                schema.clean(copy.deepcopy(value))
            except (Error, ValidationErrors) as e:
                errors.append(e)
            else:
                value = schema.dump(value)
                break
        else:
            if not fallback:
                raise RuntimeError(f"OROperator failed to dump all schemas: {errors!r}")
        return value
    def has_private(self):
        # Private if the operator itself or any member schema is private.
        return self.private or any(schema.has_private() for schema in self.schemas)
| 4,782 | Python | .py | 135 | 24.851852 | 88 | 0.560294 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,078 | enum.py | truenas_middleware/src/middlewared/middlewared/schema/enum.py | from .exceptions import Error
class EnumMixin:
    """Mixin adding an ``enum`` keyword that restricts accepted values to a fixed set."""
    def __init__(self, *args, **kwargs):
        self.enum = kwargs.pop('enum', None)
        super(EnumMixin, self).__init__(*args, **kwargs)
    def clean(self, value):
        """Run base cleaning, then reject any value outside ``self.enum``."""
        value = super().clean(value)
        if self.enum is None:
            return value
        if value is None and self.null:
            return value
        # Treat a scalar as a one-element sequence so lists and single values
        # share the same membership test.
        candidates = value if isinstance(value, (list, tuple)) else [value]
        if any(item not in self.enum for item in candidates):
            raise Error(self.name, f'Invalid choice: {value}')
        return value
24,079 | attribute.py | truenas_middleware/src/middlewared/middlewared/schema/attribute.py | import copy
import json
import textwrap
import typing
from middlewared.service_exception import ValidationErrors
from middlewared.validators import ValidatorBase
from .exceptions import Error
from .utils import NOT_PROVIDED, REDACTED_VALUE
class Attribute:
    """Base class for all schema attributes.

    Holds the common metadata (name, title, description, default, nullability,
    privacy, editability) and implements the clean/validate/dump pipeline that
    concrete attribute types build upon.
    """
    def __init__(
        self, name='', title=None, description=None, required=False, null=False, empty=True, private=False,
        validators: typing.Optional[typing.List[ValidatorBase]] = None, register=False, hidden=False, editable=True, example=None, **kwargs
    ):
        self.name = name
        # Distinguish "no default supplied" from "default is None".
        self.has_default = 'default' in kwargs and kwargs['default'] is not NOT_PROVIDED
        self.default = kwargs.pop('default', None)
        self.required = required
        self.null = null
        self.empty = empty
        self.private = private
        self.title = title or name
        self.description = description
        self.validators = validators or []
        self.register = register
        self.hidden = hidden
        self.editable = editable
        self.resolved = False
        if example:
            # Render the example as a fenced JSON snippet appended to the description.
            self.description = (description or '') + '\n' + textwrap.dedent('''
            Example(s):
            ```
            ''') + json.dumps(example, indent=4) + textwrap.dedent('''
            ```
            ''')
        # When a field is marked as non-editable, it must specify a default
        if not self.editable and not self.has_default:
            raise Error(self.name, 'Default value must be specified when attribute is marked as non-editable.')
        if kwargs:
            raise TypeError(f"Unexpected keyword arguments: {', '.join(map(repr, kwargs.keys()))}")
    def clean(self, value):
        """Normalize *value*: apply the default for a missing value and enforce
        the null/required/editable constraints. Raises ``Error`` on violation."""
        if value is None and self.null is False:
            raise Error(self.name, 'null not allowed')
        if value is NOT_PROVIDED:
            if self.has_default:
                # Deep-copy so callers cannot mutate the shared default.
                value = copy.deepcopy(self.default)
            else:
                raise Error(self.name, 'attribute required')
        if not self.editable and value != self.default:
            raise Error(self.name, 'Field is not editable.')
        return value
    def has_private(self):
        # Subclasses with nested attributes override this to recurse.
        return self.private
    def dump(self, value):
        """Return *value* in a form safe for display; private values are redacted."""
        if self.private:
            return REDACTED_VALUE
        return value
    def validate(self, value):
        """Run every configured validator, collecting ValueErrors into ValidationErrors."""
        verrors = ValidationErrors()
        for validator in self.validators:
            try:
                validator(value)
            except ValueError as e:
                verrors.add(self.name, str(e))
        verrors.check()
    def to_json_schema(self, parent=None):
        """This method should return the json-schema v4 equivalent for the
        given attribute.
        """
        raise NotImplementedError("Attribute must implement to_json_schema method")
    def _to_json_schema_common(self, parent) -> typing.Dict[str, typing.Any]:
        """Return the JSON-schema keys shared by every attribute type."""
        schema = {}
        schema['_name_'] = self.name
        if self.title:
            schema['title'] = self.title
        if self.description:
            schema['description'] = self.description
        if self.has_default:
            schema['default'] = self.default
        schema['_required_'] = self.required
        return schema
    def resolve(self, schemas):
        """
        After every plugin is initialized this method is called for every method param
        so that the real attribute is evaluated.
        e.g.
        @params(
            Patch('schema-name', 'new-name', ('add', {'type': 'string', 'name': 'test'})),
            Ref('schema-test'),
        )
        will resolve to:
        @params(
            Dict('new-name', ...)
            Dict('schema-test', ...)
        )
        """
        self.resolved = True
        if self.register:
            schemas.add(self)
        return self
    def copy(self):
        """Return a deep copy that will not be re-registered in the schema registry."""
        cp = copy.deepcopy(self)
        cp.register = False
        return cp
| 3,902 | Python | .py | 103 | 28.543689 | 120 | 0.601165 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,080 | utils.py | truenas_middleware/src/middlewared/middlewared/schema/utils.py | NOT_PROVIDED = object()
# Placeholder emitted instead of private values when dumping schemas/arguments.
REDACTED_VALUE = "********"
# Account names reserved by Microsoft for well-known security principals;
# stored casefolded because comparisons use str.casefold().
MS_RESERVED_WORDS = {
    'ANONYMOUS'.casefold(),
    'AUTHENTICATED USER'.casefold(),
    'BATCH'.casefold(),
    'BUILTIN'.casefold(),
    'DIALUP'.casefold(),
    'DOMAIN'.casefold(),
    'ENTERPRISE'.casefold(),
    'INTERACTIVE'.casefold(),
    'INTERNET'.casefold(),
    'LOCAL'.casefold(),
    'NETWORK'.casefold(),
    'NULL'.casefold(),
    'PROXY'.casefold(),
    'RESTRICTED'.casefold(),
    'SELF'.casefold(),
    'SERVER'.casefold(),
    'USERS'.casefold(),
    'WORLD'.casefold()
}
# Host names reserved for network infrastructure (these names come from
# RFC 952; the "852" in the identifier appears to be a typo — confirm).
RFC_852_RESERVED_WORDS = {
    'GATEWAY'.casefold(),
    'GW'.casefold(),
    'TAC'.casefold(),
}
# Combined set of names that may not be used as NetBIOS names.
RESERVED_WORDS = MS_RESERVED_WORDS | RFC_852_RESERVED_WORDS
| 709 | Python | .py | 28 | 21.214286 | 59 | 0.620944 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,081 | __init__.py | truenas_middleware/src/middlewared/middlewared/schema/__init__.py | from middlewared.service_exception import ValidationErrors # noqa
from .adaptable_schemas import Any, Bool, OROperator, Ref # noqa
from .attribute import Attribute # noqa
from .convert_schema import convert_schema # noqa
from .dict_schema import Cron, Dict # noqa
from .enum import EnumMixin # noqa
from .exceptions import Error # noqa
from .integer_schema import Float, Int, Timestamp # noqa
from .list_schema import List # noqa
from .patch import Patch # noqa
from .plugin_schema import Schemas # noqa
from .processor import accepts, clean_and_validate_arg, returns # noqa
from .resolvers import resolve_methods # noqa
from .string_schema import ( # noqa
Dataset, Datetime, Dir, File, HostPath, IPAddr, LDAP_DN, NetbiosName, NetbiosDomain,
Path, Password, SID, Str, Time, UnixPerm, URI, UUID
)
from .username import LocalUsername # noqa
from .utils import NOT_PROVIDED, REDACTED_VALUE # noqa
| 918 | Python | .py | 19 | 46.842105 | 88 | 0.775056 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,082 | list_schema.py | truenas_middleware/src/middlewared/middlewared/schema/list_schema.py | import copy
from middlewared.service_exception import ValidationErrors
from .attribute import Attribute
from .enum import EnumMixin
from .exceptions import Error
from .utils import REDACTED_VALUE
class List(EnumMixin, Attribute):
    """Attribute holding a list whose elements may be constrained to one or more
    member schemas (``items``); optionally enforces element uniqueness."""
    def __init__(self, *args, **kwargs):
        # `items` are candidate schemas; an element is accepted when any matches.
        self.items = kwargs.pop('items', [])
        self.unique = kwargs.pop('unique', False)
        if 'default' not in kwargs:
            kwargs['default'] = []
        super(List, self).__init__(*args, **kwargs)
    def clean(self, value):
        """Clean each element with the first item schema that accepts it.

        Raises ``Error`` when the value is not a list/tuple, is disallowed-empty,
        or an element matches no item schema.
        """
        value = super(List, self).clean(value)
        if value is None:
            return copy.deepcopy(self.default)
        if not isinstance(value, (list, tuple)):
            raise Error(self.name, 'Not a list')
        if not self.empty and not value:
            raise Error(self.name, 'Empty value not allowed')
        if self.items:
            for index, v in enumerate(value):
                for i in self.items:
                    try:
                        # Deep-copy so a failed clean cannot corrupt the element
                        # before the next candidate schema tries it.
                        tmpval = copy.deepcopy(v)
                        value[index] = i.clean(tmpval)
                        found = True
                        break
                    except (Error, ValidationErrors) as e:
                        found = e
                if self.items and found is not True:
                    raise Error(self.name, 'Item#{0} is not valid per list types: {1}'.format(index, found))
        return value
    def has_private(self):
        return self.private or any(item.has_private() for item in self.items)
    def dump(self, value):
        """Return *value* safe for display, redacting private elements."""
        if self.private:
            return REDACTED_VALUE
        # No schema is specified for list items or a schema is specified but
        # does not contain any private values. In this situation it's safe to
        # simply dump the raw value
        if not value or not self.items or not self.has_private():
            return value
        # In most cases we'll only have a single item and so avoid validation loop
        if len(self.items) == 1:
            return [self.items[0].dump(x) for x in value]
        # This is painful and potentially expensive. It would probably be best
        # if developers simply avoided designing APIs in this way.
        out_list = []
        for i in value:
            # Initialize the entry value to "private"
            # If for some reason we can't validate the item then obscure the entry
            # to prevent chance of accidental exposure of private data
            entry = REDACTED_VALUE
            for item in self.items:
                # the item.clean() method may alter the value and so we need to
                # make a deepcopy of it before validation
                to_validate = copy.deepcopy(i)
                try:
                    to_validate = item.clean(to_validate)
                    item.validate(to_validate)
                except Exception:
                    continue
                # Check whether we've already successfully validated this entry
                if entry != REDACTED_VALUE:
                    # more than one of schemas fit this bill.
                    # fail safe and make it private
                    entry = REDACTED_VALUE
                    break
                # dump the original value and not the one that has been cleaned
                entry = item.dump(i)
            out_list.append(entry)
        return out_list
    def validate(self, value):
        """Validate uniqueness (when enabled) and each element against the item schemas."""
        if value is None:
            return
        verrors = ValidationErrors()
        s = set()
        for i, v in enumerate(value):
            if self.unique:
                # Dicts are unhashable; use a sorted item-tuple as the set key.
                if isinstance(v, dict):
                    v = tuple(sorted(list(v.items())))
                if v in s:
                    verrors.add(f"{self.name}.{i}", "This value is not unique.")
                s.add(v)
            attr_verrors = ValidationErrors()
            for attr in self.items:
                try:
                    attr.validate(v)
                except ValidationErrors as e:
                    attr_verrors.add_child(f"{self.name}.{i}", e)
                else:
                    break
            else:
                # No item schema accepted the element: keep its errors.
                verrors.extend(attr_verrors)
        verrors.check()
        super().validate(value)
    def to_json_schema(self, parent=None):
        """Return the JSON-schema fragment for an array attribute."""
        schema = self._to_json_schema_common(parent)
        if self.null:
            schema['type'] = ['array', 'null']
        else:
            schema['type'] = 'array'
        schema['items'] = [i.to_json_schema(self) for i in self.items]
        return schema
    def resolve(self, schemas):
        """Resolve each unresolved item schema, then register if requested."""
        for index, i in enumerate(self.items):
            if not i.resolved:
                self.items[index] = i.resolve(schemas)
        if self.register:
            schemas.add(self)
        self.resolved = True
        return self
    def copy(self):
        """Deep-copy this schema, cloning each item schema individually."""
        cp = super().copy()
        cp.items = []
        for item in self.items:
            cp.items.append(item.copy())
        return cp
| 4,999 | Python | .py | 120 | 28.966667 | 108 | 0.549012 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,083 | patch.py | truenas_middleware/src/middlewared/middlewared/schema/patch.py | import copy
from .convert_schema import convert_schema
from .dict_schema import Dict
from .exceptions import ResolverError
class Patch:
    """Schema built by applying a sequence of patch operations to a registered
    ``Dict`` schema. Supported operations: add, rm, edit, attr, replace."""
    def __init__(self, orig_name, newname, *patches, register=False):
        self.schema_name = orig_name
        self.name = newname
        self.patches = list(patches)
        self.register = register
        self.resolved = False
    def resolve(self, schemas):
        """Look up the base schema, apply every patch to a renamed copy, return it."""
        schema = schemas.get(self.schema_name)
        if not schema:
            raise ResolverError(f'Schema {self.schema_name} not found')
        elif not isinstance(schema, Dict):
            raise ValueError('Patch non-dict is not allowed')
        schema = schema.copy()
        schema.name = self.name
        if hasattr(schema, 'title'):
            schema.title = self.name
        for operation, patch in self.patches:
            if operation == 'replace':
                # This is for convenience where it's hard sometimes to change attrs in a large dict
                # with custom function(s) outlining the operation - it's easier to just replace the attr
                name = patch['name'] if isinstance(patch, dict) else patch.name
                self._resolve_internal(schema, schemas, 'rm', {'name': name})
                operation = 'add'
            self._resolve_internal(schema, schemas, operation, patch)
        if self.register:
            schemas.add(schema)
        schema.resolved = True
        self.resolved = True
        return schema
    def _resolve_internal(self, schema, schemas, operation, patch):
        """Apply a single patch operation to *schema* in place."""
        if operation == 'add':
            # `patch` is either a schema-spec dict or a ready-made attribute.
            if isinstance(patch, dict):
                new = convert_schema(dict(patch))
            else:
                new = copy.deepcopy(patch)
            schema.attrs[new.name] = new
        elif operation == 'rm':
            # safe_delete tolerates removing an attribute that is not present.
            if patch.get('safe_delete') and patch['name'] not in schema.attrs:
                return
            del schema.attrs[patch['name']]
        elif operation == 'edit':
            attr = schema.attrs[patch['name']]
            if 'method' in patch:
                # The supplied callable mutates the attribute in place.
                patch['method'](attr)
            schema.attrs[patch['name']] = attr.resolve(schemas)
        elif operation == 'attr':
            # Set plain attributes (e.g. title, required) directly on the schema.
            for key, val in list(patch.items()):
                setattr(schema, key, val)
| 2,301 | Python | .py | 53 | 32.54717 | 104 | 0.591254 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,084 | resolvers.py | truenas_middleware/src/middlewared/middlewared/schema/resolvers.py | import pprint
from .adaptable_schemas import OROperator, Ref
from .attribute import Attribute
from .exceptions import ResolverError
from .patch import Patch
def resolver(schemas, obj):
    """Resolve, in place, every schema attached to *obj*'s declared schema types.

    *obj* must be a dict providing 'keys', 'get_attr' and 'has_key'; anything
    else is silently ignored.
    """
    required_keys = ('keys', 'get_attr', 'has_key')
    if not isinstance(obj, dict) or any(k not in obj for k in required_keys):
        return
    for schema_type in filter(obj['has_key'], obj['keys']):
        schema_obj = obj['get_attr'](schema_type)
        resolved_params = []
        for param in schema_obj:
            if not isinstance(param, (Patch, Ref, Attribute, OROperator)):
                raise ResolverError(f'Invalid parameter definition {param}')
            resolved_params.append(param if param.resolved else param.resolve(schemas))
        # FIXME: for some reason assigning params (f.accepts = new_params) does not work
        schema_obj.clear()
        schema_obj.extend(resolved_params)
def resolve_methods(schemas, to_resolve):
    """Repeatedly resolve methods until all succeed or a full pass makes no progress.

    Methods whose schema dependencies are satisfied by earlier passes resolve
    on later passes; a pass with zero successes means a dependency is missing.
    """
    while to_resolve:
        progress = 0
        failures = []
        for method in list(to_resolve):
            try:
                resolver(schemas, method)
            except ResolverError as exc:
                failures.append((method, exc))
                continue
            to_resolve.remove(method)
            progress += 1
        if not progress:
            raise ValueError(f'Not all schemas could be resolved:\n{pprint.pformat(failures)}')
| 1,394 | Python | .py | 34 | 31.441176 | 95 | 0.608278 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,085 | string_schema.py | truenas_middleware/src/middlewared/middlewared/schema/string_schema.py | import errno
import ipaddress
import os
import uuid
import re
try:
import wbclient
except ImportError:
# During fresh install of TrueNAS lookup of libwbclient.so.0
# will fail without specifying an LD_LOOKUP_PATH within the
# freshly extracted TrueNAS SCALE squashfs filesystem. If we
# don't skip the ImportError then fresh SCALE install will fail.
pass
from datetime import datetime, time, timezone
from ldap import dn
from urllib.parse import urlparse
from middlewared.service_exception import CallError, ValidationErrors
from .attribute import Attribute
from .enum import EnumMixin
from .exceptions import Error
from .utils import RESERVED_WORDS
# NetBIOS domain names allow using a dot "." to define a NetBIOS scope
# This is not true for NetBIOS computer names
RE_NETBIOSNAME = re.compile(r"^(?![0-9]*$)[a-zA-Z0-9-_!@#\$%^&\(\)'\{\}~]{1,15}$")
RE_NETBIOSDOM = re.compile(r"^(?![0-9]*$)[a-zA-Z0-9\.\-_!@#\$%^&\(\)'\{\}~]{1,15}$")
class Str(EnumMixin, Attribute):
    """String attribute: coerces plain integers and enforces an optional max length."""
    def __init__(self, *args, **kwargs):
        # max_length=None disables the 1024 default and falls back to SQLite's
        # text storage ceiling - https://www.sqlite.org/limits.html
        self.max_length = kwargs.pop('max_length', 1024) or (2 ** 31) - 1
        super().__init__(*args, **kwargs)
    def clean(self, value):
        """Coerce ints to str; reject non-strings and disallowed empty values."""
        cleaned = super(Str, self).clean(value)
        if cleaned is None:
            return cleaned
        # bool is an int subclass and must not be silently stringified.
        if isinstance(cleaned, int) and not isinstance(cleaned, bool):
            cleaned = str(cleaned)
        if not isinstance(cleaned, str):
            raise Error(self.name, 'Not a string')
        if not self.empty and not cleaned.strip():
            raise Error(self.name, 'Empty value not allowed')
        return cleaned
    def to_json_schema(self, parent=None):
        """Return the JSON-schema fragment, including enum choices when set."""
        schema = self._to_json_schema_common(parent)
        schema['type'] = ['string', 'null'] if self.null else 'string'
        if self.enum is not None:
            schema['enum'] = self.enum
        return schema
    def validate(self, value):
        """Enforce the configured maximum length, then run base validation."""
        if value is None:
            return value
        verrors = ValidationErrors()
        if value and len(str(value)) > self.max_length:
            verrors.add(self.name, f'The value may not be longer than {self.max_length} characters')
        verrors.check()
        return super().validate(value)
class Path(Str):
    """Filesystem path attribute that trims slashes and normalizes the result."""
    def __init__(self, *args, **kwargs):
        # forwarding_slash=True keeps a leading "/" (absolute paths); when False
        # both leading and trailing slashes are removed.
        self.forwarding_slash = kwargs.pop('forwarding_slash', True)
        super().__init__(*args, **kwargs)
    def clean(self, value):
        """Strip whitespace/slashes per configuration and return a normalized path."""
        cleaned = super().clean(value)
        if cleaned is None:
            return cleaned
        cleaned = cleaned.strip()
        cleaned = cleaned.rstrip("/") if self.forwarding_slash else cleaned.strip("/")
        return os.path.normpath(cleaned.strip())
class Password(Str):
    """String attribute that is always private (redacted when dumped)."""
    def __init__(self, *args, **kwargs):
        # Force privacy regardless of what the caller passed.
        kwargs['private'] = True
        super().__init__(*args, **kwargs)
class SID(Str):
    """Windows security identifier (SID) string attribute."""
    def clean(self, value):
        """Normalize the SID to upper case with surrounding whitespace removed."""
        cleaned = super().clean(value)
        if cleaned is None:
            return cleaned
        return cleaned.strip().upper()
    def validate(self, value):
        """Check SID syntax via winbind's validator."""
        if value is None:
            return value
        verrors = ValidationErrors()
        if not wbclient.sid_is_valid(value):
            verrors.add(
                self.name,
                'SID is malformed. See MS-DTYP Section 2.4 for SID type specifications. '
                'Typically SIDs refer to existing objects on the local or remote server '
                'and so an appropriate value should be queried prior to submitting to API '
                'endpoints.'
            )
        verrors.check()
class NetbiosName(Str):
    """NetBIOS computer name: 1-15 characters from a restricted set, upper-cased."""
    regex = RE_NETBIOSNAME
    def clean(self, value):
        """Upper-case and strip the name after base cleaning."""
        cleaned = super().clean(value)
        if cleaned is None:
            return cleaned
        return cleaned.strip().upper()
    def validate(self, value):
        """Check the NetBIOS character/length rules and the reserved-name list."""
        if value is None:
            return value
        verrors = ValidationErrors()
        if not self.regex.match(value):
            verrors.add(
                self.name,
                'Invalid NetBIOS name. NetBIOS names must be between 1 and 15 characters in '
                'length and may not contain the following characters: \\/:*?"<>|.'
            )
        if value.casefold() in RESERVED_WORDS:
            verrors.add(
                self.name,
                f'NetBIOS names may not be one of following reserved names: {", ".join(RESERVED_WORDS)}'
            )
        verrors.check()
        return super().validate(value)
class NetbiosDomain(NetbiosName):
    """NetBIOS domain name; unlike computer names, "." is allowed (NetBIOS scope)."""
    regex = RE_NETBIOSDOM
class Dataset(Path):
    """Dataset name attribute: defaults to non-empty values without surrounding slashes."""
    def __init__(self, *args, **kwargs):
        # Callers may still override either default explicitly.
        defaults = {'empty': False, 'forwarding_slash': False}
        super().__init__(*args, **{**defaults, **kwargs})
class HostPath(Path):
    """Path attribute that must exist on the host; subclasses refine the check."""
    def validate_internal(self, verrors, value):
        # Hook for subclasses (file/directory checks); the base accepts anything.
        pass
    def validate(self, value):
        """Verify the path exists, then delegate extra checks to validate_internal."""
        if value is None:
            return
        verrors = ValidationErrors()
        if value:
            if os.path.exists(value):
                self.validate_internal(verrors, value)
            else:
                verrors.add(
                    self.name,
                    'Path does not exist (underlying dataset may be locked or the path is just missing).',
                    errno.ENOENT
                )
        verrors.check()
        return super().validate(value)
class Dir(HostPath):
    """Host path that must refer to an existing directory."""
    def validate_internal(self, verrors, value):
        # Existence is already guaranteed by HostPath.validate at this point.
        if not os.path.isdir(value):
            verrors.add(self.name, 'This path is not a directory.', errno.ENOTDIR)
class File(HostPath):
    """Host path that must refer to an existing regular file."""
    def validate_internal(self, verrors, value):
        # NOTE(review): EISDIR is reported for any non-regular-file path, not
        # only directories - confirm this errno is intended.
        if not os.path.isfile(value):
            verrors.add(self.name, 'This path is not a file.', errno.EISDIR)
class URI(Str):
    """String attribute that must parse as a URI with both scheme and network location."""
    def validate(self, value):
        super().validate(value)
        verrors = ValidationErrors()
        if value:
            parsed = urlparse(value)
            # Both a scheme (e.g. "https") and a netloc (host) are mandatory.
            if not (parsed.scheme and parsed.netloc):
                verrors.add(self.name, 'Not a valid URI')
        verrors.check()
class IPAddr(Str):
    """IP address attribute supporting v4/v6, address/CIDR/network forms, optional
    IPv6 zone indexes, and exclusion of specific address categories."""
    # Address categories (mapped to ipaddress `is_*` properties) that callers
    # may list in `excluded_address_types`.
    excluded_addr_types = [
        'MULTICAST',
        'PRIVATE',
        'GLOBAL',
        'UNSPECIFIED',
        'RESERVED',
        'LOOPBACK',
        'LINK_LOCAL'
    ]
    def __init__(self, *args, **kwargs):
        self.cidr = kwargs.pop('cidr', False)
        self.network = kwargs.pop('network', False)
        self.network_strict = kwargs.pop('network_strict', False)
        self.address_types = kwargs.pop('excluded_address_types', [])
        self.v4 = kwargs.pop('v4', True)
        self.v6 = kwargs.pop('v6', True)
        # Select the ipaddress constructor matching the allowed families and
        # the requested form (network > interface/CIDR > plain address).
        if self.v4 and self.v6:
            if self.network:
                self.factory = ipaddress.ip_network
            elif self.cidr:
                self.factory = ipaddress.ip_interface
            else:
                self.factory = ipaddress.ip_address
        elif self.v4:
            if self.network:
                self.factory = ipaddress.IPv4Network
            elif self.cidr:
                self.factory = ipaddress.IPv4Interface
            else:
                self.factory = ipaddress.IPv4Address
        elif self.v6:
            if self.network:
                self.factory = ipaddress.IPv6Network
            elif self.cidr:
                self.factory = ipaddress.IPv6Interface
            else:
                self.factory = ipaddress.IPv6Address
        else:
            raise ValueError('Either IPv4 or IPv6 should be allowed')
        self.allow_zone_index = kwargs.pop('allow_zone_index', False)
        super(IPAddr, self).__init__(*args, **kwargs)
    def __check_permitted_addr_types(self, value):
        """Raise ValueError when *value* falls in an excluded address category;
        raise CallError for an unknown category name (programming error)."""
        if not self.address_types:
            return
        to_check = self.factory(value)
        if isinstance(to_check, (ipaddress.IPv4Interface, ipaddress.IPv6Interface)):
            to_check = to_check.ip
        for addr_type in self.address_types:
            if addr_type not in self.excluded_addr_types:
                raise CallError(
                    f'INTERNAL ERROR: {addr_type} not in supported types. '
                    'This indicates a programming error in API endpoint.'
                )
            # e.g. 'MULTICAST' -> the ipaddress object's `is_multicast` property.
            if to_check.__getattribute__(f'is_{addr_type.lower()}'):
                raise ValueError(
                    f'{str(to_check)}: {addr_type.lower()} addresses are not permitted.'
                )
    def clean(self, value):
        """Parse and normalize the address string; raises ``Error`` on invalid input."""
        value = super().clean(value)
        if value:
            try:
                if self.network:
                    value = str(self.factory(value, strict=self.network_strict))
                else:
                    if self.cidr and '/' not in value:
                        raise ValueError(
                            'Specified address should be in CIDR notation, e.g. 192.168.0.2/24'
                        )
                    zone_index = None
                    # A "%zone" suffix is split off before parsing and re-appended
                    # to the normalized address afterwards.
                    if self.allow_zone_index and '%' in value:
                        value, zone_index = value.rsplit('%', 1)
                    addr = self.factory(value)
                    if zone_index is not None and not isinstance(addr, ipaddress.IPv6Address):
                        raise ValueError('Zone index is allowed only for IPv6 addresses')
                    value = str(addr)
                    if zone_index is not None:
                        value += f'%{zone_index}'
                    self.__check_permitted_addr_types(value)
            except ValueError as e:
                raise Error(self.name, str(e))
        return value
    def validate(self, value):
        """Surface clean() failures as ValidationErrors, then run base validation."""
        if value is None:
            return value
        verrors = ValidationErrors()
        try:
            self.clean(value)
        except (Error, ValueError) as e:
            verrors.add(self.name, str(e))
        verrors.check()
        return super().validate(value)
class Time(Str):
    """Wall-clock time attribute accepting "HH:MM" strings (24-hour clock).

    ``clean`` returns a ``datetime.time`` instance; values that are already
    ``time`` objects pass through unchanged. Raises ValueError with a uniform
    message for any malformed input.
    """
    def clean(self, value):
        if isinstance(value, time):
            return value
        value = super(Time, self).clean(value)
        if value is None:
            return value
        try:
            hours, minutes = value.split(':')
        except ValueError:
            raise ValueError('Time should be in 24 hour format like "18:00"')
        else:
            try:
                return time(int(hours), int(minutes))
            except (TypeError, ValueError):
                # Fix: int('xx') and time(25, 0) raise ValueError, which was not
                # caught before, so raw parse errors escaped instead of this
                # friendly message. (The TypeError branch was effectively dead.)
                raise ValueError('Time should be in 24 hour format like "18:00"')
    def validate(self, value):
        # Stringify so the base length/enum validation applies to time objects too.
        return super().validate(str(value))
class Datetime(Str):
    """Attribute accepting a POSIX timestamp and producing an aware UTC datetime."""
    def clean(self, value):
        """Pass through datetime instances; convert numeric timestamps to UTC."""
        if isinstance(value, datetime):
            return value
        raw = super().clean(value)
        if raw is None:
            return raw
        try:
            return datetime.fromtimestamp(float(raw), tz=timezone.utc)
        except (TypeError, ValueError):
            raise ValueError('Invalid datetime specified')
    def validate(self, value):
        """Validate the string form so base checks apply uniformly."""
        return super().validate(str(value))
class UUID(Str):
    """Attribute that must be a valid UUID, given as a hex string or a raw
    128-bit integer."""
    def validate(self, value):
        if value is None:
            return
        verrors = ValidationErrors()
        try:
            if isinstance(value, int):
                uuid.UUID(int=value)
            else:
                uuid.UUID(value)
        except TypeError:
            verrors.add(self.name, 'Please supply a valid hex-formatted UUID string')
        except ValueError as e:
            # Fix: pass the message string, not the exception object, matching
            # every other verrors.add() call site in this module.
            verrors.add(self.name, str(e))
        verrors.check()
        return super().validate(value)
class UnixPerm(Str):
    """Octal permission string such as "755"; only the lower 9 mode bits are allowed."""
    def validate(self, value):
        if value is None:
            return
        try:
            mode = int(value, 8)
        except ValueError:
            raise ValueError('Not a valid integer. Must be between 000 and 777')
        # Any bits above the rwxrwxrwx mask (e.g. setuid/sticky) are rejected.
        if mode != (mode & 0o777):
            raise ValueError('Please supply a value between 000 and 777')
        return super().validate(value)
class LDAP_DN(Str):
    """String attribute that must be a syntactically valid LDAP distinguished name."""
    def validate(self, value):
        if value is None:
            return
        if not dn.is_dn(value):
            verrors = ValidationErrors()
            verrors.add(self.name, "Invalid LDAP DN specified.")
            verrors.check()
        return super().validate(value)
| 12,548 | Python | .py | 325 | 28.073846 | 106 | 0.573636 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,086 | exceptions.py | truenas_middleware/src/middlewared/middlewared/schema/exceptions.py | import errno
class Error(Exception):
    """Validation failure for a single attribute, carrying an errno-style code."""
    def __init__(self, attribute, errmsg: str, errno=errno.EINVAL):
        self.attribute = attribute
        self.errmsg = errmsg
        self.errno = errno
        # Optional extra payload; callers may attach context after construction.
        self.extra = None
    def __str__(self):
        return f'[{self.attribute}] {self.errmsg}'
class ResolverError(Exception):
    """Raised when a schema reference cannot be resolved from the registry."""
    pass
| 355 | Python | .py | 11 | 26 | 67 | 0.64497 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,087 | integer_schema.py | truenas_middleware/src/middlewared/middlewared/schema/integer_schema.py | import contextlib
from datetime import datetime
from .attribute import Attribute
from .enum import EnumMixin
from .exceptions import Error
class Int(EnumMixin, Attribute):
    """Integer attribute; numeric strings are coerced, booleans are rejected."""
    def clean(self, value):
        """Return an int (or None); raise ``Error`` for non-coercible values."""
        cleaned = super(Int, self).clean(value)
        if cleaned is None:
            return cleaned
        # bool subclasses int, so it must be excluded explicitly.
        if isinstance(cleaned, int) and not isinstance(cleaned, bool):
            return cleaned
        if isinstance(cleaned, str):
            try:
                return int(cleaned)
            except ValueError:
                pass
        raise Error(self.name, 'Not an integer')
    def to_json_schema(self, parent=None):
        """Return the JSON-schema fragment for an integer attribute."""
        json_type = ['integer', 'null'] if self.null else 'integer'
        return {'type': json_type, **self._to_json_schema_common(parent)}
class Timestamp(Int):
    """Integer attribute that must also be a representable UNIX timestamp."""
    def validate(self, value):
        super().validate(value)
        if value is None:
            return value
        try:
            datetime.fromtimestamp(value)
        except (ValueError, OverflowError, OSError):
            # Fix: fromtimestamp() raises OverflowError/OSError (not only
            # ValueError) for timestamps outside the platform-supported range;
            # previously those escaped as raw exceptions.
            raise Error(self.name, 'Not a valid timestamp')
class Float(EnumMixin, Attribute):
    """Floating point attribute; booleans are explicitly rejected."""
    def clean(self, value):
        """Coerce to float, mapping a missing optional value to the default."""
        cleaned = super(Float, self).clean(value)
        if cleaned is None and not self.required:
            return self.default
        # bool would coerce silently (float(True) == 1.0), so reject it up front.
        if isinstance(cleaned, bool):
            raise Error(self.name, 'Not a floating point number')
        try:
            return float(cleaned)
        except (TypeError, ValueError):
            raise Error(self.name, 'Not a floating point number')
    def to_json_schema(self, parent=None):
        """Return the JSON-schema fragment ('float' is non-standard JSON Schema,
        preserved for API compatibility)."""
        json_type = ['float', 'null'] if self.null else 'float'
        return {'type': json_type, **self._to_json_schema_common(parent)}
| 1,677 | Python | .py | 46 | 27.304348 | 85 | 0.602723 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,088 | plugin_schema.py | truenas_middleware/src/middlewared/middlewared/schema/plugin_schema.py | class Schemas(dict):
def add(self, schema):
if schema.name in self:
raise ValueError(f'Schema "{schema.name}" is already registered')
super().__setitem__(schema.name, schema)
| 208 | Python | .py | 5 | 34 | 77 | 0.638614 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,089 | username.py | truenas_middleware/src/middlewared/middlewared/schema/username.py | import string
from .attribute import Attribute
from .exceptions import Error
class LocalUsername(Attribute):
    """Username attribute following useradd(8) rules (relaxed first-character rule)."""
    def to_json_schema(self, parent=None):
        return {**self._to_json_schema_common(parent), 'type': 'string'}
    def validate(self, value):
        # see man 8 useradd, specifically the CAVEATS section
        # NOTE: we are ignoring the man page's recommendation for insistence
        # upon the starting character of a username be a lower-case letter.
        # We aren't enforcing this for maximum backwards compatibility
        username = str(value)
        allowed = string.ascii_letters + string.digits + '_-$.'
        allowed_first = string.ascii_letters + '_'
        if not username:
            raise Error(self.name, 'Username must be at least 1 character in length')
        if len(username) > 32:
            raise Error(self.name, 'Username cannot exceed 32 characters in length')
        if username[0] not in allowed_first:
            raise Error(self.name, 'Username must start with a letter or an underscore')
        if '$' in username and not username.endswith('$'):
            raise Error(self.name, 'Username must end with a dollar sign character')
        if any(ch not in allowed for ch in username):
            raise Error(self.name, f'Valid characters for a username are: {", ".join(allowed)!r}')
        return super().validate(username)
24,090 | processor.py | truenas_middleware/src/middlewared/middlewared/schema/processor.py | import asyncio
import copy
import inspect
import typing
import warnings
from middlewared.schema import Attribute
from middlewared.service_exception import CallError, ValidationErrors
from .exceptions import Error
from .utils import NOT_PROVIDED
def clean_and_validate_arg(verrors: ValidationErrors, attr: Attribute, arg):
    """Clean *arg* through *attr* and validate the result.

    On success the cleaned value is returned; on a validation failure the
    error is collected into *verrors* and ``None`` is returned.
    """
    try:
        cleaned = attr.clean(arg)
        attr.validate(cleaned)
    except Error as e:
        verrors.add(e.attribute, e.errmsg, e.errno)
    except ValidationErrors as e:
        verrors.extend(e)
    else:
        return cleaned
def validate_return_type(func, result, schemas: typing.Iterable[Attribute]):
    """Validate *result* returned by *func* against its declared *schemas*.

    A ``None`` result with no schemas is accepted; a non-``None`` result with
    no schemas is a programming error.  Validation failures raise
    ``ValidationErrors``.
    """
    if not schemas:
        if result is None:
            return
        raise ValueError(f'Return schema missing for {func.__name__!r}')

    # Deep-copy so validation/cleaning cannot mutate the value handed back to callers.
    checked = copy.deepcopy(result)
    if not isinstance(checked, tuple):
        checked = [checked]

    verrors = ValidationErrors()
    for value, schema in zip(checked, schemas):
        clean_and_validate_arg(verrors, schema, value)
    verrors.check()
def returns(*schema):
    """Decorator attaching a return schema (at most one) to a method."""
    if len(schema) > 1:
        raise ValueError("Multiple schemas for @returns are not allowed")

    def returns_internal(f):
        # The wrapper just forwards the call; the schema itself is attached as
        # metadata (`nf.returns`) for enforcement/documentation elsewhere.
        if asyncio.iscoroutinefunction(f):
            async def nf(*args, **kwargs):
                return await f(*args, **kwargs)
        else:
            def nf(*args, **kwargs):
                return f(*args, **kwargs)

        from middlewared.utils.type import copy_function_metadata
        copy_function_metadata(f, nf)
        nf.wraps = f

        for s in list(schema):
            # Default the schema's name/title to the decorated function's name
            s.name = s.name or f.__name__
            if hasattr(s, 'title'):
                s.title = s.title or s.name

        nf.returns = list(schema)
        return nf

    return returns_internal
def accepts(*schema, audit=None, audit_callback=False, audit_extended=None, deprecated=None, roles=None):
    """
    Decorator that validates a public method's call arguments against `schema`.

    `audit` is the message that will be logged to the audit log when the decorated function is called.
    `audit_extended` is the function that takes the same arguments as the decorated function and returns the string
    that will be appended to the audit message to be logged.
    `roles` is a list of user roles that will gain access to this method.
    `deprecated` is a list of pairs of functions that will adapt legacy method call signatures.

    First member of pair is a function that accepts a list of args and returns `True` if a legacy method call
    matching a specific legacy signature was detected.
    Second member of pair is a function that accepts detected legacy arguments and returns a list of arguments
    for newer signature.
    All pairs are executed sequentially so first pair can adapt from API 2.0 to API 2.1, second from API 2.1
    to API 2.2 and so on.

    Example:

        @accepts(
            Dict("options"),
            deprecated=[
                (
                    lambda args: len(args) == 2,
                    lambda option1, option2: [{
                        "option1": option1,
                        "option2": option2,
                    }]
                )
            ],
        )

    Here an old-style method call `method("a", "b")` will be adapted to a new-style `method({"option1": "a",
    "option2": "b"})`
    """
    deprecated = deprecated or []

    # Hidden (injected) schemas must come last: a visible argument after a
    # hidden one could never be addressed positionally.
    further_only_hidden = False
    for i in schema:
        if getattr(i, 'hidden', False):
            further_only_hidden = True
        elif further_only_hidden:
            raise ValueError('You can\'t have non-hidden arguments after hidden')

    def wrap(func):
        # If `func` is itself a wrapper from another decorator, inspect the
        # original function it wraps.
        f = func.wraps if hasattr(func, 'wraps') else func
        if inspect.getfullargspec(f).defaults:
            raise ValueError('All public method default arguments should be specified in @accepts()')

        # Make sure number of schemas is same as method argument
        args_index = calculate_args_index(f, audit_callback)
        assert len(schema) == f.__code__.co_argcount - args_index  # excludes injected args (self, app, job, ...)

        def clean_and_validate_args(args, kwargs):
            args = list(args)
            # Injected arguments (self, app, audit callback, job) precede the
            # schema-validated ones and are passed through untouched.
            common_args = args[:args_index]
            signature_args = args[args_index:]

            had_warning = False
            for check, adapt in deprecated:
                if check(signature_args):
                    if not had_warning:
                        warnings.warn(f'Method {f!r} was called with a deprecated signature', DeprecationWarning)
                        had_warning = True
                    signature_args = adapt(*signature_args)

            # Deep-copy so that schema cleaning never mutates the caller's data.
            args = common_args + copy.deepcopy(signature_args)
            kwargs = copy.deepcopy(kwargs)

            verrors = ValidationErrors()

            # Iterate over positional args first, excluding self
            i = 0
            if len(args[args_index:]) > len(nf.accepts):
                raise CallError(f'Too many arguments (expected {len(nf.accepts)}, found {len(args[args_index:])})')
            for _ in args[args_index:]:
                args[args_index + i] = clean_and_validate_arg(verrors, nf.accepts[i], args[args_index + i])
                i += 1

            # Use i counter to map keyword argument to rpc positional
            for x in list(range(i + args_index, f.__code__.co_argcount)):
                kwarg = f.__code__.co_varnames[x]
                if kwarg in kwargs:
                    attr = nf.accepts[i]
                    i += 1
                    value = kwargs[kwarg]
                elif len(nf.accepts) >= i + 1:
                    # Not supplied by the caller: validate the NOT_PROVIDED
                    # sentinel so the schema can apply its default.
                    attr = nf.accepts[i]
                    i += 1
                    value = NOT_PROVIDED
                else:
                    i += 1
                    continue

                kwargs[kwarg] = clean_and_validate_arg(verrors, attr, value)

            verrors.check()

            return args, kwargs

        if asyncio.iscoroutinefunction(func):
            async def nf(*args, **kwargs):
                args, kwargs = clean_and_validate_args(args, kwargs)
                return await func(*args, **kwargs)
        else:
            def nf(*args, **kwargs):
                args, kwargs = clean_and_validate_args(args, kwargs)
                return func(*args, **kwargs)

        from middlewared.utils.type import copy_function_metadata
        copy_function_metadata(f, nf)
        # Expose validation/audit metadata on the wrapper for introspection.
        nf.accepts = list(schema)
        if hasattr(func, 'returns'):
            nf.returns = func.returns
        nf.audit = audit
        nf.audit_callback = audit_callback
        nf.audit_extended = audit_extended
        nf.roles = roles or []
        nf.wraps = f
        nf.wrap = wrap

        return nf

    return wrap
def calculate_args_index(f, audit_callback):
    """Count the leading positional arguments of *f* that are injected by the
    framework rather than schema-validated.

    This covers ``self``, an injected app (``_pass_app``), the audit callback,
    an injected job (``_job``) and any explicitly skipped arguments
    (``_skip_arg``).
    """
    index = 0
    if f.__code__.co_argcount >= 1 and f.__code__.co_varnames[0] == 'self':
        index += 1
    if hasattr(f, '_pass_app'):
        index += 1
    if audit_callback:
        index += 1
    if hasattr(f, '_job'):
        index += 1
    index += getattr(f, '_skip_arg', 0)
    return index
| 7,215 | Python | .py | 167 | 32.053892 | 115 | 0.584927 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,091 | environ.py | truenas_middleware/src/middlewared/middlewared/common/environ.py | import os
import time
import urllib.request
def environ_update(update):
    """Apply *update* to ``os.environ``.

    A value of ``None`` removes the variable; any other value is set verbatim.
    Proxy-related changes reset urllib's global opener (so the ProxyHandler is
    recalculated) and a ``TZ`` change re-reads the timezone.
    """
    for key, value in update.items():
        if value is None:
            os.environ.pop(key, None)
        else:
            os.environ[key] = value

    if not {'http_proxy', 'https_proxy'}.isdisjoint(update):
        # Drop the cached global opener so ProxyHandler is rebuilt on next use
        urllib.request.install_opener(None)

    if 'TZ' in update:
        time.tzset()
| 423 | Python | .py | 14 | 23.428571 | 65 | 0.633663 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,092 | certificate.py | truenas_middleware/src/middlewared/middlewared/common/attachment/certificate.py | from middlewared.service import ServiceChangeMixin
class CertificateAttachmentDelegate:
    """Base class for subsystems that consume a certificate and may need to be
    redeployed when it changes."""

    HUMAN_NAME = NotImplementedError
    NAMESPACE = NotImplementedError

    def __init__(self, middleware):
        self.middleware = middleware

    async def state(self, cert_id):
        """Return whether the certificate identified by *cert_id* is in use."""
        raise NotImplementedError

    async def redeploy(self, cert_id):
        """Redeploy the consumer after the certificate has changed."""
        raise NotImplementedError

    async def consuming_cert_human_output(self, cert_id):
        """Human-readable name of the consumer, or ``None`` when unused."""
        if await self.state(cert_id):
            return self.HUMAN_NAME
        return None
class CertificateServiceAttachmentDelegate(CertificateAttachmentDelegate, ServiceChangeMixin):
    """Delegate for a ConfigService that stores its certificate in a single field."""

    CERT_FIELD = 'certificate'
    SERVICE = NotImplementedError
    SERVICE_VERB = 'reload'

    async def get_namespace(self):
        # Namespace used for the `.config` call; defaults to the service name
        if self.NAMESPACE is NotImplementedError:
            return self.SERVICE
        return self.NAMESPACE

    async def state(self, cert_id):
        config = await self.middleware.call(f'{await self.get_namespace()}.config')
        value = config[self.CERT_FIELD]
        if isinstance(value, dict):
            # Relational field may come back expanded as a full row
            value = value['id']
        return value == cert_id

    async def redeploy(self, cert_id):
        if await self.middleware.call('service.started', self.SERVICE):
            await self.middleware.call(f'service.{self.SERVICE_VERB}', self.SERVICE)
class CertificateCRUDServiceAttachmentDelegate(CertificateAttachmentDelegate, ServiceChangeMixin):
    """Delegate for a CRUDService whose rows reference a certificate."""

    CERT_FILTER_KEY = 'certificate'

    async def get_filters(self, cert_id):
        """Query filters selecting rows that use the certificate."""
        return [[self.CERT_FILTER_KEY, '=', cert_id]]

    async def attachments(self, cert_id):
        filters = await self.get_filters(cert_id)
        return await self.middleware.call(f'{self.NAMESPACE}.query', filters)

    async def state(self, cert_id):
        return len(await self.attachments(cert_id)) > 0
| 1,798 | Python | .py | 35 | 44.228571 | 101 | 0.724771 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,093 | __init__.py | truenas_middleware/src/middlewared/middlewared/common/attachment/__init__.py | from middlewared.service import ServiceChangeMixin
class FSAttachmentDelegate(ServiceChangeMixin):
    """
    Represents something (share, automatic task, etc.) that needs to be enabled or disabled when dataset
    becomes available or unavailable (due to import/export, encryption/decryption, etc.)
    """
    # Unique identifier among all FSAttachmentDelegate classes
    name = NotImplementedError
    # Human-readable name of item handled by this delegate (e.g. "NFS Share")
    title = NotImplementedError
    # If is not None, corresponding service will be restarted after performing tasks on item
    service = None
    # attribute which is used to identify human readable description of an attachment
    resource_name = 'name'

    def __init__(self, middleware):
        # Keep a middleware handle and reuse its logger
        self.middleware = middleware
        self.logger = middleware.logger

    async def query(self, path, enabled, options=None):
        """
        Lists enabled/disabled items that depend on a dataset

        :param path: mountpoint of the dataset (e.g. "/mnt/tank/work")
        :param enabled: whether to list enabled or disabled items
        :param options: an optional attribute which can control the filters/logic applied to retrieve attachments

        :return: list of items of arbitrary type (will be passed to other methods of this class)
        """
        raise NotImplementedError

    async def get_attachment_name(self, attachment):
        """
        Returns human-readable description of item (e.g. it's path). Will be combined with `cls.title`.
        I.e. if you return here `/mnt/tank/work`, user will see: `NFS Share "/mnt/tank/work"`

        :param attachment: one of the items returned by `query`
        :return: string described above
        """
        return attachment[self.resource_name]

    async def delete(self, attachments):
        """
        Permanently delete said items

        :param attachments: list of the items returned by `query`
        :return: None
        """
        raise NotImplementedError

    async def toggle(self, attachments, enabled):
        """
        Enable or disable said items

        :param attachments: list of the items returned by `query`
        :param enabled: desired enabled state
        :return: None
        """
        raise NotImplementedError

    async def start(self, attachments):
        """Optional hook invoked after a dataset becomes available again."""
        pass

    async def stop(self, attachments):
        """Optional hook invoked before a dataset becomes unavailable."""
        pass
class LockableFSAttachmentDelegate(FSAttachmentDelegate):
    """
    Represents a share/task/resource which is affected if the dataset underlying is locked
    """
    # service object (the SharingService/CRUDService subclass backing this delegate)
    service_class = NotImplementedError

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Cache the service's field names and datastore configuration so the
        # generic query/toggle/delete implementations below can work untyped.
        self.enabled_field = self.service_class.enabled_field
        self.locked_field = self.service_class.locked_field
        self.path_field = self.service_class.path_field
        self.datastore_model = self.service_class._config.datastore
        self.datastore_prefix = self.service_class._config.datastore_prefix
        self.namespace = self.service_class._config.namespace
        if not self.service:
            self.service = self.service_class._config.service

    async def get_query_filters(self, enabled, options=None):
        """Build query filters for attachments in the given enabled state,
        optionally also filtering on the locked flag."""
        options = options or {}
        filters = [[self.enabled_field, '=', enabled]]
        if 'locked' in options:
            filters += [[self.locked_field, '=', options['locked']]]
        return filters

    async def start_service(self):
        # Only start the service when it is configured, enabled and not
        # already running.
        if not (
            service_obj := await self.middleware.call('service.query', [['service', '=', self.service]])
        ) or not service_obj[0]['enable'] or service_obj[0]['state'] == 'RUNNING':
            return
        await self.middleware.call('service.start', self.service)

    async def query(self, path, enabled, options=None):
        """Return attachments in the given enabled state whose path relates to *path*
        (see `is_child_of_path` for the exact matching semantics)."""
        results = []
        options = options or {}
        check_parent = options.get('check_parent', False)
        exact_match = options.get('exact_match', False)
        for resource in await self.middleware.call(
            f'{self.namespace}.query', await self.get_query_filters(enabled, options)
        ):
            if await self.is_child_of_path(resource, path, check_parent, exact_match):
                results.append(resource)
        return results

    async def toggle(self, attachments, enabled):
        """Persist the new enabled state for each attachment, then start/stop them."""
        for attachment in attachments:
            await self.middleware.call(
                'datastore.update', self.datastore_model, attachment['id'], {
                    f'{self.datastore_prefix}{self.enabled_field}': enabled
                }
            )
            await self.remove_alert(attachment)

        if enabled:
            await self.start(attachments)
        else:
            await self.stop(attachments)

    async def delete(self, attachments):
        """Remove each attachment row (and its locked alert), then reload services."""
        for attachment in attachments:
            await self.middleware.call('datastore.delete', self.datastore_model, attachment['id'])
            await self.remove_alert(attachment)

        if attachments:
            await self.restart_reload_services(attachments)

    async def restart_reload_services(self, attachments):
        """
        Common method for post delete/toggle which child classes can use to restart/reload services
        """
        raise NotImplementedError

    async def remove_alert(self, attachment):
        # Clear the "share is locked" one-shot alert for this attachment, if any
        await self.middleware.call(f'{self.namespace}.remove_locked_alert', attachment['id'])

    async def is_child_of_path(self, resource, path, check_parent, exact_match):
        # What this is essentially doing is testing if resource in question is a child of queried path
        # and not vice versa. While this is desirable in most cases, there are cases we also want to see
        # if path is a child of the resource in question. In that case we want the following:
        # 1) When parent of configured path is specified we return true
        # 2) When configured path itself is specified we return true
        # 3) When path is child of configured path, we return true as the path
        #    is being consumed by service in question
        #
        # In most cases we want to cater to above child cases with resource path and the path specified
        # but there can also be cases when we just want to be sure if the resource path and the path to check
        # are equal and for that case `exact_match` is used where we do not try to see if one is the child of
        # another or vice versa. We just check if they are equal.
        #
        # `check_parent` flag when set can be used to check for the case when share path is the parent
        # of the path to check.
        # NOTE(review): get_path_field is invoked unbound with the class itself as `self`
        # — presumably it only touches class-level attributes; confirm against service_class.
        share_path = await self.service_class.get_path_field(self.service_class, resource)
        if exact_match or share_path == path:
            return share_path == path

        is_child = await self.middleware.call('filesystem.is_child', share_path, path)
        if not is_child and check_parent:
            return await self.middleware.call('filesystem.is_child', path, share_path)
        else:
            return is_child

    async def start(self, attachments):
        """Bring attachments back into service: ensure the service runs, clear
        locked alerts and reload/restart the service."""
        await self.start_service()
        for attachment in attachments:
            await self.remove_alert(attachment)
        if attachments:
            await self.restart_reload_services(attachments)

    async def stop(self, attachments):
        """Quiesce attachments by reloading/restarting the consuming service."""
        if attachments:
            await self.restart_reload_services(attachments)
| 7,466 | Python | .py | 150 | 40.906667 | 113 | 0.665752 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
class ListenDelegate:
    """
    Represents something (e.g. service) that needs to handle a deletion of a static IP address
    from the system.
    """

    async def get_listen_state(self, ips):
        """Return an opaque state object passed to the other methods."""
        raise NotImplementedError

    async def set_listen_state(self, state):
        """Apply *state*, listening on the addresses it describes."""
        raise NotImplementedError

    async def listens_on(self, state, ip):
        """Return whether *state* includes listening on *ip*."""
        raise NotImplementedError

    async def reset_listens(self, state):
        """Listen on all IP addresses."""
        raise NotImplementedError

    async def repr(self, state):
        """Return a machine-readable description of the state."""
        raise NotImplementedError
class ConfigServiceListenDelegate(ListenDelegate):
    """ConfigService listening on IP address(es) stored in one config field."""

    def __init__(self, middleware, plugin, field):
        self.middleware = middleware
        self.plugin = plugin
        self.field = field

    async def get_listen_state(self, ips):
        # State is simply the current value of the configured field
        return (await self.middleware.call(f"{self.plugin}.config"))[self.field]

    async def set_listen_state(self, state):
        await self.middleware.call(f"{self.plugin}.update", {self.field: state})

    async def repr(self, state):
        return {"type": "SERVICE", "service": self.plugin}

    def __repr__(self):
        return f"<{self.__class__.__name__} {self.plugin}>"
class ConfigServiceListenSingleDelegate(ConfigServiceListenDelegate):
    """ConfigService listening on a single IP address."""

    def __init__(self, *args, empty_value="0.0.0.0"):
        super().__init__(*args)
        # Value written when resetting to "listen everywhere"
        self.empty_value = empty_value

    async def listens_on(self, state, ip):
        return ip == state

    async def reset_listens(self, state):
        await self.set_listen_state(self.empty_value)
class ConfigServiceListenMultipleDelegate(ConfigServiceListenDelegate):
    """ConfigService listening on multiple IP addresses."""

    async def listens_on(self, state, ip):
        return ip in state

    async def reset_listens(self, state):
        # Clear the explicit bind-address list
        await self.set_listen_state([])
class SystemServiceListenDelegateMixin:
    """Mixin reporting the underlying system service instead of the plugin name."""

    @property
    def service(self):
        # Resolve the system service name from the plugin's service configuration
        plugin_service = self.middleware.get_service(self.plugin)
        return plugin_service._config.service

    async def repr(self, state):
        return {"type": "SYSTEM_SERVICE", "system-service": self.service}
class SystemServiceListenSingleDelegate(SystemServiceListenDelegateMixin, ConfigServiceListenSingleDelegate):
    """System service (reported by its service name) listening on a single IP address."""
    pass
class SystemServiceListenMultipleDelegate(SystemServiceListenDelegateMixin, ConfigServiceListenMultipleDelegate):
    """System service (reported by its service name) listening on multiple IP addresses."""
    pass
| 2,872 | Python | .py | 75 | 31.373333 | 114 | 0.67714 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,095 | manager.py | truenas_middleware/src/middlewared/middlewared/common/event_source/manager.py | import asyncio
from collections import defaultdict, namedtuple
import functools
from uuid import uuid4
from middlewared.event import EventSource
from middlewared.schema import ValidationErrors
# Bookkeeping record for one subscription: the Subscriber sink plus the
# event source name and argument it subscribed to.
IdentData = namedtuple("IdentData", ["subscriber", "name", "arg"])
class Subscriber:
    """Abstract sink for events produced by an event source instance."""

    def send_event(self, event_type, **kwargs):
        """Deliver a single event to the subscriber."""
        raise NotImplementedError

    def terminate(self, error):
        """Signal that no more events will follow, optionally with *error*."""
        raise NotImplementedError
class AppSubscriber(Subscriber):
    """Subscriber forwarding events to a websocket app under a collection name."""

    def __init__(self, app, collection):
        self.app = app
        self.collection = collection

    def send_event(self, event_type, **kwargs):
        self.app.send_event(self.collection, event_type, **kwargs)

    def terminate(self, error):
        self.app.notify_unsubscribed(self.collection, error)
class InternalSubscriber(Subscriber):
    """Subscriber feeding an async iterator instead of a websocket app."""

    def __init__(self):
        self.iterator = InternalSubscriberIterator()

    def send_event(self, event_type, **kwargs):
        self.iterator.queue.put_nowait((False, (event_type, kwargs)))

    def terminate(self, error):
        # `None` ends iteration cleanly; a (True, error) tuple makes it raise.
        if error:
            self.iterator.queue.put_nowait((True, error))
        else:
            self.iterator.queue.put_nowait(None)
class InternalSubscriberIterator:
    """Async iterator draining events queued by ``InternalSubscriber``.

    Queue items are ``(is_error, payload)`` tuples; ``None`` ends iteration.
    """

    def __init__(self):
        self.queue = asyncio.Queue()

    def __aiter__(self):
        return self

    async def __anext__(self):
        item = await self.queue.get()
        if item is None:
            # Graceful termination sentinel
            raise StopAsyncIteration
        failed, payload = item
        if failed:
            raise payload
        return payload
class EventSourceManager:
    """Tracks registered EventSource classes and fans their events out to subscribers.

    One running EventSource instance exists per (name, arg) pair: it is created
    on the first subscription and cancelled when the last subscriber leaves.
    """

    def __init__(self, middleware):
        self.middleware = middleware

        # name -> EventSource subclass (populated by `register`)
        self.event_sources = {}
        # name -> arg -> running EventSource instance
        self.instances = defaultdict(dict)
        # subscription ident -> IdentData(subscriber, name, arg)
        self.idents = {}
        # name -> arg -> set of idents subscribed to that instance
        self.subscriptions = defaultdict(lambda: defaultdict(set))

    def short_name_arg(self, name):
        """Split ``'name:arg'`` into ``(name, arg)``; *arg* is ``None`` when absent."""
        if ':' in name:
            shortname, arg = name.split(':', 1)
        else:
            shortname = name
            arg = None
        return shortname, arg

    def get_full_name(self, name, arg):
        """Inverse of `short_name_arg`."""
        if arg is None:
            return name
        else:
            return f'{name}:{arg}'

    def register(self, name, event_source, roles):
        """Register an EventSource subclass under *name* and grant *roles* access."""
        if not issubclass(event_source, EventSource):
            raise RuntimeError(f"{event_source} is not EventSource subclass")

        self.event_sources[name] = event_source
        self.middleware.role_manager.register_event(name, roles)

    async def subscribe(self, subscriber, ident, name, arg):
        """Subscribe *subscriber* (under unique *ident*) to event source *name*:*arg*,
        lazily creating and starting the instance on first use."""
        if ident in self.idents:
            raise ValueError(f"Ident {ident} is already used")

        self.idents[ident] = IdentData(subscriber, name, arg)
        self.subscriptions[name][arg].add(ident)

        if arg not in self.instances[name]:
            self.middleware.logger.trace("Creating new instance of event source %r:%r", name, arg)
            self.instances[name][arg] = self.event_sources[name](
                self.middleware, name, arg,
                functools.partial(self._send_event, name, arg),
                functools.partial(self._unsubscribe_all, name, arg),
            )
            # Validate that specified `arg` is acceptable wrt event source in question
            try:
                await self.instances[name][arg].validate_arg()
            except ValidationErrors as e:
                # Invalid arg: roll the subscription back, reporting the error
                await self.unsubscribe(ident, e)
            else:
                self.middleware.create_task(self.instances[name][arg].process())
        else:
            self.middleware.logger.trace("Re-using existing instance of event source %r:%r", name, arg)

    async def unsubscribe(self, ident, error=None):
        """Remove subscription *ident*, cancelling the instance when it was the last one."""
        ident_data = self.idents.pop(ident)
        self.terminate(ident_data, error)

        idents = self.subscriptions[ident_data.name][ident_data.arg]
        idents.remove(ident)
        if not idents:
            self.middleware.logger.trace("Canceling instance of event source %r:%r as the last subscriber "
                                         "unsubscribed", ident_data.name, ident_data.arg)
            instance = self.instances[ident_data.name].pop(ident_data.arg)
            await instance.cancel()

    def terminate(self, ident, error=None):
        # `ident` here is an IdentData record, not an ident key
        ident.subscriber.terminate(error)

    async def subscribe_app(self, app, ident, name, arg):
        """Subscribe a websocket *app* to *name*:*arg* under *ident*."""
        await self.subscribe(AppSubscriber(app, self.get_full_name(name, arg)), ident, name, arg)

    async def unsubscribe_app(self, app):
        """Drop every subscription belonging to websocket *app* (e.g. on disconnect)."""
        for ident, ident_data in list(self.idents.items()):
            if isinstance(ident_data.subscriber, AppSubscriber) and ident_data.subscriber.app == app:
                await self.unsubscribe(ident)

    async def iterate(self, name, arg):
        """Subscribe internally and return an async iterator over the events."""
        ident = str(uuid4())
        subscriber = InternalSubscriber()
        await self.subscribe(subscriber, ident, name, arg)
        return subscriber.iterator

    def _send_event(self, name, arg, event_type, **kwargs):
        # Fan one event out to every current subscriber of name:arg
        for ident in list(self.subscriptions[name][arg]):
            try:
                ident_data = self.idents[ident]
            except KeyError:
                # Subscriber unsubscribed concurrently; skip it
                self.middleware.logger.trace("Ident %r is gone", ident)
                continue

            ident_data.subscriber.send_event(event_type, **kwargs)

    async def _unsubscribe_all(self, name, arg, error=None):
        # Called by the event source itself when it finishes/fails: terminate
        # all subscribers and forget the instance.
        for ident in self.subscriptions[name][arg]:
            self.terminate(self.idents.pop(ident), error)

        self.instances[name].pop(arg, None)
        self.subscriptions[name][arg].clear()
| 5,546 | Python | .py | 125 | 34.752 | 107 | 0.636127 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,096 | __init__.py | truenas_middleware/src/middlewared/middlewared/common/ports/__init__.py | from collections.abc import Iterable
from middlewared.plugins.ports.utils import WILDCARD_IPS
class PortDelegate:
    """Describes which network ports a subsystem occupies."""

    name = NotImplemented
    namespace = NotImplemented
    title = NotImplemented

    def __init__(self, middleware):
        self.middleware = middleware
        self.logger = middleware.logger
        # Subclasses must define all identifying attributes
        for attr in ('name', 'namespace', 'title'):
            if getattr(self, attr) is NotImplemented:
                raise ValueError(f'{attr!r} must be specified for port delegate')

    async def get_ports(self):
        """Return a list of port usage descriptors."""
        raise NotImplementedError()
class ServicePortDelegate(PortDelegate):
    """Port delegate whose ports come from a ConfigService's config fields."""

    bind_address_field = NotImplemented
    port_fields = NotImplemented

    async def basic_checks(self):
        if self.port_fields is NotImplemented:
            raise ValueError('Port fields must be set for Service port delegate')
        if not isinstance(self.port_fields, Iterable):
            raise ValueError('Port fields must be an iterable')

    def bind_address(self, config):
        # Fall back to the wildcard address when no bind-address field is
        # defined or its configured value is empty.
        if self.bind_address_field is NotImplemented:
            return '0.0.0.0'
        return config.get(self.bind_address_field) or '0.0.0.0'

    def get_bind_ip_port_tuple(self, config, port_field):
        return self.bind_address(config), config[port_field]

    async def config(self):
        return await self.middleware.call(f'{self.namespace}.config')

    async def get_ports_bound_on_wildcards(self):
        """Ports which should be reported on every wildcard address; overridden by subclasses."""
        return []

    async def get_ports_internal(self):
        override_ports = await self.get_ports_bound_on_wildcards()
        if override_ports:
            return [(ip, port) for ip in WILDCARD_IPS for port in override_ports]

        await self.basic_checks()
        config = await self.config()
        return [
            self.get_bind_ip_port_tuple(config, field)
            for field in self.port_fields if config.get(field)
        ]

    async def get_ports(self):
        ports = await self.get_ports_internal()
        if not ports:
            return []
        return [{'description': None, 'ports': ports}]
| 2,007 | Python | .py | 42 | 39.833333 | 114 | 0.680349 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,097 | smartctl.py | truenas_middleware/src/middlewared/middlewared/common/smart/smartctl.py | from collections import namedtuple
import logging
import os
import shlex
from middlewared.utils import run
logger = logging.getLogger(__name__)

# Disk power modes, lowest to highest activity (see smartd.conf's `-n` directive);
# consumed by S.M.A.R.T. configuration code elsewhere.
SMARTCTL_POWERMODES = ['NEVER', 'SLEEP', 'STANDBY', 'IDLE']
# Minimal context consumed by get_smartctl_args() instead of a full service handle.
SMARTCTX = namedtuple('smartctl_args', ['devices', 'enterprise_hardware', 'middleware'])
async def get_smartctl_args(context, disk, smartoptions):
    """Build the smartctl argument list for *disk*.

    :param context: SMARTCTX-like object with `devices`, `enterprise_hardware`
        and `middleware` attributes.
    :param disk: device name without /dev prefix (e.g. "sda", "nvme0n1").
    :param smartoptions: extra user-supplied smartctl options as a shell string.
    :return: list of smartctl arguments, or ``None`` when the disk is unknown.
    """
    try:
        extra = shlex.split(smartoptions)
    except Exception as e:
        logger.warning("Error parsing S.M.A.R.T. options %r for disk %r: %r", smartoptions, disk, e)
        extra = []

    if disk.startswith("nvme"):
        return [f"/dev/{disk}", "-d", "nvme"] + extra

    device = context.devices.get(disk)
    if device is None:
        return None

    if device["vendor"] and device["vendor"].lower().strip() == "nvme":
        return [f"/dev/{disk}", "-d", "nvme"] + extra

    args = [f"/dev/{disk}"] + extra
    if context.enterprise_hardware:
        # Enterprise hardware: use SAT when the SCSI-to-ATA translation VPD
        # page (0x89) is present for the device.
        use_sat = await context.middleware.run_in_thread(
            os.path.exists, f"/sys/block/{disk}/device/vpd_pg89"
        )
    else:
        # Community hardware: assume USB bridges need SAT passthrough
        use_sat = device['bus'] == 'USB'
    if use_sat:
        args = args + ["-d", "sat"]

    return args
async def smartctl(args, **kwargs):
    """Run the ``smartctl`` binary with *args*, forwarding *kwargs* to ``run``."""
    return await run(["smartctl", *args], **kwargs)
| 1,391 | Python | .py | 36 | 32.916667 | 104 | 0.64877 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,098 | krb5.conf.py | truenas_middleware/src/middlewared/middlewared/etc_files/krb5.conf.py | import logging
from middlewared.plugins.etc import FileShouldNotExist
from middlewared.utils import filter_list
from middlewared.utils.directoryservices.constants import DSType
from middlewared.utils.directoryservices.krb5_conf import KRB5Conf
from middlewared.utils.directoryservices.krb5_constants import KRB_LibDefaults, PERSISTENT_KEYRING_PREFIX
logger = logging.getLogger(__name__)
def generate_krb5_conf(
    middleware: object,
    directory_service: dict,
    krb_config: dict,
    realms: list
):
    """Generate the contents of /etc/krb5.conf.

    :param middleware: middleware handle (synchronous `call_sync` only).
    :param directory_service: `directoryservices.status` output; its `type`
        selects the directory-service-specific defaults below.
    :param krb_config: `kerberos.config` output (auxiliary parameter strings).
    :param realms: `kerberos.realm.query` output.
    :raises FileShouldNotExist: when no kerberos realms are configured, so the
        etc plugin removes the file instead of writing it.
    """
    if not realms:
        raise FileShouldNotExist

    krbconf = KRB5Conf()
    krbconf.add_realms(realms)

    appdefaults = {}
    # Baseline [libdefaults]; the directory-service match below may extend these
    libdefaults = {
        str(KRB_LibDefaults.DEFAULT_CCACHE_NAME): PERSISTENT_KEYRING_PREFIX + '%{uid}',
        str(KRB_LibDefaults.DNS_LOOKUP_REALM): 'true',
        str(KRB_LibDefaults.FORWARDABLE): 'true',
        str(KRB_LibDefaults.DNS_LOOKUP_KDC): 'true',
    }

    default_realm = None

    match directory_service['type']:
        case DSType.AD.value:
            ds_config = middleware.call_sync('activedirectory.config')
            default_realm = filter_list(realms, [['id', '=', ds_config['kerberos_realm']]])
            if not default_realm:
                logger.error(
                    '%s: no realm configuration found for active directory domain',
                    ds_config['domainname']
                )
                # Try looking up again by domainname
                default_realm = filter_list(realms, [['realm', '=', ds_config['domainname']]])
                if not default_realm:
                    # Try to recover by creating a realm stub
                    realm_id = middleware.call_sync(
                        'datastore.insert', 'directoryservice.kerberosrealm',
                        {'krb_realm': ds_config['domainname']}
                    )
                    default_realm = middleware.call_sync('kerberos.realm.get_instance', realm_id)['realm']
                else:
                    realm_id = default_realm[0]['id']
                    default_realm = default_realm[0]['realm']

                # set the kerberos realm in AD form to correct value
                middleware.call_sync(
                    'datastore.update', 'directoryservice.activedirectory',
                    ds_config['id'], {'ad_kerberos_realm': realm_id}
                )
            else:
                default_realm = default_realm[0]['realm']

        case DSType.IPA.value:
            try:
                default_realm = middleware.call_sync('ldap.ipa_config')['realm']
            except Exception:
                # This can happen if we're simultaneously disabling IPA service
                # while generating the krb5.conf file
                default_realm = None

            # This matches defaults from ipa-client-install
            libdefaults.update({
                str(KRB_LibDefaults.RDNS): 'false',
                str(KRB_LibDefaults.DNS_CANONICALIZE_HOSTNAME): 'false',
            })

        case DSType.LDAP.value:
            ds_config = middleware.call_sync('ldap.config')
            if ds_config['kerberos_realm']:
                default_realm = filter_list(realms, [['id', '=', ds_config['kerberos_realm']]])
                if default_realm:
                    default_realm = default_realm[0]['realm']

        case _:
            # No directory service configured: emit only the baseline defaults
            pass

    if default_realm:
        libdefaults[str(KRB_LibDefaults.DEFAULT_REALM)] = default_realm

    krbconf.add_libdefaults(libdefaults, krb_config['libdefaults_aux'])
    krbconf.add_appdefaults(appdefaults, krb_config['appdefaults_aux'])

    return krbconf.generate()
def render(service, middleware, render_ctx):
    """etc plugin entry point: produce the /etc/krb5.conf contents."""
    ds_status = render_ctx['directoryservices.status']
    krb_config = render_ctx['kerberos.config']
    realms = render_ctx['kerberos.realm.query']
    return generate_krb5_conf(middleware, ds_status, krb_config, realms)
| 3,869 | Python | .py | 85 | 33.905882 | 106 | 0.602762 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |
24,099 | web_ui_root_login_alert.py | truenas_middleware/src/middlewared/middlewared/etc_files/web_ui_root_login_alert.py | from middlewared.utils import filter_list
async def render(service, middleware, render_ctx):
    """Raise or clear the WebUiRootLogin one-shot alert.

    Not a real config file: this etc "file" only toggles the alert that fires
    when root password login is still the only way some privilege can log in.
    """
    root_user = filter_list(render_ctx['user.query'], [('username', '=', 'root')], {'get': True})
    alert_needed = root_user['password_disabled'] and await middleware.call(
        'privilege.always_has_root_password_enabled',
        render_ctx['user.query'],
        render_ctx['group.query'],
    )
    verb = 'alert.oneshot_create' if alert_needed else 'alert.oneshot_delete'
    await middleware.call(verb, 'WebUiRootLogin', None)
| 555 | Python | .py | 11 | 44.090909 | 97 | 0.678373 | truenas/middleware | 2,277 | 481 | 13 | LGPL-3.0 | 9/5/2024, 5:13:34 PM (Europe/Amsterdam) |