# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import pytest
from mock import Mock, MagicMock
from sagemaker.wrangler.processing import DataWranglerProcessor
from sagemaker.processing import ProcessingInput
ROLE = "arn:aws:iam::012345678901:role/SageMakerRole"
REGION = "us-west-2"
DATA_WRANGLER_RECIPE_SOURCE = "s3://data_wrangler_flows/flow-26-18-43-16-0b48ac2e.flow"
DATA_WRANGLER_CONTAINER_URI = (
"174368400705.dkr.ecr.us-west-2.amazonaws.com/sagemaker-data-wrangler-container:1.x"
)
MOCK_S3_URI = "s3://mock_data/mock.csv"
@pytest.fixture()
def sagemaker_session():
boto_mock = Mock(name="boto_session", region_name=REGION)
session_mock = MagicMock(
name="sagemaker_session",
boto_session=boto_mock,
boto_region_name=REGION,
config=None,
local_mode=False,
)
session_mock.expand_role.return_value = ROLE
return session_mock
def test_data_wrangler_processor_with_required_parameters(sagemaker_session):
processor = DataWranglerProcessor(
role=ROLE,
data_wrangler_flow_source=DATA_WRANGLER_RECIPE_SOURCE,
instance_count=1,
instance_type="ml.m4.xlarge",
sagemaker_session=sagemaker_session,
)
processor.run()
expected_args = _get_expected_args(processor._current_job_name)
sagemaker_session.process.assert_called_with(**expected_args)
def test_data_wrangler_processor_with_mock_input(sagemaker_session):
processor = DataWranglerProcessor(
role=ROLE,
data_wrangler_flow_source=DATA_WRANGLER_RECIPE_SOURCE,
instance_count=1,
instance_type="ml.m4.xlarge",
sagemaker_session=sagemaker_session,
)
mock_input = ProcessingInput(
source=MOCK_S3_URI,
destination="/opt/ml/processing/mock_input",
input_name="mock_input",
s3_data_type="S3Prefix",
s3_input_mode="File",
s3_data_distribution_type="FullyReplicated",
)
processor.run(inputs=[mock_input])
expected_args = _get_expected_args(processor._current_job_name, add_mock_input=True)
sagemaker_session.process.assert_called_with(**expected_args)
def _get_expected_args(job_name, add_mock_input=False):
args = {
"inputs": [
{
"InputName": "flow",
"AppManaged": False,
"S3Input": {
"S3Uri": DATA_WRANGLER_RECIPE_SOURCE,
"LocalPath": "/opt/ml/processing/flow",
"S3DataType": "S3Prefix",
"S3InputMode": "File",
"S3DataDistributionType": "FullyReplicated",
"S3CompressionType": "None",
},
}
],
"output_config": {"Outputs": []},
"job_name": job_name,
"resources": {
"ClusterConfig": {
"InstanceType": "ml.m4.xlarge",
"InstanceCount": 1,
"VolumeSizeInGB": 30,
}
},
"stopping_condition": None,
"app_specification": {
"ImageUri": DATA_WRANGLER_CONTAINER_URI,
},
"environment": None,
"network_config": None,
"role_arn": ROLE,
"tags": None,
"experiment_config": None,
}
if add_mock_input:
mock_input = {
"InputName": "mock_input",
"AppManaged": False,
"S3Input": {
"S3Uri": MOCK_S3_URI,
"LocalPath": "/opt/ml/processing/mock_input",
"S3DataType": "S3Prefix",
"S3InputMode": "File",
"S3DataDistributionType": "FullyReplicated",
"S3CompressionType": "None",
},
}
args["inputs"].insert(0, mock_input)
return args