File size: 12,402 Bytes
476455e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Amazon SageMaker channel configurations for S3 data sources and file system data sources"""
from __future__ import absolute_import, print_function

from typing import Union, Optional, List
import attr

from sagemaker.workflow.entities import PipelineVariable

# Valid values accepted by FileSystemInput for the file system type and
# access mode arguments (validated in FileSystemInput.__init__).
FILE_SYSTEM_TYPES = ["FSxLustre", "EFS"]
FILE_SYSTEM_ACCESS_MODES = ["ro", "rw"]  # read-only / read-write


class TrainingInput(object):
    """Amazon SageMaker channel configurations for S3 data sources.

    Attributes:
        config (dict[str, dict]): A SageMaker ``DataSource`` referencing
            a SageMaker ``S3DataSource``.
    """

    def __init__(
        self,
        s3_data: Union[str, PipelineVariable],
        distribution: Optional[Union[str, PipelineVariable]] = None,
        compression: Optional[Union[str, PipelineVariable]] = None,
        content_type: Optional[Union[str, PipelineVariable]] = None,
        record_wrapping: Optional[Union[str, PipelineVariable]] = None,
        s3_data_type: Union[str, PipelineVariable] = "S3Prefix",
        instance_groups: Optional[List[Union[str, PipelineVariable]]] = None,
        input_mode: Optional[Union[str, PipelineVariable]] = None,
        attribute_names: Optional[List[Union[str, PipelineVariable]]] = None,
        target_attribute_name: Optional[Union[str, PipelineVariable]] = None,
        shuffle_config: Optional["ShuffleConfig"] = None,
    ):
        r"""Create a definition for input data used by an SageMaker training job.

        See AWS documentation on the ``CreateTrainingJob`` API for more details
        on the parameters.

        Args:
            s3_data (str or PipelineVariable): Defines the location of S3 data to train on.
            distribution (str or PipelineVariable): Valid values: ``'FullyReplicated'``,
                ``'ShardedByS3Key'`` (default: ``'FullyReplicated'``; no default is
                applied when ``target_attribute_name`` is given, i.e. for AutoML jobs).
            compression (str or PipelineVariable): Valid values: ``'Gzip'``, ``None``
                (default: None). Used only in Pipe input mode.
            content_type (str or PipelineVariable): MIME type of the input data
                (default: None).
            record_wrapping (str or PipelineVariable): Valid values: 'RecordIO'
                (default: None).
            s3_data_type (str or PipelineVariable): Valid values: ``'S3Prefix'``,
                ``'ManifestFile'``, ``'AugmentedManifestFile'``.
                With ``'S3Prefix'``, ``s3_data`` is a prefix: every object whose key
                begins with it is used for training. With ``'ManifestFile'`` or
                ``'AugmentedManifestFile'``, ``s3_data`` names a single (augmented)
                manifest file listing the S3 data to train on. Both formats are
                described at `S3DataSource
                <https://docs.aws.amazon.com/sagemaker/latest/dg/API_S3DataSource.html>`_
                in the `Amazon SageMaker API reference`.
            instance_groups (list[str] or list[PipelineVariable]): Optional. Names of the
                instance groups (configured via
                :class:`sagemaker.instance_group.InstanceGroup` for a heterogeneous
                cluster) that should receive this channel's S3 data. See
                `Train Using a Heterogeneous Cluster
                <https://docs.aws.amazon.com/sagemaker/latest/dg/train-heterogeneous-cluster.html>`_
                in the *Amazon SageMaker developer guide*. (default: None)
            input_mode (str or PipelineVariable): Optional per-channel override of the
                input mode set on ``sagemaker.estimator.EstimatorBase.input_mode``
                (default: None).

                    * None - use the input mode specified in the ``Estimator``
                    * 'File' - copy the training dataset from S3 to a local directory.
                    * 'Pipe' - stream data directly from S3 to the container via a
                        Unix-named pipe.
                    * 'FastFile' - stream data from S3 on demand instead of downloading
                        the entire dataset before training begins.

            attribute_names (list[str] or list[PipelineVariable]): One or more attribute
                names to read from a specified AugmentedManifestFile.
            target_attribute_name (str or PipelineVariable): The name of the attribute to
                predict (classify) in a SageMaker AutoML job. Required when the input
                is for a SageMaker AutoML job.
            shuffle_config (sagemaker.inputs.ShuffleConfig): If specified, enables
                shuffling on this channel. See the SageMaker API documentation:
                https://docs.aws.amazon.com/sagemaker/latest/dg/API_ShuffleConfig.html
        """
        s3_source = {"S3DataType": s3_data_type, "S3Uri": s3_data}
        self.config = {"DataSource": {"S3DataSource": s3_source}}

        # Default the distribution only for non-AutoML channels (AutoML channels
        # are identified by a target_attribute_name and take no distribution).
        if not target_attribute_name and not distribution:
            distribution = "FullyReplicated"

        if distribution is not None:
            s3_source["S3DataDistributionType"] = distribution
        if instance_groups is not None:
            s3_source["InstanceGroupNames"] = instance_groups
        if attribute_names is not None:
            s3_source["AttributeNames"] = attribute_names

        # Optional top-level channel settings: emit a key only when a value was given.
        for key, value in (
            ("CompressionType", compression),
            ("ContentType", content_type),
            ("RecordWrapperType", record_wrapping),
            ("InputMode", input_mode),
            ("TargetAttributeName", target_attribute_name),
        ):
            if value is not None:
                self.config[key] = value

        if shuffle_config is not None:
            self.config["ShuffleConfig"] = {"Seed": shuffle_config.seed}


class ShuffleConfig(object):
    """Seed-based shuffling configuration for a training channel.

    For more detail, see the AWS documentation:
    https://docs.aws.amazon.com/sagemaker/latest/dg/API_ShuffleConfig.html
    """

    def __init__(self, seed):
        """Initialize the configuration with a shuffle seed.

        Args:
            seed (long): the long value used to seed the shuffled sequence.
        """
        # Consumed by TrainingInput when building the ShuffleConfig request entry.
        self.seed = seed


@attr.attrs
class CreateModelInput(object):
    """A class containing parameters which can be used to create a SageMaker Model

    Parameters:
        instance_type (str): type of EC2 instance that will be used for model deployment.
        accelerator_type (str): elastic inference accelerator type.
    """

    # attrs generates __init__/__repr__/__eq__ from these attribute definitions.
    instance_type: str = attr.attrib(default=None)
    accelerator_type: str = attr.attrib(default=None)


@attr.attrs
class TransformInput(object):
    """Parameter bundle for a batch transform job.

    Holds the arguments accepted by
    ``sagemaker.transformer.Transformer.transform()``.
    """

    # attrs generates __init__/__repr__/__eq__ from these attribute definitions;
    # only `data` is required, everything else defaults as below.
    data: str = attr.attrib()
    data_type: str = attr.attrib(default="S3Prefix")
    content_type: str = attr.attrib(default=None)
    compression_type: str = attr.attrib(default=None)
    split_type: str = attr.attrib(default=None)
    input_filter: str = attr.attrib(default=None)
    output_filter: str = attr.attrib(default=None)
    join_source: str = attr.attrib(default=None)
    model_client_config: dict = attr.attrib(default=None)
    batch_data_capture_config: dict = attr.attrib(default=None)


class FileSystemInput(object):
    """Amazon SageMaker channel configurations for file system data sources.

    Attributes:
        config (dict[str, dict]): A Sagemaker File System ``DataSource``.
    """

    def __init__(
        self,
        file_system_id,
        file_system_type,
        directory_path,
        file_system_access_mode="ro",
        content_type=None,
    ):
        """Create a new file system input used by an SageMaker training job.

        Args:
            file_system_id (str): An Amazon file system ID starting with 'fs-'.
            file_system_type (str): The type of file system used for the input.
                Valid values: 'EFS', 'FSxLustre'.
            directory_path (str): Absolute or normalized path to the root directory (mount point) in
                the file system.
                Reference: https://docs.aws.amazon.com/efs/latest/ug/mounting-fs.html and
                https://docs.aws.amazon.com/fsx/latest/LustreGuide/mount-fs-auto-mount-onreboot.html
            file_system_access_mode (str): Permissions for read and write.
                Valid values: 'ro' or 'rw'. Defaults to 'ro'.
            content_type (str): MIME type of the input data (default: None).

        Raises:
            ValueError: If ``file_system_type`` or ``file_system_access_mode`` is
                not one of the recognized values.
        """
        # Validate both enumerated arguments with identical error wording.
        for label, value, valid_values in (
            ("type", file_system_type, FILE_SYSTEM_TYPES),
            ("access mode", file_system_access_mode, FILE_SYSTEM_ACCESS_MODES),
        ):
            if value not in valid_values:
                raise ValueError(
                    "Unrecognized file system %s: %s. Valid values: %s."
                    % (label, value, ", ".join(valid_values))
                )

        file_system_source = {
            "FileSystemId": file_system_id,
            "FileSystemType": file_system_type,
            "DirectoryPath": directory_path,
            "FileSystemAccessMode": file_system_access_mode,
        }
        self.config = {"DataSource": {"FileSystemDataSource": file_system_source}}

        # ContentType is emitted only when a (truthy) value was supplied.
        if content_type:
            self.config["ContentType"] = content_type


class BatchDataCaptureConfig(object):
    """Configuration object passed in when create a batch transform job.

    Specifies configuration related to batch transform job data capture for use with
    Amazon SageMaker Model Monitoring
    """

    def __init__(
        self,
        destination_s3_uri: str,
        kms_key_id: str = None,
        generate_inference_id: bool = None,
    ):
        """Create new BatchDataCaptureConfig

        Args:
            destination_s3_uri (str): S3 Location to store the captured data
            kms_key_id (str): The KMS key to use when writing to S3.
                KmsKeyId can be an ID of a KMS key, ARN of a KMS key, alias of a KMS key,
                or alias of a KMS key. The KmsKeyId is applied to all outputs.
                (default: None)
            generate_inference_id (bool): Flag to generate an inference id
                (default: None)
        """
        # Stored as-is; serialized on demand by _to_request_dict().
        self.destination_s3_uri = destination_s3_uri
        self.kms_key_id = kms_key_id
        self.generate_inference_id = generate_inference_id

    def _to_request_dict(self):
        """Generates a request dictionary using the parameters provided to the class."""
        request = {"DestinationS3Uri": self.destination_s3_uri}

        # Optional entries are emitted only when explicitly set (None means "omit";
        # note that a False generate_inference_id is still included).
        for key, value in (
            ("KmsKeyId", self.kms_key_id),
            ("GenerateInferenceId", self.generate_inference_id),
        ):
            if value is not None:
                request[key] = value

        return request