[AIRFLOW-2524] Add Amazon SageMaker Training (apache#3658)
Add SageMaker Hook, Training Operator & Sensor
Co-authored-by: srrajeev-aws <[email protected]>
troychen728 authored and galak75 committed Nov 23, 2018
1 parent 94a09a5 commit 058be62
Showing 8 changed files with 1,325 additions and 0 deletions.
241 changes: 241 additions & 0 deletions airflow/contrib/hooks/sagemaker_hook.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,241 @@
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
import time
from botocore.exceptions import ClientError

from airflow.exceptions import AirflowException
from airflow.contrib.hooks.aws_hook import AwsHook
from airflow.hooks.S3_hook import S3Hook


class SageMakerHook(AwsHook):
"""
Interact with Amazon SageMaker.
    sagemaker_conn_id is required when use_db_config is True,
    so the training/tuning config stored in the Airflow db can be read
"""

def __init__(self,
sagemaker_conn_id=None,
use_db_config=False,
region_name=None,
check_interval=5,
max_ingestion_time=None,
*args, **kwargs):
super(SageMakerHook, self).__init__(*args, **kwargs)
self.sagemaker_conn_id = sagemaker_conn_id
self.use_db_config = use_db_config
self.region_name = region_name
self.check_interval = check_interval
self.max_ingestion_time = max_ingestion_time
self.conn = self.get_conn()

def check_for_url(self, s3url):
"""
        Check whether the given S3 URL exists
        :param s3url: S3 url
        :type s3url: str
        :return: bool
"""
bucket, key = S3Hook.parse_s3_url(s3url)
s3hook = S3Hook(aws_conn_id=self.aws_conn_id)
if not s3hook.check_for_bucket(bucket_name=bucket):
raise AirflowException(
"The input S3 Bucket {} does not exist ".format(bucket))
if not s3hook.check_for_key(key=key, bucket_name=bucket):
raise AirflowException("The input S3 Key {} does not exist in the Bucket"
.format(s3url, bucket))
return True

def check_valid_training_input(self, training_config):
"""
        Run checks before a training job starts
:param training_config: training_config
:type training_config: dict
:return: None
"""
for channel in training_config['InputDataConfig']:
self.check_for_url(channel['DataSource']
['S3DataSource']['S3Uri'])

def check_valid_tuning_input(self, tuning_config):
"""
Run checks before a tuning job starts
:param tuning_config: tuning_config
:type tuning_config: dict
:return: None
"""
for channel in tuning_config['TrainingJobDefinition']['InputDataConfig']:
self.check_for_url(channel['DataSource']
['S3DataSource']['S3Uri'])

def check_status(self, non_terminal_states,
failed_state, key,
describe_function, *args):
"""
        :param non_terminal_states: the set of non-terminal states
        :type non_terminal_states: set
        :param failed_state: the set of failed states
        :type failed_state: set
:param key: the key of the response dict
that points to the state
:type key: string
:param describe_function: the function used to retrieve the status
:type describe_function: python callable
:param args: the arguments for the function
:return: None
"""
sec = 0
running = True

while running:

sec = sec + self.check_interval

if self.max_ingestion_time and sec > self.max_ingestion_time:
# ensure that the job gets killed if the max ingestion time is exceeded
raise AirflowException("SageMaker job took more than "
"%s seconds", self.max_ingestion_time)

time.sleep(self.check_interval)
try:
response = describe_function(*args)
status = response[key]
self.log.info("Job still running for %s seconds... "
"current status is %s" % (sec, status))
except KeyError:
raise AirflowException("Could not get status of the SageMaker job")
except ClientError:
raise AirflowException("AWS request failed, check log for more info")

if status in non_terminal_states:
running = True
elif status in failed_state:
raise AirflowException("SageMaker job failed because %s"
% response['FailureReason'])
else:
running = False

        self.log.info('SageMaker Job Completed')

def get_conn(self):
"""
Establish an AWS connection
:return: a boto3 SageMaker client
"""
return self.get_client_type('sagemaker', region_name=self.region_name)

def list_training_job(self, name_contains=None, status_equals=None):
"""
List the training jobs associated with the given input
:param name_contains: A string in the training job name
:type name_contains: str
        :param status_equals: 'InProgress'|'Completed'
            |'Failed'|'Stopping'|'Stopped'
        :type status_equals: str
        :return: dict
"""
return self.conn.list_training_jobs(
NameContains=name_contains, StatusEquals=status_equals)

def list_tuning_job(self, name_contains=None, status_equals=None):
"""
List the tuning jobs associated with the given input
        :param name_contains: A string in the tuning job name
        :type name_contains: str
        :param status_equals: 'InProgress'|'Completed'
            |'Failed'|'Stopping'|'Stopped'
        :type status_equals: str
        :return: dict
"""
        return self.conn.list_hyper_parameter_tuning_jobs(
            NameContains=name_contains, StatusEquals=status_equals)

def create_training_job(self, training_job_config, wait_for_completion=True):
"""
Create a training job
:param training_job_config: the config for training
:type training_job_config: dict
        :param wait_for_completion: if the program should keep running until job finishes
        :type wait_for_completion: bool
:return: A dict that contains ARN of the training job.
"""
if self.use_db_config:
if not self.sagemaker_conn_id:
raise AirflowException("SageMaker connection id must be present to read \
SageMaker training jobs configuration.")
sagemaker_conn = self.get_connection(self.sagemaker_conn_id)

config = copy.deepcopy(sagemaker_conn.extra_dejson)
training_job_config.update(config)

self.check_valid_training_input(training_job_config)

response = self.conn.create_training_job(
**training_job_config)
if wait_for_completion:
self.check_status(['InProgress', 'Stopping', 'Stopped'],
['Failed'],
'TrainingJobStatus',
self.describe_training_job,
training_job_config['TrainingJobName'])
return response

def create_tuning_job(self, tuning_job_config):
"""
Create a tuning job
:param tuning_job_config: the config for tuning
:type tuning_job_config: dict
:return: A dict that contains ARN of the tuning job.
"""
if self.use_db_config:
if not self.sagemaker_conn_id:
                raise AirflowException("SageMaker connection id must be present "
                                       "to read SageMaker tuning job configuration.")

sagemaker_conn = self.get_connection(self.sagemaker_conn_id)

config = sagemaker_conn.extra_dejson.copy()
tuning_job_config.update(config)

self.check_valid_tuning_input(tuning_job_config)

return self.conn.create_hyper_parameter_tuning_job(
**tuning_job_config)

def describe_training_job(self, training_job_name):
"""
        Return the training job info associated with the given name
        :param training_job_name: the name of the training job
        :type training_job_name: string
        :return: A dict that contains all the training job info
"""
return self.conn\
.describe_training_job(TrainingJobName=training_job_name)

def describe_tuning_job(self, tuning_job_name):
"""
        Return the tuning job info associated with the given name
        :param tuning_job_name: the name of the tuning job
        :type tuning_job_name: string
        :return: A dict that contains all the tuning job info
"""
return self.conn\
.describe_hyper_parameter_tuning_job(
HyperParameterTuningJobName=tuning_job_name)
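
For illustration, here is a minimal, hypothetical usage sketch of the new hook (not part of the commit): the connection id, job name, training image, role ARN and S3 URIs below are all placeholders, and the config dict follows the shape expected by SageMaker's CreateTrainingJob API.

# Hypothetical SageMakerHook usage; every name, ARN and URI is a placeholder.
from airflow.contrib.hooks.sagemaker_hook import SageMakerHook

training_config = {
    'TrainingJobName': 'example-training-job',
    'AlgorithmSpecification': {
        'TrainingImage': '811284229777.dkr.ecr.us-west-2.amazonaws.com/xgboost:latest',
        'TrainingInputMode': 'File',
    },
    'RoleArn': 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
    'InputDataConfig': [{
        'ChannelName': 'train',
        'DataSource': {'S3DataSource': {
            'S3DataType': 'S3Prefix',
            # check_for_url() verifies this bucket and key exist before starting
            'S3Uri': 's3://example-bucket/train/',
            'S3DataDistributionType': 'FullyReplicated',
        }},
    }],
    'OutputDataConfig': {'S3OutputPath': 's3://example-bucket/output/'},
    'ResourceConfig': {
        'InstanceCount': 1,
        'InstanceType': 'ml.m4.xlarge',
        'VolumeSizeInGB': 10,
    },
    'StoppingCondition': {'MaxRuntimeInSeconds': 3600},
}

hook = SageMakerHook(aws_conn_id='aws_default',    # assumed AWS connection
                     region_name='us-west-2',
                     check_interval=30,            # poll every 30 seconds
                     max_ingestion_time=4 * 3600)  # fail the poll after 4 hours

# Validates the S3 inputs, calls CreateTrainingJob, then polls
# describe_training_job until the status leaves the non-terminal set.
response = hook.create_training_job(training_config,
                                    wait_for_completion=True)
print(response['TrainingJobArn'])
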
119 changes: 119 additions & 0 deletions airflow/contrib/operators/sagemaker_create_training_job_operator.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from airflow.contrib.hooks.sagemaker_hook import SageMakerHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.exceptions import AirflowException


class SageMakerCreateTrainingJobOperator(BaseOperator):

"""
    Initiate a SageMaker training job
    This operator returns the ARN of the training job created in Amazon SageMaker
:param training_job_config:
The configuration necessary to start a training job (templated)
:type training_job_config: dict
:param region_name: The AWS region_name
:type region_name: string
:param sagemaker_conn_id: The SageMaker connection ID to use.
:type sagemaker_conn_id: string
    :param use_db_config: Whether or not to use the db config
        associated with sagemaker_conn_id.
        If set to true, the training config is automatically updated
        with the config stored in the db, so the db config does not need
        to contain everything; note that keys in the db config replace
        the matching keys in training_job_config, so be careful
:type use_db_config: bool
:param aws_conn_id: The AWS connection ID to use.
:type aws_conn_id: string
:param wait_for_completion: if the operator should block
until training job finishes
:type wait_for_completion: bool
    :param check_interval: if wait is set to be true, this is the time interval,
        in seconds, at which the operator checks the status of the training job
:type check_interval: int
    :param max_ingestion_time: if wait is set to be true, the operator will fail
        if the training job hasn't finished within max_ingestion_time seconds
        (Caution: be careful when setting this parameter because training can take very long)
:type max_ingestion_time: int
**Example**:
The following operator would start a training job when executed
        sagemaker_training = SageMakerCreateTrainingJobOperator(
            task_id='sagemaker_training',
            training_job_config=config,
            region_name='us-west-2',
            sagemaker_conn_id='sagemaker_customers_conn',
            use_db_config=True,
            aws_conn_id='aws_customers_conn'
        )
"""

template_fields = ['training_job_config']
template_ext = ()
ui_color = '#ededed'

@apply_defaults
def __init__(self,
training_job_config=None,
region_name=None,
sagemaker_conn_id=None,
use_db_config=False,
wait_for_completion=True,
check_interval=5,
max_ingestion_time=None,
*args, **kwargs):
super(SageMakerCreateTrainingJobOperator, self).__init__(*args, **kwargs)

self.sagemaker_conn_id = sagemaker_conn_id
self.training_job_config = training_job_config
self.use_db_config = use_db_config
self.region_name = region_name
self.wait_for_completion = wait_for_completion
self.check_interval = check_interval
self.max_ingestion_time = max_ingestion_time

def execute(self, context):
sagemaker = SageMakerHook(
sagemaker_conn_id=self.sagemaker_conn_id,
use_db_config=self.use_db_config,
region_name=self.region_name,
check_interval=self.check_interval,
max_ingestion_time=self.max_ingestion_time
)

        self.log.info(
            "Creating SageMaker Training Job %s.",
            self.training_job_config['TrainingJobName']
        )
response = sagemaker.create_training_job(
self.training_job_config,
wait_for_completion=self.wait_for_completion)
        if response['ResponseMetadata']['HTTPStatusCode'] != 200:
            raise AirflowException(
                'SageMaker Training Job creation failed: %s' % response)
else:
return response
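
For completeness, a hypothetical sketch of wiring this operator into a DAG (not part of the commit): the DAG id, task id, connection id and schedule are placeholders, and `training_config` is a CreateTrainingJob dict shaped like the one sketched after the hook above.

# Hypothetical DAG wiring for the operator; ids and connections are placeholders.
from datetime import datetime

from airflow import DAG
from airflow.contrib.operators.sagemaker_create_training_job_operator import \
    SageMakerCreateTrainingJobOperator

dag = DAG('sagemaker_training_example',       # hypothetical DAG id
          start_date=datetime(2018, 1, 1),
          schedule_interval=None)

train_task = SageMakerCreateTrainingJobOperator(
    task_id='sagemaker_training',
    training_job_config=training_config,      # templated CreateTrainingJob dict
    region_name='us-west-2',
    aws_conn_id='aws_default',                # assumed AWS connection
    wait_for_completion=True,                 # block until a terminal status
    check_interval=30,
    dag=dag)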
