# Source code for airflow.contrib.operators.ecs_operator

# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
import re
import sys

from airflow.contrib.hooks.aws_hook import AwsHook
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils import apply_defaults

class ECSOperator(BaseOperator):
    """
    Execute a task on AWS EC2 Container Service

    :param task_definition: the task definition name on EC2 Container Service
    :type task_definition: str
    :param cluster: the cluster name on EC2 Container Service
    :type cluster: str
    :param overrides: the same parameter that boto3 will receive (templated):
        http://boto3.readthedocs.org/en/latest/reference/services/ecs.html#ECS.Client.run_task
    :type overrides: dict
    :param aws_conn_id: connection id of AWS credentials / region name. If None,
        credential boto3 strategy will be used
        (http://boto3.readthedocs.io/en/latest/guide/configuration.html).
    :type aws_conn_id: str
    :param region_name: region name to use in AWS Hook.
        Override the region_name in connection (if provided)
    :type region_name: str
    :param launch_type: the launch type on which to run your task ('EC2' or 'FARGATE')
    :type launch_type: str
    """

    ui_color = '#f0ede4'
    # boto3 ECS client; created lazily in execute() so the operator can be
    # instantiated (and templated) without AWS connectivity.
    client = None
    # ARN of the task started by execute(); needed by the waiter and on_kill().
    arn = None
    template_fields = ('overrides',)

    @apply_defaults
    def __init__(self, task_definition, cluster, overrides,
                 aws_conn_id=None, region_name=None,
                 launch_type='EC2', **kwargs):
        super(ECSOperator, self).__init__(**kwargs)

        self.aws_conn_id = aws_conn_id
        self.region_name = region_name
        self.task_definition = task_definition
        self.cluster = cluster
        self.overrides = overrides
        self.launch_type = launch_type
        self.hook = self.get_hook()

    def execute(self, context):
        """Run the ECS task, wait for it to stop, and verify it succeeded.

        :raises AirflowException: if run_task reports failures, or the task
            (or any of its containers) did not finish successfully.
        """
        self.log.info(
            'Running ECS Task - Task definition: %s - on cluster %s',
            self.task_definition, self.cluster
        )
        self.log.info('ECSOperator overrides: %s', self.overrides)

        self.client = self.hook.get_client_type(
            'ecs',
            region_name=self.region_name
        )

        response = self.client.run_task(
            cluster=self.cluster,
            taskDefinition=self.task_definition,
            overrides=self.overrides,
            startedBy=self.owner,
            launchType=self.launch_type
        )

        failures = response['failures']
        if len(failures) > 0:
            raise AirflowException(response)
        self.log.info('ECS Task started: %s', response)

        self.arn = response['tasks'][0]['taskArn']
        self._wait_for_task_ended()

        self._check_success_task()
        self.log.info('ECS Task has been successfully executed: %s', response)

    def _wait_for_task_ended(self):
        """Block until the started task reaches the STOPPED state."""
        waiter = self.client.get_waiter('tasks_stopped')
        waiter.config.max_attempts = sys.maxsize  # timeout is managed by airflow
        waiter.wait(
            cluster=self.cluster,
            tasks=[self.arn]
        )

    def _check_success_task(self):
        """Inspect the stopped task and raise if it did not succeed.

        :raises AirflowException: if describe_tasks reports failures, the host
            instance was stopped/terminated, or any container exited non-zero,
            is still PENDING, or reports an error reason.
        """
        response = self.client.describe_tasks(
            cluster=self.cluster,
            tasks=[self.arn]
        )
        self.log.info('ECS Task stopped, check status: %s', response)

        if len(response.get('failures', [])) > 0:
            raise AirflowException(response)

        for task in response['tasks']:
            # This is a `stoppedReason` that indicates a task has not
            # successfully finished, but there is no other indication of failure
            # in the response.
            # See, https://docs.aws.amazon.com/AmazonECS/latest/developerguide/stopped-task-errors.html  # noqa E501
            if re.match(r'Host EC2 \(instance .+?\) (stopped|terminated)\.',
                        task.get('stoppedReason', '')):
                raise AirflowException(
                    'The task was stopped because the host instance terminated: {}'.format(
                        task.get('stoppedReason', '')))
            containers = task['containers']
            for container in containers:
                if container.get('lastStatus') == 'STOPPED' and \
                        container['exitCode'] != 0:
                    raise AirflowException(
                        'This task is not in success state {}'.format(task))
                elif container.get('lastStatus') == 'PENDING':
                    raise AirflowException(
                        'This task is still pending {}'.format(task))
                elif 'error' in container.get('reason', '').lower():
                    raise AirflowException(
                        'This containers encounter an error during launching : {}'.format(
                            container.get('reason', '').lower()))

    def get_hook(self):
        """Return the AwsHook used to build the boto3 ECS client."""
        return AwsHook(
            aws_conn_id=self.aws_conn_id
        )

    def on_kill(self):
        """Stop the running ECS task when the Airflow task is killed."""
        response = self.client.stop_task(
            cluster=self.cluster,
            task=self.arn,
            reason='Task killed by the user')
        self.log.info(response)