
Merge pull request #926 from Netflix/http_task_mapper
added http task mapper
apanicker-nflx authored Jan 3, 2019
2 parents fc0a936 + 2569a26 commit efad135
Showing 7 changed files with 230 additions and 1 deletion.
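
For context, this change lets HTTP system tasks declared in a workflow be mapped and scheduled by the decider alongside the existing task types. Below is a minimal sketch of declaring such a task programmatically, assuming hypothetical names (get_user_details, http_request) and a hypothetical input expression:

import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

import java.util.HashMap;
import java.util.Map;

class HttpTaskDefinitionSketch {

    // Hypothetical HTTP workflow task; with this commit, the decider's HTTPTaskMapper
    // maps it to a Task in SCHEDULED state when the workflow is evaluated.
    static WorkflowTask buildHttpWorkflowTask() {
        WorkflowTask httpTask = new WorkflowTask();
        httpTask.setName("get_user_details");                  // hypothetical task definition name
        httpTask.setTaskReferenceName("get_user_details_ref"); // hypothetical reference name
        httpTask.setType(TaskType.HTTP.name());

        Map<String, Object> input = new HashMap<>();
        input.put("http_request", "${workflow.input.request}"); // hypothetical input wiring
        httpTask.setInputParameters(input);
        return httpTask;
    }
}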
File 1 of 7
@@ -33,6 +33,7 @@
import com.netflix.conductor.core.execution.mapper.EventTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper;
import com.netflix.conductor.core.execution.mapper.JoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper;
import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper;
@@ -52,6 +53,7 @@
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_EVENT;
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_FORK_JOIN;
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC;
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_HTTP;
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_JOIN;
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_SIMPLE;
import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_SUB_WORKFLOW;
@@ -178,4 +180,12 @@ public TaskMapper getUserDefinedTaskMapper(ParametersUtils parametersUtils, Meta
public TaskMapper getSimpleTaskMapper(ParametersUtils parametersUtils) {
return new SimpleTaskMapper(parametersUtils);
}

@ProvidesIntoMap
@StringMapKey(TASK_TYPE_HTTP)
@Singleton
@Named(TASK_MAPPERS_QUALIFIER)
public TaskMapper getHTTPTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
return new HTTPTaskMapper(parametersUtils, metadataDAO);
}
}
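
The @ProvidesIntoMap / @StringMapKey / @Named binding above contributes the new mapper to a Guice map keyed by task type name. Below is a minimal sketch of how such a qualified map could be injected and consumed, not the actual DeciderService code; the TASK_MAPPERS_QUALIFIER value is an assumption here, since the real constant is defined outside this diff:

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.core.execution.TerminateWorkflowException;
import com.netflix.conductor.core.execution.mapper.TaskMapper;
import com.netflix.conductor.core.execution.mapper.TaskMapperContext;

import javax.inject.Inject;
import javax.inject.Named;
import java.util.List;
import java.util.Map;

class TaskMapperRegistrySketch {

    // Assumed qualifier value; the real TASK_MAPPERS_QUALIFIER constant lives in the module above.
    static final String TASK_MAPPERS_QUALIFIER = "TaskMappers";

    private final Map<String, TaskMapper> taskMappers;

    @Inject
    TaskMapperRegistrySketch(@Named(TASK_MAPPERS_QUALIFIER) Map<String, TaskMapper> taskMappers) {
        this.taskMappers = taskMappers;
    }

    // Looks up the mapper registered for the task type (e.g. "HTTP") and delegates to it.
    List<Task> map(String taskType, TaskMapperContext context) throws TerminateWorkflowException {
        return taskMappers.get(taskType).getMappedTasks(context);
    }
}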
File 2 of 7
@@ -0,0 +1,96 @@
/*
* Copyright 2018 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.execution.ParametersUtils;
import com.netflix.conductor.core.execution.TerminateWorkflowException;
import com.netflix.conductor.dao.MetadataDAO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
* An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#HTTP}
* to a {@link Task} of type {@link TaskType#HTTP} with {@link Task.Status#SCHEDULED}
*/
public class HTTPTaskMapper implements TaskMapper {

public static final Logger logger = LoggerFactory.getLogger(HTTPTaskMapper.class);

private final ParametersUtils parametersUtils;
private final MetadataDAO metadataDAO;

public HTTPTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
this.parametersUtils = parametersUtils;
this.metadataDAO = metadataDAO;
}

/**
* This method maps a {@link WorkflowTask} of type {@link TaskType#HTTP}
* to a {@link Task} in a {@link Task.Status#SCHEDULED} state
*
* @param taskMapperContext a wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId
* @return a List with just one HTTP task
* @throws TerminateWorkflowException if the task definition does not exist
*/
@Override
public List<Task> getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException {

logger.debug("TaskMapperContext {} in HTTPTaskMapper", taskMapperContext);

WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule();
Workflow workflowInstance = taskMapperContext.getWorkflowInstance();
String taskId = taskMapperContext.getTaskId();
int retryCount = taskMapperContext.getRetryCount();

TaskDef taskDefinition = Optional.ofNullable(taskMapperContext.getTaskDefinition())
.orElseGet(() -> Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName()))
.orElseThrow(() -> {
String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName());
return new TerminateWorkflowException(reason);
}));

Map<String, Object> input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition);

Task httpTask = new Task();
httpTask.setTaskType(taskToSchedule.getType());
httpTask.setTaskDefName(taskToSchedule.getName());
httpTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName());
httpTask.setWorkflowInstanceId(workflowInstance.getWorkflowId());
httpTask.setWorkflowType(workflowInstance.getWorkflowName());
httpTask.setCorrelationId(workflowInstance.getCorrelationId());
httpTask.setScheduledTime(System.currentTimeMillis());
httpTask.setTaskId(taskId);
httpTask.setInputData(input);
httpTask.setStatus(Task.Status.SCHEDULED);
httpTask.setRetryCount(retryCount);
httpTask.setCallbackAfterSeconds(taskToSchedule.getStartDelay());
httpTask.setWorkflowTask(taskToSchedule);
httpTask.setRateLimitPerFrequency(taskDefinition.getRateLimitPerFrequency());
httpTask.setRateLimitFrequencyInSeconds(taskDefinition.getRateLimitFrequencyInSeconds());
return Collections.singletonList(httpTask);
}
}
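
As a quick orientation for the new class, here is a minimal sketch of driving the mapper directly; it mirrors the happy-path unit test added later in this commit, and all names are hypothetical:

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.execution.ParametersUtils;
import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper;
import com.netflix.conductor.core.execution.mapper.TaskMapperContext;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.dao.MetadataDAO;

import java.util.HashMap;
import java.util.List;

class HttpTaskMapperUsageSketch {

    static List<Task> mapOneHttpTask(ParametersUtils parametersUtils, MetadataDAO metadataDAO) {
        WorkflowTask taskToSchedule = new WorkflowTask();
        taskToSchedule.setName("http_task");          // hypothetical task name
        taskToSchedule.setType(TaskType.HTTP.name());

        WorkflowDef workflowDef = new WorkflowDef();
        Workflow workflow = new Workflow();
        workflow.setWorkflowDefinition(workflowDef);

        TaskMapperContext context = TaskMapperContext.newBuilder()
                .withWorkflowDefinition(workflowDef)
                .withWorkflowInstance(workflow)
                // Supplying a TaskDef here means the MetadataDAO fallback (and the
                // TerminateWorkflowException it can raise) is never exercised.
                .withTaskDefinition(new TaskDef("http_task"))
                .withTaskToSchedule(taskToSchedule)
                .withTaskInput(new HashMap<>())
                .withRetryCount(0)
                .withRetryTaskId(IDGenerator.generate())
                .withTaskId(IDGenerator.generate())
                .build();

        // Yields a single Task of type HTTP in SCHEDULED state.
        return new HTTPTaskMapper(parametersUtils, metadataDAO).getMappedTasks(context);
    }
}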
File 3 of 7
@@ -1,4 +1,4 @@
/**
/*
* Copyright 2018 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
File 4 of 7
@@ -32,6 +32,7 @@
import com.netflix.conductor.core.execution.mapper.EventTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper;
import com.netflix.conductor.core.execution.mapper.JoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper;
import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper;
@@ -108,6 +109,7 @@ public void init() {
taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
taskMappers.put("HTTP", new HTTPTaskMapper(parametersUtils, metadataDAO));

this.deciderService = new DeciderService(parametersUtils, queueDAO, externalPayloadStorageUtils, taskMappers);
}
File 5 of 7
@@ -33,6 +33,7 @@
import com.netflix.conductor.core.execution.mapper.EventTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper;
import com.netflix.conductor.core.execution.mapper.JoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper;
import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper;
@@ -133,6 +134,7 @@ public void setup() {
taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
taskMappers.put("HTTP", new HTTPTaskMapper(parametersUtils, metadataDAO));

deciderService = new DeciderService(parametersUtils, queueDAO, externalPayloadStorageUtils, taskMappers);
}
File 6 of 7
@@ -30,6 +30,7 @@
import com.netflix.conductor.core.execution.mapper.EventTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper;
import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.HTTPTaskMapper;
import com.netflix.conductor.core.execution.mapper.JoinTaskMapper;
import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper;
import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper;
@@ -111,6 +112,7 @@ public void init() {
taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
taskMappers.put("HTTP", new HTTPTaskMapper(parametersUtils, metadataDAO));

deciderService = new DeciderService(parametersUtils, queueDAO, externalPayloadStorageUtils, taskMappers);
MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO);
File 7 of 7
@@ -0,0 +1,117 @@
/*
* Copyright 2018 Netflix, Inc.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.conductor.core.execution.mapper;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.execution.ParametersUtils;
import com.netflix.conductor.core.execution.TerminateWorkflowException;
import com.netflix.conductor.core.utils.IDGenerator;
import com.netflix.conductor.dao.MetadataDAO;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.HashMap;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;

public class HTTPTaskMapperTest {

private ParametersUtils parametersUtils;
private MetadataDAO metadataDAO;
private HTTPTaskMapper httpTaskMapper;

@Rule
public ExpectedException expectedException = ExpectedException.none();

@Before
public void setUp() {
parametersUtils = mock(ParametersUtils.class);
metadataDAO = mock(MetadataDAO.class);
httpTaskMapper = new HTTPTaskMapper(parametersUtils, metadataDAO);
}

@Test
public void getMappedTasks() {
//Given
WorkflowTask taskToSchedule = new WorkflowTask();
taskToSchedule.setName("http_task");
taskToSchedule.setType(TaskType.HTTP.name());
taskToSchedule.setTaskDefinition(new TaskDef("http_task"));
String taskId = IDGenerator.generate();
String retriedTaskId = IDGenerator.generate();

Workflow workflow = new Workflow();
WorkflowDef workflowDef = new WorkflowDef();
workflow.setWorkflowDefinition(workflowDef);

TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
.withWorkflowDefinition(workflowDef)
.withWorkflowInstance(workflow)
.withTaskDefinition(new TaskDef())
.withTaskToSchedule(taskToSchedule)
.withTaskInput(new HashMap<>())
.withRetryCount(0)
.withRetryTaskId(retriedTaskId)
.withTaskId(taskId)
.build();

//when
List<Task> mappedTasks = httpTaskMapper.getMappedTasks(taskMapperContext);

//Then
assertEquals(1, mappedTasks.size());
assertEquals(TaskType.HTTP.name(), mappedTasks.get(0).getTaskType());
}

@Test
public void getMappedTasksException() {
//Given
WorkflowTask taskToSchedule = new WorkflowTask();
taskToSchedule.setName("http_task");
taskToSchedule.setType(TaskType.HTTP.name());
String taskId = IDGenerator.generate();
String retriedTaskId = IDGenerator.generate();

Workflow workflow = new Workflow();
WorkflowDef workflowDef = new WorkflowDef();
workflow.setWorkflowDefinition(workflowDef);

TaskMapperContext taskMapperContext = TaskMapperContext.newBuilder()
.withWorkflowDefinition(workflowDef)
.withWorkflowInstance(workflow)
.withTaskToSchedule(taskToSchedule)
.withTaskInput(new HashMap<>())
.withRetryCount(0)
.withRetryTaskId(retriedTaskId)
.withTaskId(taskId)
.build();

//then
expectedException.expect(TerminateWorkflowException.class);
expectedException.expectMessage(String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()));
//when
httpTaskMapper.getMappedTasks(taskMapperContext);
}
}
