Initial changes made for delete cluster endpoint #186

Draft · wants to merge 1 commit into base: master
@@ -18,6 +18,7 @@

import com.homeaway.datapullclient.data.ResponseEntity;
import com.homeaway.datapullclient.data.SimpleResponseEntity;
import com.homeaway.datapullclient.data.DelClusterResponseEntity;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
@@ -55,7 +56,9 @@ public interface DataPullClientApi {

String SIMPLE_ENDPOINT_NOTES_TEXT_HTML = "Give the inputs of environment name and pipeline name";

@ApiOperation(value = "Given a JSON input, creates an EMR cluster, run a DataPull step and terminates the cluster",
String DELETE_CLUSTER_ENDPOINT_NOTES_TEXT_HTML = "Give the cluster id to be deleted";

@ApiOperation(value = "Given a JSON input , this creates a Jenkins pipline that can be scheduled to create an EMR cluster, run a DataPull step and terminates the cluster",
consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE, response = ResponseEntity.class
, notes = NOTES_TEXT_HTML, nickname = "startDatapull")
@ResponseStatus(value = HttpStatus.ACCEPTED)
@@ -79,4 +82,14 @@ public interface DataPullClientApi {
@ApiImplicitParam(name = "pipelinename", value = "pipelinename", required = true, dataType = "String", paramType = "query")
})
SimpleResponseEntity startSimpleDataPull(@RequestParam("pipelinename") String pipelinename , @RequestParam("awsenv") String awenv);

@ApiOperation(value = "Given cluster id should be spun up by datapull cluster only",
consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE, response = ResponseEntity.class
, notes = DELETE_CLUSTER_ENDPOINT_NOTES_TEXT_HTML, nickname = "deleteClusterDatapull")
@ResponseStatus(value = HttpStatus.ACCEPTED)
@RequestMapping(value = "/DeleteCluster-DataPull", method = POST, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
@ApiImplicitParams({
@ApiImplicitParam(name = "cluster_id", value = "cluster_id", required = true, dataType = "String", paramType = "query")
})
DelClusterResponseEntity delClusterDataPull(@RequestParam("cluster_id") String cluster_id);
}
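
For context, a minimal sketch (not part of this change) of how a caller might exercise the new endpoint once deployed; the base URL and the example cluster id are placeholders, and an empty JSON body is sent only because the endpoint declares consumes = application/json while reading the cluster id from the query string:

import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.web.client.RestTemplate;

public class DeleteClusterCallExample {
    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON); // endpoint declares consumes = application/json
        HttpEntity<String> request = new HttpEntity<>("{}", headers);
        // Base URL and cluster id are assumptions; the query parameter name matches the @RequestParam above.
        String response = restTemplate.postForObject(
                "http://localhost:8080/DeleteCluster-DataPull?cluster_id={id}",
                request, String.class, "j-XXXXXXXXXXXXX");
        System.out.println(response);
    }
}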
@@ -0,0 +1,22 @@
package com.homeaway.datapullclient.data;


import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.http.HttpStatus;

@ApiModel(value = "Delete Cluster Response entity for DataPull")
public class DelClusterResponseEntity {

@ApiModelProperty("HTTP code for response")
private int statusCode = HttpStatus.OK.value();

@ApiModelProperty("Response Message_cluster_id")
private String message_cluster_id;

public DelClusterResponseEntity(int status, String cluster_id) {
this.statusCode = status;
this.message_cluster_id = cluster_id;
}

}
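
As committed, DelClusterResponseEntity exposes no accessors, so Jackson finds no properties to serialize when the handler returns it (and may fail outright, depending on the FAIL_ON_EMPTY_BEANS setting). A small sketch of the getters that would make the status code and message visible in the JSON response; Lombok's @Getter would work equally well, since the project already pulls in Lombok:

    // Hypothetical accessors, not part of this commit; without them (or @Getter)
    // the response body has no serializable properties.
    public int getStatusCode() {
        return statusCode;
    }

    public String getMessage_cluster_id() {
        return message_cluster_id;
    }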
@@ -17,6 +17,7 @@
package com.homeaway.datapullclient.handlers;

import com.homeaway.datapullclient.api.DataPullClientApi;
import com.homeaway.datapullclient.data.DelClusterResponseEntity;
import com.homeaway.datapullclient.data.ResponseEntity;
import com.homeaway.datapullclient.data.SimpleResponseEntity;
import com.homeaway.datapullclient.exception.InputException;
@@ -83,6 +84,26 @@ public SimpleResponseEntity startSimpleDataPull(String pipelinename, String awse

return entity;
}

@Override
public DelClusterResponseEntity delClusterDataPull(String cluster_id) {
if (log.isDebugEnabled())
log.debug("delClusterResponseEntity -> data=" + cluster_id);
DelClusterResponseEntity entity = null;

try{
service.delClusterDataPull(cluster_id);
entity = new DelClusterResponseEntity(HttpStatus.ACCEPTED.value(), "Request Successfully registered : "+cluster_id);
}
catch(ProcessingException e){
throw new InputException("DataPull application failed for data : "+cluster_id+" \n "+e.getMessage());
}

if(log.isDebugEnabled())
log.debug("delClusterDataPull <- return");

return entity;
}
}


@@ -122,6 +122,11 @@ public void runSimpleDataPull(String awsenv, String pipelinename) {
//DO nothing
}

@Override
public void delClusterDataPull(String cluster_id) {
//DO nothing
}
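
The termination itself is stubbed out here. A rough sketch of what the eventual implementation might look like, under the assumption that an AWS SDK v1 EMR client (as already used elsewhere in DataPull) is available to this class and that ProcessingException accepts a message string:

    import com.amazonaws.AmazonServiceException;
    import com.amazonaws.services.elasticmapreduce.AmazonElasticMapReduce;
    import com.amazonaws.services.elasticmapreduce.model.TerminateJobFlowsRequest;

    @Override
    public void delClusterDataPull(String cluster_id) throws ProcessingException {
        // Hypothetical sketch: `emr` is an assumed AmazonElasticMapReduce field on this class.
        try {
            emr.terminateJobFlows(new TerminateJobFlowsRequest().withJobFlowIds(cluster_id));
        } catch (AmazonServiceException e) {
            throw new ProcessingException("Unable to terminate cluster " + cluster_id + " : " + e.getMessage());
        }
    }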

private void runDataPull(String json, boolean isStart, boolean validateJson) throws ProcessingException {
String originalInputJson = json;
json = extractUserJsonFromS3IfProvided(json, isStart);
@@ -173,7 +173,6 @@ private void runSparkCluster() {

} else {
final RunJobFlowResult result = this.runTaskInNewCluster(emr, logPath, this.s3JarPath, Objects.toString(this.clusterProperties.getSparksubmitparams(), ""), haveBootstrapAction);
DataPullTask.log.info(result.toString());
}

DataPullTask.log.info("Task " + this.taskId + " submitted to EMR cluster");
@@ -22,4 +22,6 @@ public interface DataPullClientService {
void runDataPull(final String json) throws ProcessingException;

void runSimpleDataPull(String awsenv, String pipelinename) throws ProcessingException;

void delClusterDataPull(String cluster_id) throws ProcessingException;
}
@@ -0,0 +1,8 @@
package com.homeaway.datapullclient.service;

import com.homeaway.datapullclient.exception.ProcessingException;

public interface DelClusterDataPullClientService {

void runSimpleDataPull(final String cluster_id) throws ProcessingException;
}
@@ -22,6 +22,7 @@
import com.homeaway.datapullclient.process.DataPullRequestProcessor;
import com.homeaway.datapullclient.service.DataPullClientService;
import com.homeaway.datapullclient.service.SimpleDataPullClientService;
import com.homeaway.datapullclient.service.DelClusterDataPullClientService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
2 changes: 1 addition & 1 deletion api/src/main/resources/logback-spring.xml
@@ -2,7 +2,7 @@
<configuration scan="true">
<include resource="org/springframework/boot/logging/logback/defaults.xml" />
<property resource="application.yml" />
<property name="LOG_PATH" value="/Users/srigajjala/logs"/>
<property name="LOG_PATH" value="/Users/abalmiki/logs"/>
<springProperty scope="context" name="CLOUDWATCH_GROUP_NAME" source="datapull.logger.cloudwatch.groupName"/>
<springProperty scope="context" name="CLOUDWATCH_STREAM_NAME" source="datapull.logger.cloudwatch.streamName"/>
<!--<springProperty scope="context" name="ACCESS_KEY" source="ACCESS_KEY"/>