Commit b2dad407 authored by Spiros Koulouzis

null checks

parent 1221b093
Converter.java

@@ -2,7 +2,7 @@
  * Copyright 2019 S. Koulouzis
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
+ * you may not use this zipfile except in compliance with the License.
  * You may obtain a copy of the License at
  *
  *      http://www.apache.org/licenses/LICENSE-2.0
@@ -15,21 +15,29 @@
  */
 package nl.uva.sne.drip.commons.utils;
 
-import java.io.File;
-import java.io.FileInputStream;
+import java.io.BufferedInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.FileSystem;
+import java.nio.file.FileSystems;
+import java.nio.file.FileVisitResult;
 import java.nio.file.Files;
+import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.nio.file.SimpleFileVisitor;
+import java.nio.file.attribute.BasicFileAttributes;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Base64;
+import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
 import java.util.zip.ZipOutputStream;
 import org.json.JSONException;
 import org.json.JSONObject;
@@ -98,29 +106,55 @@ public class Converter {
         return new String(encodedBytes, StandardCharsets.UTF_8);
     }
 
-    public static void zipFolder(String sourceDir, String zipFile) throws FileNotFoundException, IOException {
-        FileOutputStream fout = new FileOutputStream(zipFile);
-        try (ZipOutputStream zout = new ZipOutputStream(fout)) {
-            File fileSource = new File(sourceDir);
-            addDirectory(zout, fileSource);
-        }
-    }
-
-    private static void addDirectory(ZipOutputStream zout, File fileSource) throws FileNotFoundException, IOException {
-        File[] files = fileSource.listFiles();
-        for (File file : files) {
-            if (file.isDirectory()) {
-                addDirectory(zout, file);
-                continue;
-            }
-            byte[] buffer = new byte[1024];
-            try (final FileInputStream fin = new FileInputStream(file)) {
-                zout.putNextEntry(new ZipEntry(file.getName()));
-                int length;
-                while ((length = fin.read(buffer)) > 0) {
-                    zout.write(buffer, 0, length);
-                }
-                zout.closeEntry();
-            }
-        }
-    }
+    public static void zipFolder(String sourceFolder, String zipFolder) throws FileNotFoundException, IOException {
+        try (FileOutputStream fos = new FileOutputStream(zipFolder);
+                ZipOutputStream zos = new ZipOutputStream(fos)) {
+            Path sourcePath = Paths.get(sourceFolder);
+            Files.walkFileTree(sourcePath, new SimpleFileVisitor<Path>() {
+                @Override
+                public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attrs) throws IOException {
+                    if (!sourcePath.equals(dir)) {
+                        zos.putNextEntry(new ZipEntry(sourcePath.relativize(dir).toString() + "/"));
+                        zos.closeEntry();
+                    }
+                    return FileVisitResult.CONTINUE;
+                }
+
+                @Override
+                public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
+                    zos.putNextEntry(new ZipEntry(sourcePath.relativize(file).toString()));
+                    Files.copy(file, zos);
+                    zos.closeEntry();
+                    return FileVisitResult.CONTINUE;
+                }
+            });
+        }
+    }
+
+    public static void unzipFolder(String zipFile) throws IOException {
+        try (ZipFile zipfile = new ZipFile(zipFile)) {
+            FileSystem fileSystem = FileSystems.getDefault();
+            Enumeration<? extends ZipEntry> zipEntries = zipfile.entries();
+            String uncompressedDirectory = "uncompressed/";
+            Files.createDirectory(fileSystem.getPath(uncompressedDirectory));
+            while (zipEntries.hasMoreElements()) {
+                ZipEntry entry = zipEntries.nextElement();
+                if (entry.isDirectory()) {
+                    Files.createDirectories(fileSystem.getPath(uncompressedDirectory + entry.getName()));
+                } else {
+                    InputStream is = zipfile.getInputStream(entry);
+                    BufferedInputStream bis = new BufferedInputStream(is);
+                    String uncompressedFileName = uncompressedDirectory + entry.getName();
+                    Path uncompressedFilePath = fileSystem.getPath(uncompressedFileName);
+                    Files.createFile(uncompressedFilePath);
+                    try (FileOutputStream fileOutput = new FileOutputStream(uncompressedFileName)) {
+                        while (bis.available() > 0) {
+                            fileOutput.write(bis.read());
+                        }
+                    }
+                }
+            }
+        }
+    }
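The rewritten zipFolder() walks the source tree with Files.walkFileTree() and stores entries relative to the source folder, while the new unzipFolder() extracts into a relative "uncompressed/" directory, which Files.createDirectory() will refuse to create if it already exists. A minimal usage sketch, not part of the commit; the paths are hypothetical:

import java.io.IOException;

import nl.uva.sne.drip.commons.utils.Converter;

public class ConverterZipExample {

    public static void main(String[] args) throws IOException {
        String sourceFolder = "/tmp/cloudstorm-input";  // hypothetical input directory
        String zipPath = "/tmp/cloudstorm-input.zip";   // hypothetical archive path

        // Archives sourceFolder; entry names are stored relative to sourceFolder.
        Converter.zipFolder(sourceFolder, zipPath);

        // Extracts the archive into ./uncompressed/ in the current working directory.
        Converter.unzipFolder(zipPath);
    }
}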
ToscaHelper.java

@@ -38,7 +38,6 @@ import java.util.logging.Logger;
 import nl.uva.sne.drip.model.Exceptions.TypeExeption;
 import nl.uva.sne.drip.model.NodeTemplate;
 import nl.uva.sne.drip.model.NodeTemplateMap;
-import nl.uva.sne.drip.model.cloud.storm.CloudsStormSubTopology;
 import nl.uva.sne.drip.model.tosca.Credential;
 import nl.uva.sne.drip.model.tosca.ToscaTemplate;
 import nl.uva.sne.drip.sure.tosca.client.DefaultApi;
@@ -319,8 +318,11 @@ public class ToscaHelper {
         if (attributes == null) {
             attributes = new HashMap<>();
         }
-        attributes.put(stateName, nodeState.toString());
-        node.getNodeTemplate().attributes(attributes);
+        if (nodeState != null) {
+            attributes.put(stateName, nodeState.toString());
+            node.getNodeTemplate().attributes(attributes);
+        }
         return node;
     }
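With the guard above, setNodeCurrentState() now ignores a null state instead of failing on nodeState.toString(). A hedged sketch of the two call paths, assuming an already initialised ToscaHelper named helper (the attribute key is internal to ToscaHelper and not spelled out here):

// Sketch only; "helper" and the surrounding exception handling are assumed, not part of the commit.
NodeTemplateMap node = helper.getVMTopologyTemplates().get(0);
node = helper.setNodeCurrentState(node, ToscaHelper.NODE_STATES.RUNNING); // stores the state attribute
node = helper.setNodeCurrentState(node, null); // with the new null check this leaves the attributes unchanged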
ToscaHelperTest.java

@@ -37,6 +37,7 @@ import nl.uva.sne.drip.model.NodeTemplate;
 import nl.uva.sne.drip.model.NodeTemplateMap;
 import nl.uva.sne.drip.model.Provisioner;
 import nl.uva.sne.drip.model.cloud.storm.CloudsStormSubTopology;
+import nl.uva.sne.drip.model.cloud.storm.OpCode;
 import nl.uva.sne.drip.model.tosca.Credential;
 import nl.uva.sne.drip.model.tosca.ToscaTemplate;
 import org.junit.After;
@@ -420,12 +421,12 @@ public class ToscaHelperTest {
         }
     }
 
     /**
      * Test of getKeyPairsFromVM method, of class ToscaHelper.
      */
     @Test
     public void testGetKeyPairsFromVM() throws Exception {
+        if (serviceUp) {
             System.out.println("getKeyPairsFromVM");
             instance.uploadToscaTemplate(provisionedToscaTemplate);
             KeyPair keyPair;
@@ -442,7 +443,7 @@ public class ToscaHelperTest {
                 assertNotNull(keyPair);
             }
         }
+    }
 }
 
     /**
@@ -461,4 +462,87 @@ public class ToscaHelperTest {
         }
     }
 
+    /**
+     * Test of getNodeCurrentState method, of class ToscaHelper.
+     *
+     * @throws java.io.IOException
+     * @throws com.fasterxml.jackson.core.JsonProcessingException
+     * @throws nl.uva.sne.drip.sure.tosca.client.ApiException
+     */
+    @Test
+    public void testGetNodeCurrentState() throws IOException, JsonProcessingException, ApiException {
+        if (serviceUp) {
+            System.out.println("getNodeCurrentState");
+            instance.uploadToscaTemplate(provisionedToscaTemplate);
+            NodeTemplateMap node = instance.getVMTopologyTemplates().get(0);
+            ToscaHelper.NODE_STATES expResult = ToscaHelper.NODE_STATES.RUNNING;
+            ToscaHelper.NODE_STATES result = instance.getNodeCurrentState(node);
+            assertEquals(expResult, result);
+        }
+    }
+
+    /**
+     * Test of setNodeCurrentState method, of class ToscaHelper.
+     *
+     * @throws java.io.IOException
+     * @throws com.fasterxml.jackson.core.JsonProcessingException
+     * @throws nl.uva.sne.drip.sure.tosca.client.ApiException
+     */
+    @Test
+    public void testSetNodeCurrentState() throws IOException, JsonProcessingException, ApiException {
+        if (serviceUp) {
+            System.out.println("setNodeCurrentState");
+            instance.uploadToscaTemplate(provisionedToscaTemplate);
+            NodeTemplateMap node = instance.getVMTopologyTemplates().get(0);
+            ToscaHelper.NODE_STATES nodeState = ToscaHelper.NODE_STATES.DELETED;
+            NodeTemplateMap result = instance.setNodeCurrentState(node, nodeState);
+            assertEquals(instance.getNodeCurrentState(node), nodeState);
+            instance.setNodeCurrentState(node, null);
+        }
+    }
+
+    /**
+     * Test of NodeDesiredState2CloudStormOperation method, of class
+     * ToscaHelper.
+     */
+    @Test
+    public void testNodeDesiredState2CloudStormOperation() {
+        System.out.println("NodeDesiredState2CloudStormOperation");
+        ToscaHelper.NODE_STATES nodeDesiredState = ToscaHelper.NODE_STATES.RUNNING;
+        OpCode.OperationEnum expResult = OpCode.OperationEnum.PROVISION;
+        OpCode.OperationEnum result = ToscaHelper.NodeDesiredState2CloudStormOperation(nodeDesiredState);
+        assertEquals(expResult, result);
+
+        nodeDesiredState = ToscaHelper.NODE_STATES.DELETED;
+        expResult = OpCode.OperationEnum.DELETE;
+        result = ToscaHelper.NodeDesiredState2CloudStormOperation(nodeDesiredState);
+        assertEquals(expResult, result);
+
+        nodeDesiredState = ToscaHelper.NODE_STATES.STOPPED;
+        expResult = OpCode.OperationEnum.STOP;
+        result = ToscaHelper.NodeDesiredState2CloudStormOperation(nodeDesiredState);
+        assertEquals(expResult, result);
+
+        nodeDesiredState = ToscaHelper.NODE_STATES.STARTED;
+        expResult = OpCode.OperationEnum.START;
+        result = ToscaHelper.NodeDesiredState2CloudStormOperation(nodeDesiredState);
+        assertEquals(expResult, result);
+    }
+
+    /**
+     * Test of nodeCurrentState2CloudStormStatus method, of class ToscaHelper.
+     */
+    @Test
+    public void testNodeCurrentState2CloudStormStatus() {
+        System.out.println("nodeCurrentState2CloudStormStatus");
+        ToscaHelper.NODE_STATES currentState = ToscaHelper.NODE_STATES.CONFIGURED;
+        CloudsStormSubTopology.StatusEnum expResult = null;
+        CloudsStormSubTopology.StatusEnum result = ToscaHelper.nodeCurrentState2CloudStormStatus(currentState);
+        assertEquals(expResult, result);
+    }
 }
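The new tests pin down the mapping from desired node state to CloudStorm operation. A compact summary of what they assert, written as a sketch rather than the ToscaHelper implementation, with imports assumed to match the test class:

// Summary sketch of the assertions in testNodeDesiredState2CloudStormOperation and
// testNodeCurrentState2CloudStormStatus; states the tests do not exercise are left out.
static OpCode.OperationEnum desiredStateToOperation(ToscaHelper.NODE_STATES state) {
    switch (state) {
        case RUNNING: return OpCode.OperationEnum.PROVISION;
        case DELETED: return OpCode.OperationEnum.DELETE;
        case STOPPED: return OpCode.OperationEnum.STOP;
        case STARTED: return OpCode.OperationEnum.START;
        default:      return null; // not covered by these tests
    }
}
// The tests also assert that nodeCurrentState2CloudStormStatus(CONFIGURED) returns null.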
CloudStormService.java

@@ -54,6 +54,7 @@ import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.math3.ml.distance.EuclideanDistance;
 import org.apache.maven.shared.utils.io.DirectoryScanner;
+import topology.analysis.TopologyAnalysisMain;
 
 /**
  *
@@ -112,7 +113,7 @@ class CloudStormService {
         objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
     }
 
-    public ToscaTemplate execute() throws FileNotFoundException, JSchException, IOException, ApiException, Exception {
+    public ToscaTemplate execute(boolean dryRun) throws FileNotFoundException, JSchException, IOException, ApiException, Exception {
         String tempInputDirPath = System.getProperty("java.io.tmpdir") + File.separator + "Input-" + Long.toString(System.nanoTime()) + File.separator;
         File tempInputDir = new File(tempInputDirPath);
@@ -149,7 +150,7 @@ class CloudStormService {
         List<CloudsStormSubTopology> cloudStormSubtopologies = (List<CloudsStormSubTopology>) subTopologiesAndVMs.get("cloud_storm_subtopologies");
         writeCloudStormInfrasCodeFiles(infrasCodeTempInputDirPath, cloudStormSubtopologies);
 
-        ToscaTemplate newToscaTemplate = runCloudStorm(tempInputDirPath);
+        ToscaTemplate newToscaTemplate = runCloudStorm(tempInputDirPath, dryRun);
         getHelper().uploadToscaTemplate(newToscaTemplate);
         return newToscaTemplate;
     }
@@ -351,11 +352,20 @@ class CloudStormService {
         FileUtils.copyDirectory(srcDir, destDir);
     }
 
-    protected ToscaTemplate runCloudStorm(String tempInputDirPath) throws IOException, ApiException {
+    protected ToscaTemplate runCloudStorm(String tempInputDirPath, boolean dryRun) throws IOException, ApiException {
         String[] args = new String[]{"run", tempInputDirPath};
-        standalone.MainAsTool.main(args);
-        CloudsStormTopTopology _top = objectMapper.readValue(new File(tempInputDirPath + TOPOLOGY_RELATIVE_PATH
-                + TOP_TOPOLOGY_FILE_NAME),
+        File topTopologyFile = new File(tempInputDirPath + TOPOLOGY_RELATIVE_PATH
+                + TOP_TOPOLOGY_FILE_NAME);
+        if (!dryRun) {
+            standalone.MainAsTool.main(args);
+        } else {
+            TopologyAnalysisMain tam = new TopologyAnalysisMain(topTopologyFile.getAbsolutePath());
+            if (!tam.fullLoadWholeTopology()) {
+                Logger.getLogger(CloudStormService.class.getName()).log(Level.FINE, "CloudStrom topology file at: {0} has errors", topTopologyFile.getAbsolutePath());
+            }
+        }
+        CloudsStormTopTopology _top = objectMapper.readValue(topTopologyFile,
                 CloudsStormTopTopology.class);
         List<CloudsStormSubTopology> subTopologies = _top.getTopologies();
@@ -363,20 +373,68 @@ class CloudStormService {
         List<NodeTemplateMap> vmTopologiesMaps = getHelper().getVMTopologyTemplates();
         int i = 0;
         for (CloudsStormSubTopology subTopology : subTopologies) {
+            setSSHKeysToVMAttributes(i, vmTopologiesMaps, subTopology, tempInputDirPath);
+        }
+        return toscaTemplate;
+    }
+
+    private double[] convert2ArrayofDoubles(Double numOfCores, Double memSize, Double diskSize) {
+        double[] vector = new double[]{numOfCores, memSize, diskSize};
+        return vector;
+    }
+
+    protected KeyPair getKeyPair() throws ApiException, TypeExeption, JSchException {
+        KeyPair keyPair = null;
+        List<NodeTemplateMap> vmTopologyTemplatesMap = getHelper().getVMTopologyTemplates();
+        for (NodeTemplateMap nodeTemplateMap : vmTopologyTemplatesMap) {
+            List<NodeTemplateMap> vmTemplatesMap = getHelper().getTemplateVMsForVMTopology(nodeTemplateMap);
+            for (NodeTemplateMap vmMap : vmTemplatesMap) {
+                keyPair = getHelper().getKeyPairsFromVM(vmMap.getNodeTemplate());
+                break;
+            }
+        }
+        return keyPair;
+    }
+
+    protected NodeTemplateMap addCloudStromArtifacts(NodeTemplateMap vmTopologyMap, String tempInputDirPath) throws IOException {
+        Map<String, Object> artifacts = vmTopologyMap.getNodeTemplate().getArtifacts();
+        if (artifacts == null) {
+            artifacts = new HashMap<>();
+        }
+        Map<String, String> provisionedFiles = new HashMap<>();
+        provisionedFiles.put("type", ENCODED_FILE_DATATYPE);
+        File tempInputDirFile = new File(tempInputDirPath);
+        String zipPath = (tempInputDirFile.getAbsolutePath() + "-cloudStromFiles.zip");
+        String sourceFolderPath = tempInputDirPath;
+        Converter.zipFolder(sourceFolderPath, zipPath);
+        String cloudStormZipFileContentsAsBase64 = Converter.encodeFileToBase64Binary(zipPath);
+        provisionedFiles.put("file_contents", cloudStormZipFileContentsAsBase64);
+        provisionedFiles.put("encoding", "base64");
+        provisionedFiles.put("file_ext", "zip");
+        artifacts.put("provisioned_files", provisionedFiles);
+        vmTopologyMap.getNodeTemplate().setArtifacts(artifacts);
+        return vmTopologyMap;
+    }
+
+    private void setSSHKeysToVMAttributes(int i, List<NodeTemplateMap> vmTopologiesMaps, CloudsStormSubTopology subTopology, String tempInputDirPath) throws IOException, ApiException {
         NodeTemplateMap vmTopologyMap = vmTopologiesMaps.get(i);
         vmTopologyMap = addCloudStromArtifacts(vmTopologyMap, tempInputDirPath);
         getHelper().setNodeCurrentState(vmTopologyMap, cloudStormStatus2NodeState(subTopology.getStatus()));
+        Credential rootKeyPairCredential = null;
+        if (subTopology.getSshKeyPairId() != null) {
             String rootKeyPairFolder = tempInputDirPath + TOPOLOGY_RELATIVE_PATH
                     + File.separator + subTopology.getSshKeyPairId();
-            Credential rootKeyPairCredential = new Credential();
+            rootKeyPairCredential = new Credential();
             rootKeyPairCredential.setProtocol("ssh");
             Map<String, String> rootKeys = new HashMap<>();
             rootKeys.put("private_key", Converter.encodeFileToBase64Binary(rootKeyPairFolder + File.separator + "id_rsa"));
             DirectoryScanner scanner = new DirectoryScanner();
             scanner.setIncludes(new String[]{"**/*.pub"});
             scanner.setBasedir(rootKeyPairFolder + File.separator);
@@ -386,6 +444,7 @@ class CloudStormService {
             rootKeys.put("public_key", Converter.encodeFileToBase64Binary(rootKeyPairFolder + File.separator + File.separator + fileNames[0]));
             rootKeyPairCredential.setKeys(rootKeys);
+        }
 
         String userKyePairFolder = tempInputDirPath + TOPOLOGY_RELATIVE_PATH;
         Credential userKeyPairCredential = new Credential();
@@ -415,8 +474,9 @@ class CloudStormService {
             }
             vmAttributes.put("node_type", vm.getNodeType());
             vmAttributes.put("host_name", vm.getName());
+            if (rootKeyPairCredential != null) {
                 vmAttributes.put("root_key_pair", rootKeyPairCredential);
+            }
             vmAttributes.put("user_key_pair", userKeyPairCredential);
             vmTemplateMap.getNodeTemplate().setAttributes(vmAttributes);
             toscaTemplate = getHelper().setNodeInToscaTemplate(toscaTemplate, vmTemplateMap);
@@ -425,45 +485,5 @@ class CloudStormService {
         toscaTemplate = getHelper().setNodeInToscaTemplate(toscaTemplate, vmTopologyMap);
         i++;
     }
-        return toscaTemplate;
-    }
-
-    private double[] convert2ArrayofDoubles(Double numOfCores, Double memSize, Double diskSize) {
-        double[] vector = new double[]{numOfCores, memSize, diskSize};
-        return vector;
-    }
-
-    protected KeyPair getKeyPair() throws ApiException, TypeExeption, JSchException {
-        KeyPair keyPair = null;
-        List<NodeTemplateMap> vmTopologyTemplatesMap = getHelper().getVMTopologyTemplates();
-        for (NodeTemplateMap nodeTemplateMap : vmTopologyTemplatesMap) {
-            List<NodeTemplateMap> vmTemplatesMap = getHelper().getTemplateVMsForVMTopology(nodeTemplateMap);
-            for (NodeTemplateMap vmMap : vmTemplatesMap) {
-                keyPair = getHelper().getKeyPairsFromVM(vmMap.getNodeTemplate());
-                break;
-            }
-        }
-        return keyPair;
-    }
-
-    protected NodeTemplateMap addCloudStromArtifacts(NodeTemplateMap vmTopologyMap, String tempInputDirPath) throws IOException {
-        Map<String, Object> artifacts = vmTopologyMap.getNodeTemplate().getArtifacts();
-        if (artifacts == null) {
-            artifacts = new HashMap<>();
-        }
-        Map<String, String> provisionedFiles = new HashMap<>();
-        provisionedFiles.put("type", ENCODED_FILE_DATATYPE);
-        String zipPath = (tempInputDirPath + File.separator + TOPOLOGY_FOLDER_NAME + ".zip");
-        String sourceFolderPath = tempInputDirPath + TOPOLOGY_RELATIVE_PATH;
-        Converter.zipFolder(sourceFolderPath, zipPath);
-        String cloudStormZipFileContentsAsBase64 = Converter.encodeFileToBase64Binary(zipPath);
-        provisionedFiles.put("file_contents", cloudStormZipFileContentsAsBase64);
-        provisionedFiles.put("encoding", "base64");
-        provisionedFiles.put("file_ext", "zip");
-        artifacts.put("provisioned_files", provisionedFiles);
-        vmTopologyMap.getNodeTemplate().setArtifacts(artifacts);
-        return vmTopologyMap;
-    }
 }
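execute() and runCloudStorm() now take a dryRun flag: when it is true, standalone.MainAsTool is not invoked and the generated top-topology file is only loaded and validated with TopologyAnalysisMain. A hedged calling sketch mirroring the Consumer and test code below; "properties" and "message" are assumed to come from that surrounding context:

// Sketch only, not part of the commit.
CloudStormService service = new CloudStormService(properties, message.getToscaTemplate());
ToscaTemplate toscaTemplate = service.execute(true);  // dry run: validate the generated CloudStorm files, no provisioning
// service.execute(false) would run standalone.MainAsTool and actually provision the topology.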
Consumer.java

@@ -77,7 +77,8 @@ public class Consumer extends DefaultConsumer {
         }
         CloudStormService service = new CloudStormService(this.properties, message.getToscaTemplate());
-        ToscaTemplate toscaTemplate = service.execute();
+        boolean dryRun = false;
+        ToscaTemplate toscaTemplate = service.execute(dryRun);
         responceMessage = new Message();
         responceMessage.setCreationDate(System.currentTimeMillis());
CloudStormServiceTest.java

@@ -129,17 +129,23 @@ public class CloudStormServiceTest {
     /**
      * Test of execute method, of class CloudStormService.
+     *
+     * @throws java.lang.Exception
      */
-//    @Test
-//    public void testExecute() throws Exception {
-//        if (ToscaHelper.isServiceUp(sureToscaBasePath)) {
-//            System.out.println("execute");
-//            CloudStormService instance = getService(messageExampleProvisioneRequestFilePath);
-//            instance.execute();
-//        }
-//    }
+    @Test
+    public void testExecute() throws Exception {
+        if (ToscaHelper.isServiceUp(sureToscaBasePath)) {
+            System.out.println("execute");
+            CloudStormService instance = getService(messageExampleProvisioneRequestFilePath);
+            boolean dryRun = true;
+            instance.execute(dryRun);
+        }
+    }
 
     /**
      * Test of writeCloudStormTopologyFiles method, of class CloudStormService.
+     *
+     * @throws java.lang.Exception
      */
     @Test
     public void testWriteCloudStormTopologyFiles() throws Exception {
@@ -151,6 +157,8 @@ public class CloudStormServiceTest {
     /**
      * Test of buildSSHKeyPair method, of class CloudStormService.
+     *
+     * @throws java.lang.Exception
      */
     @Test
     public void testBuildSSHKeyPair() throws Exception {
@@ -194,6 +202,8 @@ public class CloudStormServiceTest {
     /**
      * Test of getKeyPair method, of class CloudStormService.
+     *
+     * @throws java.lang.Exception
      */
     @Test
     public void testGetKeyPair() throws Exception {