Sync Integ to Master
Change-Id: I71e3acc26fa612127756ac04073a522b9cc6cd74
Issue-ID: SDC-977
Signed-off-by: Gitelman, Tal (tg851x) <tg851x@intl.att.com>
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
new file mode 100644
index 0000000..053596d
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLITool.java
@@ -0,0 +1,56 @@
+package org.openecomp.sdc.asdctool.cli;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.DefaultParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Abstract base class to extend when implementing a command line (CLI) tool.
+ */
+public abstract class CLITool {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(CLITool.class);
+
+ public CLIToolData init(String[] args) {
+ CommandLine commandLine = initCmdLineOptions(args);
+ return new CLIToolData(commandLine);
+ }
+
+ private CommandLine initCmdLineOptions(String[] args) {
+ Options options = buildCmdLineOptions();
+ CommandLineParser parser = new DefaultParser();
+ try {
+ return parser.parse(options, args);
+ }
+ catch (ParseException exp) {
+ LOGGER.error("Parsing failed. Reason: {}", exp.getMessage());
+ usageAndExit(options);
+ return null;
+ }
+ }
+
+ private void usageAndExit(Options options) {
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp(commandName(), options);
+ System.exit(1);
+ }
+
+ /**
+ *
+ * @return all command line options required by this command line tool
+ */
+ protected abstract Options buildCmdLineOptions();
+
+ /**
+ *
+ * @return the command name
+ */
+ protected abstract String commandName();
+
+
+}
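
A minimal sketch of how a concrete tool is expected to extend CLITool. The class name EchoTool and its -m option are illustrative assumptions, not part of this patch.

    package org.openecomp.sdc.asdctool.cli;

    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;

    // Hypothetical tool; demonstrates the CLITool contract only.
    public class EchoTool extends CLITool {

        @Override
        protected Options buildCmdLineOptions() {
            // single required option: -m / --message
            Option message = Option.builder("m")
                    .longOpt("message")
                    .required()
                    .hasArg()
                    .desc("message to echo - required")
                    .build();
            return new Options().addOption(message);
        }

        @Override
        protected String commandName() {
            return "echo-tool";
        }

        public static void main(String[] args) {
            // init(args) parses the arguments and prints usage + exits on a parse failure
            CLIToolData data = new EchoTool().init(args);
            System.out.println(data.getCommandLine().getOptionValue("m"));
        }
    }
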
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIToolData.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIToolData.java
new file mode 100644
index 0000000..2811905
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIToolData.java
@@ -0,0 +1,31 @@
+package org.openecomp.sdc.asdctool.cli;
+
+import org.apache.commons.cli.CommandLine;
+import org.springframework.context.support.AbstractApplicationContext;
+
+public class CLIToolData {
+
+ private CommandLine commandLine;
+ private AbstractApplicationContext springApplicationContext;
+
+ public CLIToolData(CommandLine commandLine) {
+ this.commandLine = commandLine;
+ }
+
+ public CLIToolData(CommandLine commandLine, AbstractApplicationContext springApplicationContext) {
+ this.commandLine = commandLine;
+ this.springApplicationContext = springApplicationContext;
+ }
+
+ public CommandLine getCommandLine() {
+ return commandLine;
+ }
+
+ public AbstractApplicationContext getSpringApplicationContext() {
+ return springApplicationContext;
+ }
+
+ public void setSpringApplicationContext(AbstractApplicationContext springApplicationContext) {
+ this.springApplicationContext = springApplicationContext;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIUtils.java
new file mode 100644
index 0000000..b5f7ae0
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/CLIUtils.java
@@ -0,0 +1,21 @@
+package org.openecomp.sdc.asdctool.cli;
+
+import org.apache.commons.cli.Option;
+
+public class CLIUtils {
+
+ static final String CONFIG_PATH_SHORT_OPT = "c";
+ private static final String CONFIG_PATH_LONG_OPT = "configFolderPath";
+
+ private CLIUtils(){}
+
+ public static Option getConfigurationPathOption() {
+ return Option.builder(CONFIG_PATH_SHORT_OPT)
+ .longOpt(CONFIG_PATH_LONG_OPT)
+ .required()
+ .hasArg()
+ .desc("path to sdc configuration folder - required")
+ .build();
+ }
+
+}
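
A short, self-contained sketch of what the shared configuration-path option looks like to a tool: it is passed as -c <folder> (or --configFolderPath <folder>) and read back through the short-option constant. The demo class and the sample path are hypothetical.

    package org.openecomp.sdc.asdctool.cli;

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    // Hypothetical demo; same package as CLIUtils so the package-private constant is visible.
    public class ConfigOptionDemo {
        public static void main(String[] args) throws ParseException {
            Options options = new Options().addOption(CLIUtils.getConfigurationPathOption());
            CommandLine cmd = new DefaultParser().parse(options, new String[]{"-c", "/opt/sdc/config"});
            // prints "/opt/sdc/config"; --configFolderPath behaves the same way
            System.out.println(cmd.getOptionValue(CLIUtils.CONFIG_PATH_SHORT_OPT));
        }
    }
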
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/SpringCLITool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/SpringCLITool.java
new file mode 100644
index 0000000..a672c2a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/cli/SpringCLITool.java
@@ -0,0 +1,32 @@
+package org.openecomp.sdc.asdctool.cli;
+
+import org.apache.commons.cli.Options;
+import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+/**
+ * Abstract class to extend when implementing a Spring-based command line tool that relies on the SDC configuration.
+ */
+public abstract class SpringCLITool extends CLITool {
+
+ @Override
+ public CLIToolData init(String[] args) {
+ CLIToolData cliToolData = super.init(args);
+ String appConfigDir = cliToolData.getCommandLine().getOptionValue(CLIUtils.CONFIG_PATH_SHORT_OPT);
+ ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(getSpringConfigurationClass());
+ cliToolData.setSpringApplicationContext(context);
+ return cliToolData;
+ }
+
+ @Override
+ protected Options buildCmdLineOptions() {
+ return new Options().addOption(CLIUtils.getConfigurationPathOption());
+ }
+
+ /**
+ *
+ * @return the {@code Class} which holds all the Spring bean declarations needed by this CLI tool
+ */
+ protected abstract Class<?> getSpringConfigurationClass();
+}
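
A sketch of how SpringCLITool is meant to be used end to end: the subclass only names its Spring configuration class, and init(args) uploads the SDC configuration and starts the context. SampleSpringTool and the bean lookup below are illustrative assumptions; GetConsumersConfiguration is reused here only as an example configuration class.

    package org.openecomp.sdc.asdctool.cli;

    import org.openecomp.sdc.asdctool.configuration.GetConsumersConfiguration;
    import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
    import org.springframework.context.support.AbstractApplicationContext;

    // Hypothetical tool; shows the expected wiring: parse args, upload SDC config, start Spring.
    public class SampleSpringTool extends SpringCLITool {

        @Override
        protected String commandName() {
            return "sampleSpringTool";
        }

        @Override
        protected Class<?> getSpringConfigurationClass() {
            return GetConsumersConfiguration.class;
        }

        public static void main(String[] args) {
            // -c <sdc configuration folder> is required by the base class options
            CLIToolData data = new SampleSpringTool().init(args);
            AbstractApplicationContext context = data.getSpringApplicationContext();
            ConsumerOperation consumers = context.getBean(ConsumerOperation.class);
            // ... use the bean, then shut the context down
            context.close();
        }
    }
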
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
index f141966..a7620b1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ArtifactUUIDFixConfiguration.java
@@ -1,54 +1,11 @@
package org.openecomp.sdc.asdctool.configuration;
+
import org.openecomp.sdc.asdctool.impl.ArtifactUuidFix;
-import org.openecomp.sdc.be.auditing.impl.AuditingManager;
-import org.openecomp.sdc.be.components.ArtifactsResolver;
-import org.openecomp.sdc.be.components.distribution.engine.DistributionEngine;
import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
-import org.openecomp.sdc.be.components.impl.AdditionalInformationBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ArtifactResolverImpl;
-import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic;
-import org.openecomp.sdc.be.components.impl.CompositionBusinessLogic;
-import org.openecomp.sdc.be.components.impl.GroupBusinessLogic;
-import org.openecomp.sdc.be.components.impl.InputsBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ProductBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ResourceImportManager;
-import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.VFComponentInstanceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.generic.GenericTypeBusinessLogic;
-import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
-import org.openecomp.sdc.be.components.merge.heat.HeatEnvArtifactsMergeBusinessLogic;
-import org.openecomp.sdc.be.components.merge.input.InputsValuesMergingBusinessLogic;
-import org.openecomp.sdc.be.components.merge.instance.ComponentInstanceMergeDataBusinessLogic;
-import org.openecomp.sdc.be.components.merge.property.DataDefinitionsValuesMergingBusinessLogic;
-import org.openecomp.sdc.be.dao.DAOTitanStrategy;
-import org.openecomp.sdc.be.dao.TitanClientStrategy;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-import org.openecomp.sdc.be.dao.impl.AuditingDao;
-import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
-import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
-import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
-import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.cache.ComponentCache;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
-import org.openecomp.sdc.be.model.operations.impl.CapabilityTypeOperation;
-import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
-import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
-import org.openecomp.sdc.be.model.operations.impl.ElementOperation;
-import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupInstanceOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
-import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
-import org.openecomp.sdc.be.tosca.CsarUtils;
-import org.openecomp.sdc.be.tosca.ToscaExportHandler;
-import org.openecomp.sdc.be.user.UserBusinessLogic;
-import org.springframework.beans.factory.annotation.Qualifier;
+import org.openecomp.sdc.config.CatalogBESpringConfig;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
@@ -57,17 +14,10 @@
import org.springframework.core.io.FileSystemResource;
@Configuration
-@Import(DAOSpringConfig.class)
-@ComponentScan({
- "org.openecomp.sdc.be.model.operations.impl",
- "org.openecomp.sdc.be.model.cache",
- "org.openecomp.sdc.be.dao.titan",
- "org.openecomp.sdc.be.dao.cassandra",
- "org.openecomp.sdc.be.model.jsontitan.operations",
- "org.openecomp.sdc.be.dao.jsongraph",
- "org.openecomp.sdc.be.tosca",
- "org.openecomp.sdc.be.components.merge",
- })
+@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
+@ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
+ "org.openecomp.sdc.asdctool.migration.config.mocks"
+ })
public class ArtifactUUIDFixConfiguration {
@Bean
@@ -75,246 +25,11 @@
return new ArtifactUuidFix();
}
- @Bean(name = "cassandra-client")
- public CassandraClient cassandraClient() {
- return new CassandraClient();
- }
-
- @Bean(name = "dao-titan-strategy")
- public TitanClientStrategy daoStrategy() {
- return new DAOTitanStrategy();
- }
-
- @Bean(name = "titan-dao")
- public TitanDao titanDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) {
- return new TitanDao(titanGraphClient);
- }
-
- @Bean(name = "titan-client", initMethod = "createGraph")
- public TitanGraphClient titanClient(@Qualifier("dao-titan-strategy") TitanClientStrategy titanClientStrategy) {
- return new TitanGraphClient(titanClientStrategy);
- }
-
- @Bean(name = "resource-business-logic")
- public ResourceBusinessLogic resourceBusinessLogic() {
- return new ResourceBusinessLogic();
- }
-
-// @Bean(name = "healthCheckBusinessLogic")
-// public HealthCheckBusinessLogic healthCheckBusinessLogic() {
-// return new HealthCheckBusinessLogic();
-// }
-//
-// @Bean(name = "distribution-engine-cluster-health")
-// public DistributionEngineClusterHealth distributionEngineClusterHealth() {
-// return new DistributionEngineClusterHealth();
-// }
-//
-// @Bean(name = "cassandra-health-check")
-// public CassandraHealthCheck cassandraHealthCheck() {
-// return new CassandraHealthCheck();
-// }
-
-// @Bean(name = "switchover-detector")
-// public SwitchoverDetector switchoverDetector() {
-// return new SwitchoverDetector();
-// }
-
- @Bean(name = "service-business-logic")
- public ServiceBusinessLogic serviceBusinessLogic() {
- return new ServiceBusinessLogic();
- }
-
- @Bean(name = "capability-type-operation")
- public CapabilityTypeOperation CapabilityTypeOperation() {
- return new CapabilityTypeOperation();
- }
-
- @Bean(name = "lifecycle-business-logic")
- public LifecycleBusinessLogic lifecycleBusinessLogic() {
- return new LifecycleBusinessLogic();
- }
-
- @Bean(name = "property-operation")
- public PropertyOperation propertyOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
- return new PropertyOperation(titanGenericDao);
- }
-
- @Bean(name = "csar-operation")
- public CsarOperation csarOperation() {
- return new CsarOperation();
- }
-
- @Bean(name = "vf-component-instance-business-logic")
- public VFComponentInstanceBusinessLogic vFComponentInstanceBusinessLogic() {
- return new VFComponentInstanceBusinessLogic();
- }
-
- @Bean(name = "resource-import-manager")
- public ResourceImportManager resourceImportManager() {
- return new ResourceImportManager();
- }
-
- @Bean(name = "group-business-logic")
- public GroupBusinessLogic groupBusinessLogic() {
- return new GroupBusinessLogic();
- }
-
- @Bean(name = "inputs-business-logic")
- public InputsBusinessLogic inputsBusinessLogic() {
- return new InputsBusinessLogic();
- }
-
- @Bean(name = "composition-business-logic")
- public CompositionBusinessLogic compositionBusinessLogic() {
- return new CompositionBusinessLogic();
- }
-
- @Bean(name = "artifacts-business-logic")
- public ArtifactsBusinessLogic artifactsBusinessLogic() {
- return new ArtifactsBusinessLogic();
- }
-
- @Bean(name = "component-cache")
- public ComponentCache componentCache() {
- return new ComponentCache();
- }
-
- @Bean(name = "componentUtils")
- public ComponentsUtils componentsUtils() {
- return new ComponentsUtils();
- }
-
- @Bean(name = "user-business-logic")
- public UserBusinessLogic userBusinessLogic() {
- return new UserBusinessLogic();
- }
-
- @Bean(name = "graph-lock-operation")
- public GraphLockOperation graphLockOperation() {
- return new GraphLockOperation();
- }
-
- @Bean(name = "titan-generic-dao")
- public TitanGenericDao titanGenericDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) {
- return new TitanGenericDao(titanGraphClient);
- }
-
- @Bean(name = "element-operation")
- public ElementOperation elementOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
- return new ElementOperation(titanGenericDao);
- }
-
- @Bean(name = "group-operation")
- public GroupOperation groupOperation() {
- return new GroupOperation();
- }
-
- @Bean(name = "group-instance-operation")
- public GroupInstanceOperation groupInstanceOperation() {
- return new GroupInstanceOperation();
- }
-
- @Bean(name = "group-type-operation")
- public GroupTypeOperation groupTypeOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao, @Qualifier("property-operation") PropertyOperation propertyOperation) {
- return new GroupTypeOperation(titanGenericDao, propertyOperation);
- }
-
- @Bean(name = "tosca-operation-facade")
- public ToscaOperationFacade toscaOperationFacade() {
- return new ToscaOperationFacade();
- }
-
- @Bean(name = "distribution-engine")
- public DistributionEngine distributionEngine() {
- return null;
- }
-
- @Bean(name = "audit-cassandra-dao")
- public AuditCassandraDao auditCassandraDao() {
- return new AuditCassandraDao();
- }
-
- @Bean(name = "service-component-instance-business-logic")
- public ServiceComponentInstanceBusinessLogic serviceComponentInstanceBusinessLogic() {
- return new ServiceComponentInstanceBusinessLogic();
- }
-
- @Bean("tosca-export-handler")
- public ToscaExportHandler toscaExportHandler() {
- return new ToscaExportHandler();
- }
-
- @Bean(name = "component-instance-operation")
- public ComponentInstanceOperation componentInstanceOperation() {
- return new ComponentInstanceOperation();
- }
-
- @Bean(name = "additional-information-business-logic")
- public AdditionalInformationBusinessLogic additionalInformationBusinessLogic() {
- return new AdditionalInformationBusinessLogic();
- }
-
- @Bean(name = "auditing-manager")
- public AuditingManager auditingManager() {
- return new AuditingManager();
- }
-
- @Bean(name = "auditing-dao")
- public AuditingDao auditingDao() {
- return new AuditingDao();
- }
-
- @Bean(name = "elasticsearch-client", initMethod = "initialize")
- public ElasticSearchClient elasticSearchClient() {
- return new ElasticSearchClient();
- }
-
- @Bean(name = "csar-utils")
- public CsarUtils csarUtils() {
- return new CsarUtils();
- }
-
- @Bean(name = "service-distribution-artifacts-builder")
+ @Bean(name = "serviceDistributionArtifactsBuilder")
public ServiceDistributionArtifactsBuilder serviceDistributionArtifactsBuilder() {
return new ServiceDistributionArtifactsBuilder();
}
-
- @Bean(name = "product-business-logic")
- public ProductBusinessLogic productBusinessLogic() {
- return null;
- }
-
- @Bean(name = "dataDefinitionsValuesMergingBusinessLogic")
- public DataDefinitionsValuesMergingBusinessLogic dataDefinitionsValuesMergingBusinessLogic() {
- return new DataDefinitionsValuesMergingBusinessLogic();
- }
-
- @Bean(name = "artifacts-resolver")
- public ArtifactsResolver artifactsResolver() {
- return new ArtifactResolverImpl();
- }
-
- @Bean(name = "InputsValuesMergingBusinessLogic")
- public InputsValuesMergingBusinessLogic InputsValuesMergingBusinessLogic(){
- return new InputsValuesMergingBusinessLogic();
- }
-
- @Bean(name = "GenericTypeBusinessLogic")
- public GenericTypeBusinessLogic genericTypeBusinessLogic(){
- return new GenericTypeBusinessLogic();
- }
-
- @Bean(name ="componentInstanceMergeDataBusinessLogic")
- public ComponentInstanceMergeDataBusinessLogic componentInstanceMergeDataBusinessLogic(){
- return new ComponentInstanceMergeDataBusinessLogic();
- }
-
- @Bean(name ="heatEnvArtifactsMergeBusinessLogic")
- public HeatEnvArtifactsMergeBusinessLogic heatEnvArtifactsMergeBusinessLogic(){
- return new HeatEnvArtifactsMergeBusinessLogic();
- }
@Bean(name = "elasticsearchConfig")
public PropertiesFactoryBean mapper() {
@@ -324,4 +39,5 @@
return bean;
}
+
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
index 267f209..ac75dc8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/ConfigurationUploader.java
@@ -1,12 +1,12 @@
package org.openecomp.sdc.asdctool.configuration;
-import java.io.File;
-
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import java.io.File;
+
public class ConfigurationUploader {
public static void uploadConfigurationFiles(String appConfigDir) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java
new file mode 100644
index 0000000..e411b53
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/GetConsumersConfiguration.java
@@ -0,0 +1,20 @@
+package org.openecomp.sdc.asdctool.configuration;
+
+import org.openecomp.sdc.be.dao.config.TitanSpringConfig;
+import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
+import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.Import;
+
+@Configuration
+@Import({TitanSpringConfig.class})
+public class GetConsumersConfiguration {
+
+
+ @Bean("consumer-operation")
+ public ConsumerOperation consumerOperation(TitanGenericDao titanGenericDao) {
+ return new ConsumerOperation(titanGenericDao);
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
new file mode 100644
index 0000000..302d20f
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ESCatalogDAOMock.java
@@ -0,0 +1,41 @@
+package org.openecomp.sdc.asdctool.configuration.mocks.es;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.dao.api.ICatalogDAO;
+import org.openecomp.sdc.be.dao.api.ResourceUploadStatus;
+import org.openecomp.sdc.be.resources.data.ESArtifactData;
+
+import java.util.List;
+
+public class ESCatalogDAOMock implements ICatalogDAO {
+
+ @Override
+ public void addToIndicesMap(String typeName, String indexName) {
+
+ }
+
+ @Override
+ public void writeArtifact(ESArtifactData artifactData) {
+
+ }
+
+ @Override
+ public Either<ESArtifactData, ResourceUploadStatus> getArtifact(String id) {
+ return null;
+ }
+
+ @Override
+ public Either<List<ESArtifactData>, ResourceUploadStatus> getArtifacts(String[] ids) {
+ return null;
+ }
+
+ @Override
+ public void deleteArtifact(String id) {
+
+ }
+
+ @Override
+ public void deleteAllArtifacts() {
+
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
new file mode 100644
index 0000000..0038a95
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchClientMock.java
@@ -0,0 +1,26 @@
+package org.openecomp.sdc.asdctool.configuration.mocks.es;
+
+import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+
+import java.net.URISyntaxException;
+
+public class ElasticSearchClientMock extends ElasticSearchClient {
+
+ @Override
+ public void initialize() {
+
+ }
+
+ @Override
+ public void setClusterName(final String clusterName) {
+
+ }
+
+ @Override
+ public void setLocal(final String strIsLocal) {
+ }
+
+ @Override
+ public void setTransportClient(final String strIsTransportclient) {
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java
new file mode 100644
index 0000000..ad78a8c
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/ElasticSearchMocksConfiguration.java
@@ -0,0 +1,27 @@
+package org.openecomp.sdc.asdctool.configuration.mocks.es;
+
+import org.openecomp.sdc.be.dao.api.ICatalogDAO;
+import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
+import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class ElasticSearchMocksConfiguration {
+
+ @Bean("elasticsearch-client")
+ public ElasticSearchClient elasticSearchClientMock() {
+ return new ElasticSearchClientMock();
+ }
+
+ @Bean("resource-dao")
+ public ICatalogDAO esCatalogDAOMock() {
+ return new ESCatalogDAOMock();
+ }
+
+ @Bean("esHealthCheckDao")
+ public IEsHealthCheckDao esHealthCheckDaoMock() {
+ return new EsHealthCheckDaoMock();
+ }
+
+}
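
A sketch of how these mock beans can be pulled into a tool's Spring context so no live Elasticsearch is required; ToolWithoutEsConfiguration is a hypothetical configuration, not part of this patch.

    package org.openecomp.sdc.asdctool.configuration.mocks.es;

    import org.openecomp.sdc.be.dao.api.ICatalogDAO;
    import org.springframework.context.annotation.AnnotationConfigApplicationContext;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.context.annotation.Import;

    // Hypothetical configuration that stubs out Elasticsearch for an offline tool run.
    @Configuration
    @Import(ElasticSearchMocksConfiguration.class)
    public class ToolWithoutEsConfiguration {

        public static void main(String[] args) {
            AnnotationConfigApplicationContext ctx =
                    new AnnotationConfigApplicationContext(ToolWithoutEsConfiguration.class);
            // "resource-dao" resolves to ESCatalogDAOMock, so writes and deletes are no-ops
            ICatalogDAO catalogDao = ctx.getBean("resource-dao", ICatalogDAO.class);
            catalogDao.deleteAllArtifacts();
            ctx.close();
        }
    }
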
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java
new file mode 100644
index 0000000..2aabc86
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/configuration/mocks/es/EsHealthCheckDaoMock.java
@@ -0,0 +1,11 @@
+package org.openecomp.sdc.asdctool.configuration.mocks.es;
+
+import org.openecomp.sdc.be.dao.api.IEsHealthCheckDao;
+import org.openecomp.sdc.common.api.HealthCheckInfo;
+
+public class EsHealthCheckDaoMock implements IEsHealthCheckDao {
+ @Override
+ public HealthCheckInfo.HealthCheckStatus getClusterHealthStatus() {
+ return HealthCheckInfo.HealthCheckStatus.UP;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
index b8d9ae6..5d4610c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ArtifactUuidFix.java
@@ -1,14 +1,13 @@
package org.openecomp.sdc.asdctool.impl;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import fj.data.Either;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -16,8 +15,12 @@
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
+
+import org.openecomp.sdc.asdctool.impl.validator.utils.VfModuleArtifactPayloadEx;
import org.openecomp.sdc.be.components.distribution.engine.VfModuleArtifactPayload;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
import org.openecomp.sdc.be.dao.cassandra.ArtifactCassandraDao;
+import org.openecomp.sdc.be.dao.cassandra.CassandraOperationStatus;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
@@ -30,1081 +33,1413 @@
import org.openecomp.sdc.be.datatypes.elements.MapGroupsDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
-import org.openecomp.sdc.be.model.ArtifactDefinition;
-import org.openecomp.sdc.be.model.Component;
-import org.openecomp.sdc.be.model.ComponentInstance;
-import org.openecomp.sdc.be.model.ComponentParametersView;
-import org.openecomp.sdc.be.model.DistributionStatusEnum;
-import org.openecomp.sdc.be.model.GroupDefinition;
-import org.openecomp.sdc.be.model.GroupInstance;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.openecomp.sdc.be.model.Resource;
-import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.impl.ComponentsUtils;
+import org.openecomp.sdc.be.model.*;
import org.openecomp.sdc.be.model.jsontitan.datamodel.TopologyTemplate;
import org.openecomp.sdc.be.model.jsontitan.datamodel.ToscaElement;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.jsontitan.utils.ModelConverter;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.DaoStatusConverter;
+import org.openecomp.sdc.be.model.operations.impl.UniqueIdBuilder;
import org.openecomp.sdc.be.resources.data.ESArtifactData;
+import org.openecomp.sdc.be.resources.data.auditing.AuditingActionEnum;
import org.openecomp.sdc.be.tosca.CsarUtils;
import org.openecomp.sdc.be.tosca.ToscaError;
import org.openecomp.sdc.be.tosca.ToscaExportHandler;
import org.openecomp.sdc.be.tosca.ToscaRepresentation;
+import org.openecomp.sdc.common.api.ArtifactGroupTypeEnum;
import org.openecomp.sdc.common.api.ArtifactTypeEnum;
import org.openecomp.sdc.common.api.Constants;
+
import org.openecomp.sdc.common.util.GeneralUtility;
import org.openecomp.sdc.exception.ResponseFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.util.StringUtils;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonObject;
+
+import fj.data.Either;
@org.springframework.stereotype.Component("artifactUuidFix")
public class ArtifactUuidFix {
- @Autowired
- private TitanDao titanDao;
+ @Autowired
+ private TitanDao titanDao;
- @Autowired
- private ToscaOperationFacade toscaOperationFacade;
- @Autowired
- private ToscaExportHandler toscaExportUtils;
- @Autowired
- private ArtifactCassandraDao artifactCassandraDao;
+ @Autowired
+ private ToscaOperationFacade toscaOperationFacade;
+ @Autowired
+ private ToscaExportHandler toscaExportUtils;
+ @Autowired
+ private ArtifactCassandraDao artifactCassandraDao;
- @Autowired
- private CsarUtils csarUtils;
- private static Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class.getName());
+ @Autowired
+ private CsarUtils csarUtils;
- public boolean doFix(String fixComponent, String runMode) {
- List<Resource> vfLst = new ArrayList<>();
- List<Service> serviceList = new ArrayList<>();
- Map<String, List<Component>> nodeToFixTosca = new HashMap<>();
- Map<String, List<Component>> vfToFixTosca = new HashMap<>();
- Map<String, List<Component>> serviceToFixTosca = new HashMap<>();
+ private static final Logger log = LoggerFactory.getLogger(ArtifactUuidFix.class);
- long time = System.currentTimeMillis();
+ public boolean doFix(String fixComponent, String runMode) {
+ List<Resource> vfLst = new ArrayList<>();
+ List<Service> serviceList = new ArrayList<>();
+ Map<String, List<Component>> nodeToFixTosca = new HashMap<>();
+ Map<String, List<Component>> vfToFixTosca = new HashMap<>();
+ Map<String, List<Component>> serviceToFixTosca = new HashMap<>();
- doFixTosca(nodeToFixTosca, vfToFixTosca, serviceToFixTosca);
+ long time = System.currentTimeMillis();
- if (fixComponent.equals("vf_only")) {
- if (!fetchFaultVf(vfLst, time)) {
- return false;
- }
- } else {
- if (!fetchServices(fixComponent, serviceList, time)) {
- return false;
- }
- }
- if (runMode.equals("service_vf") || runMode.equals("fix")) {
- log.info("Mode {}. Find problem VFs", runMode);
- if (!fetchVf(serviceList, vfLst, time)) {
- log.info("Mode {}. Find problem VFs finished with failure", runMode);
- return false;
- }
- log.info("Mode {}. Find problem VFs finished with success", runMode);
- }
- if (runMode.equals("fix") || runMode.equals("fix_only_services")) {
- log.info("Mode {}. Start fix", runMode);
- if (!fix(vfLst, serviceList, nodeToFixTosca, vfToFixTosca, serviceToFixTosca)) {
- log.info("Mode {}. Fix finished with failure", runMode);
- return false;
- }
- log.info("Mode {}. Fix finished with success", runMode);
- }
- return true;
- }
+ doFixTosca(nodeToFixTosca, vfToFixTosca, serviceToFixTosca);
- private boolean fetchFaultVf(List<Resource> vfLst, long time) {
- log.info("Find fault VF ");
- String fileName = "fault_" + time + ".csv";
+ if (fixComponent.equals("vf_only")) {
+ if (!fetchFaultVf(fixComponent, vfLst, time)) {
+ return false;
+ }
+ } else {
+ if (!fetchServices(fixComponent, serviceList, time)) {
+ return false;
+ }
+ }
+ if (runMode.equals("service_vf") || runMode.equals("fix")) {
+ log.info("Mode {}. Find problem VFs", runMode);
+ if (!fetchVf(serviceList, vfLst, time)) {
+ log.info("Mode {}. Find problem VFs finished with failure", runMode);
+ return false;
+ }
+ log.info("Mode {}. Find problem VFs finished with success", runMode);
+ }
+ if (runMode.equals("fix") || runMode.equals("fix_only_services")) {
+ log.info("Mode {}. Start fix", runMode);
+ if (!fix(vfLst, serviceList, nodeToFixTosca, vfToFixTosca, serviceToFixTosca)) {
+ log.info("Mode {}. Fix finished with failure", runMode);
+ return false;
+ }
+ log.info("Mode {}. Fix finished with success", runMode);
+ }
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"))) {
- writer.write("vf name, vf id, state, version\n");
+ return true;
+ }
- Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
- hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
- hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF.name());
+ private boolean fetchFaultVf(String fixComponent, List<Resource> vfLst, long time) {
+ log.info("Find fault VF ");
+ Writer writer = null;
+ try {
+ String fileName = "fault_" + time + ".csv";
+ writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"));
+ writer.write("vf name, vf id, state, version\n");
- Map<GraphPropertyEnum, Object> hasNotProps = new HashMap<>();
- hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
- log.info("Try to fetch resources with properties {} and not {}", hasProps, hasNotProps);
+ Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
+ hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
+ hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF.name());
- Either<List<GraphVertex>, TitanOperationStatus> servicesByCriteria = titanDao
- .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll);
- if (servicesByCriteria.isRight()) {
- log.info("Failed to fetch resources {}", servicesByCriteria.right().value());
- return false;
- }
- List<GraphVertex> resources = servicesByCriteria.left().value();
- for (GraphVertex gv : resources) {
- ComponentParametersView filter = new ComponentParametersView(true);
- filter.setIgnoreComponentInstances(false);
- filter.setIgnoreArtifacts(false);
- filter.setIgnoreGroups(false);
+ Map<GraphPropertyEnum, Object> hasNotProps = new HashMap<>();
+ hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
+ log.info("Try to fetch resources with properties {} and not {}", hasProps, hasNotProps);
- Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade
- .getToscaElement(gv.getUniqueId());
- if (toscaElement.isRight()) {
- log.info("Failed to fetch resources {} {}", gv.getUniqueId(), toscaElement.right().value());
- return false;
- }
+ Either<List<GraphVertex>, TitanOperationStatus> servicesByCriteria = titanDao
+ .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll);
+ if (servicesByCriteria.isRight()) {
+ log.info("Failed to fetch resources {}", servicesByCriteria.right().value());
+ return false;
+ }
+ List<GraphVertex> resources = servicesByCriteria.left().value();
+ for (GraphVertex gv : resources) {
+ ComponentParametersView filter = new ComponentParametersView(true);
+ filter.setIgnoreComponentInstances(false);
+ filter.setIgnoreArtifacts(false);
+ filter.setIgnoreGroups(false);
- Resource resource = toscaElement.left().value();
- String resourceName = resource.getName();
- Map<String, ArtifactDefinition> deploymentArtifacts = resource.getDeploymentArtifacts();
- List<GroupDefinition> groups = resource.getGroups();
- if (groups == null || groups.isEmpty()) {
- log.info("No groups for resource {} id {} ", resourceName, gv.getUniqueId());
- continue;
- }
- boolean isProblematic = false;
- for (GroupDefinition gr : groups) {
- if ((gr.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE))
- && (isProblematicGroup(gr, resourceName, deploymentArtifacts))) {
- isProblematic = true;
- break;
- }
- }
- if (isProblematic) {
- vfLst.add(resource);
- writeModuleResultToFile(writer, resource, null);
- writer.flush();
- }
- titanDao.commit();
- }
+ Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaElement(gv.getUniqueId());
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch resources {} {}", gv.getUniqueId(), toscaElement.right().value());
+ continue;
+ }
- } catch (Exception e) {
- log.info("Failed to fetch vf resources ", e);
- return false;
- } finally {
- titanDao.commit();
- }
- return true;
- }
+ Resource resource = toscaElement.left().value();
+ String resourceName = resource.getName();
+ Map<String, ArtifactDefinition> deploymentArtifacts = resource.getDeploymentArtifacts();
+ List<GroupDefinition> groups = resource.getGroups();
+ if (groups == null || groups.isEmpty()) {
+ log.info("No groups for resource {} id {} ", resourceName, gv.getUniqueId());
+ continue;
+ }
+ boolean isProblematic = false;
+ for (GroupDefinition gr : groups) {
+ if (gr.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
+ if (isProblematicGroup(gr, resourceName, deploymentArtifacts)) {
+ isProblematic = true;
+ break;
+ }
+ }
+ }
+ if (isProblematic) {
+ vfLst.add(resource);
+ writeModuleResultToFile(writer, resource, null);
+ writer.flush();
+ }
+ titanDao.commit();
+ }
- private boolean fetchVf(List<Service> serviceList, List<Resource> vfLst, long time) {
- log.info("Find problem VF ");
- if (serviceList.isEmpty()) {
- log.info("No services as input");
- return true;
- }
- String fileName = "problemVf_" + time + ".csv";
+ } catch (Exception e) {
+ log.info("Failed to fetch vf resources ", e);
+ return false;
+ } finally {
+ titanDao.commit();
+ try {
+ writer.flush();
+ writer.close();
+ } catch (Exception ex) {
+ /* ignore */
+ }
+ }
+ return true;
+ }
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"))) {
- writer.write("vf name, vf id, state, version, example service name\n");
- Set<String> vfIds = new HashSet<>();
- for (Service service : serviceList) {
- List<ComponentInstance> componentInstances = service.getComponentInstances().stream()
- .filter(ci -> ci.getOriginType().equals(OriginTypeEnum.VF)).collect(Collectors.toList());
- for (ComponentInstance ci : componentInstances) {
- if (!vfIds.contains(ci.getComponentUid())) {
- vfIds.add(ci.getComponentUid());
- Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade
- .getToscaElement(ci.getComponentUid());
- if (toscaElement.isRight()) {
- log.info("Failed to fetch resource {} {}", ci.getComponentUid(),
- toscaElement.right().value());
- return false;
- }
- Resource resource = toscaElement.left().value();
- if (resource.getResourceType().equals(ResourceTypeEnum.VF)) {
- vfLst.add(resource);
- writeModuleResultToFile(writer, resource, service);
- writer.flush();
- titanDao.commit();
- }
- }
- }
- }
- log.info("output file with list of Vf : {}", fileName);
- } catch (Exception e) {
- log.info("Failed to fetch services ", e);
- return false;
- } finally {
- titanDao.commit();
- }
- return true;
- }
+ private boolean fetchVf(List<Service> serviceList, List<Resource> vfLst, long time) {
+ log.info("Find problem VF ");
+ if (serviceList.isEmpty()) {
+ log.info("No services as input");
+ return true;
+ }
+ Writer writer = null;
+ try {
+ String fileName = "problemVf_" + time + ".csv";
+ writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"));
+ writer.write("vf name, vf id, state, version, example service name\n");
+ Set<String> vfIds = new HashSet<>();
+ for (Service service : serviceList) {
+ List<ComponentInstance> componentInstances = service.getComponentInstances().stream()
+ .filter(ci -> ci.getOriginType().equals(OriginTypeEnum.VF)).collect(Collectors.toList());
+ for (ComponentInstance ci : componentInstances) {
+ if (!vfIds.contains(ci.getComponentUid())) {
+ vfIds.add(ci.getComponentUid());
+ ComponentParametersView filter = new ComponentParametersView(true);
+ filter.setIgnoreComponentInstances(false);
+ filter.setIgnoreArtifacts(false);
+ filter.setIgnoreGroups(false);
+ Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaElement(ci.getComponentUid(), filter);
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch resource {} {}", ci.getComponentUid(),
+ toscaElement.right().value());
+ continue;
+ }
+ Resource resource = toscaElement.left().value();
+ if (resource.getResourceType().equals(ResourceTypeEnum.VF)) {
+ vfLst.add(resource);
+ writeModuleResultToFile(writer, resource, service);
+ writer.flush();
+
+ }
+ titanDao.commit();
+ }
+ }
+ }
+ log.info("output file with list of Vf : {}", fileName);
+ } catch (Exception e) {
+ log.info("Failed to fetch services ", e);
+ return false;
+ } finally {
+ titanDao.commit();
+ try {
+ writer.flush();
+ writer.close();
+ } catch (Exception ex) {
+ /* ignore */
+ }
+ }
+ return true;
+ }
- private boolean fetchServices(String fixServices, List<Service> serviceList, long time) {
- log.info("Find problem Services {}", fixServices);
- String fileName = "problemService_" + time + ".csv";
+ private boolean fetchServices(String fixServices, List<Service> serviceList, long time) {
+ log.info("Find problem Services {}", fixServices);
+ Writer writer = null;
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"))) {
- writer.write("service name, service id, state, version\n");
+ try {
+ String fileName = "problemService_" + time + ".csv";
+ writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"));
+ writer.write("service name, service id, state, version\n");
- Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
- hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
- if (fixServices.equals("distributed_only")) {
- hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
- hasProps.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTED.name());
- }
+ Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
+ hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ if (fixServices.equals("distributed_only")) {
+ hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ hasProps.put(GraphPropertyEnum.DISTRIBUTION_STATUS, DistributionStatusEnum.DISTRIBUTED.name());
+ }
- Map<GraphPropertyEnum, Object> hasNotProps = new HashMap<>();
- hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
- log.info("Try to fetch services with properties {} and not {}", hasProps, hasNotProps);
+ Map<GraphPropertyEnum, Object> hasNotProps = new HashMap<>();
+ hasNotProps.put(GraphPropertyEnum.IS_DELETED, true);
+ log.info("Try to fetch services with properties {} and not {}", hasProps, hasNotProps);
- Either<List<GraphVertex>, TitanOperationStatus> servicesByCriteria = titanDao
- .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll);
- if (servicesByCriteria.isRight()) {
- log.info("Failed to fetch services {}", servicesByCriteria.right().value());
- return false;
- }
+ Either<List<GraphVertex>, TitanOperationStatus> servicesByCriteria = titanDao
+ .getByCriteria(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps, hasNotProps, JsonParseFlagEnum.ParseAll);
+ if (servicesByCriteria.isRight()) {
+ log.info("Failed to fetch services {}", servicesByCriteria.right().value());
+ return false;
+ }
+ List<GraphVertex> services = servicesByCriteria.left().value();
+ for (GraphVertex gv : services) {
+ ComponentParametersView filter = new ComponentParametersView(true);
+ filter.setIgnoreComponentInstances(false);
+ filter.setIgnoreArtifacts(false);
+ filter.setIgnoreGroups(false);
- List<GraphVertex> services = servicesByCriteria.left().value();
- for (GraphVertex gv : services) {
- ComponentParametersView filter = new ComponentParametersView(true);
- filter.setIgnoreComponentInstances(false);
- filter.setIgnoreArtifacts(false);
- filter.setIgnoreGroups(false);
+ Either<Service, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaElement(gv.getUniqueId(), filter);
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch service {} {}", gv.getUniqueId(), toscaElement.right().value());
+ continue;
+ }
+ Service service = toscaElement.left().value();
+
+ String serviceName = (String) gv.getMetadataProperty(GraphPropertyEnum.NAME);
+
+ boolean isProblematic = isProblematicService(service, serviceName);
+ if (isProblematic) {
+ serviceList.add(service);
+ writeModuleResultToFile(writer, service, null);
+ writer.flush();
+
+ }
+
+ titanDao.commit();
+ }
+ log.info("output file with list of services : {}", fileName);
+ } catch (Exception e) {
+ log.info("Failed to fetch services ", e);
+ return false;
+ } finally {
+ titanDao.commit();
+ try {
+ writer.flush();
+ writer.close();
+ } catch (Exception ex) {
+ /* ignore */
+ }
+ }
+ return true;
+ }
- Either<Service, StorageOperationStatus> toscaElement = toscaOperationFacade
- .getToscaElement(gv.getUniqueId());
- if (toscaElement.isRight()) {
- log.info("Failed to fetch service {} {}", gv.getUniqueId(), toscaElement.right().value());
- continue;
- }
+ private boolean isProblematicService(Service service, String serviceName) throws IOException {
+
+ List<ComponentInstance> componentInstances = service.getComponentInstances();
+
+ if (componentInstances == null) {
+ log.info("No instances for service {} ", service.getUniqueId());
+ return false;
+ }
+ boolean isCheckVFModules = true;
+ if (service.getLifecycleState() == LifecycleStateEnum.NOT_CERTIFIED_CHECKIN ||
+ service.getLifecycleState() == LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT) {
+ isCheckVFModules = false;
+ }
+ for (ComponentInstance ci : componentInstances) {
+ Map<String, ArtifactDefinition> deploymentArtifacts = ci.getDeploymentArtifacts();
+ List<GroupInstance> groupInstances = ci.getGroupInstances();
+ if (groupInstances == null || groupInstances.isEmpty()) {
+ log.info("No instance groups for instance {} in service {} id {} ", ci.getName(), serviceName,
+ service.getUniqueId());
+ continue;
+ }
+ List<VfModuleArtifactPayloadEx> vfModules = null;
+ if (isCheckVFModules) {
+ Optional<ArtifactDefinition> optionalVfModuleArtifact = deploymentArtifacts.values().stream()
+ .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.name())).findAny();
+
+ if (!optionalVfModuleArtifact.isPresent())
+ return true;
+
+ ArtifactDefinition vfModuleArtifact = optionalVfModuleArtifact.get();
+ Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> vfModulesEither = parseVFModuleJson(vfModuleArtifact);
+ if (vfModulesEither.isRight()) {
+ log.error("Failed to parse vfModule for service {} status is {}", service.getUniqueId(), vfModulesEither.right().value());
+ return true;
+ }
+ vfModules = vfModulesEither.left().value();
+ if (vfModules == null || vfModules.isEmpty()) {
+ log.info("vfModules empty for service {}", service.getUniqueId());
+ return true;
+ }
+ }
- Service service = toscaElement.left().value();
- List<ComponentInstance> componentInstances = service.getComponentInstances();
- boolean isProblematic = false;
- if (componentInstances == null) {
- log.info("No instances for service {} ", gv.getUniqueId());
- continue;
- }
- String serviceName = (String) gv.getMetadataProperty(GraphPropertyEnum.NAME);
+ for (GroupInstance gi : groupInstances) {
+ if (gi.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
+ VfModuleArtifactPayloadEx vfModule = null;
+ if (isCheckVFModules && vfModules != null) {
+ Optional<VfModuleArtifactPayloadEx> op = vfModules.stream().filter(vf -> vf.getVfModuleModelName().equals(gi.getGroupName())).findAny();
+ if (!op.isPresent()) {
+ log.error("Failed to find vfModule for group {}", gi.getGroupName());
+ return true;
+ }
+ vfModule = op.get();
+ }
+ if (isProblematicGroupInstance(gi, ci.getName(), serviceName, deploymentArtifacts, vfModule)) {
+ return true;
+ }
+ }
+ }
+
+ }
+ return false;
+ }
- for (ComponentInstance ci : componentInstances) {
- Map<String, ArtifactDefinition> deploymentArtifacts = ci.getDeploymentArtifacts();
- List<GroupInstance> groupInstances = ci.getGroupInstances();
- if (groupInstances == null || groupInstances.isEmpty()) {
- log.info("No instance groups for instance {} in service {} id {} ", ci.getName(), serviceName,
- gv.getUniqueId());
- continue;
- }
+ private boolean isProblematicGroup(GroupDefinition gr, String resourceName,
+ Map<String, ArtifactDefinition> deploymentArtifacts) {
+ List<String> artifacts = gr.getArtifacts();
+ List<String> artifactsUuid = gr.getArtifactsUuid();
+ Set<String> artifactsSet = new HashSet<>();
- for (GroupInstance gi : groupInstances) {
- if (gi.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)
- && isProblematicGroupInstance(gi, ci.getName(), serviceName, deploymentArtifacts)) {
- isProblematic = true;
- break;
- }
- }
- if (isProblematic) {
- serviceList.add(service);
- writeModuleResultToFile(writer, service, null);
- writer.flush();
- break;
- }
- }
- titanDao.commit();
- }
- log.info("output file with list of services : {}", fileName);
- } catch (Exception e) {
- log.info("Failed to fetch services ", e);
- return false;
- } finally {
- titanDao.commit();
- }
- return true;
- }
+ if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) {
+ log.info("No groups in resource {} ", resourceName);
+ return true;
+ }
+ artifactsSet.addAll(artifacts);
+ if (artifactsSet.size() < artifacts.size()) {
+ log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", gr.getName(), resourceName);
+ return true;
+ }
+
+ if (artifacts.size() < artifactsUuid.size()) {
+ log.info(" artifacts.size() < artifactsUuid.size() group {} in resource {} ", gr.getName(), resourceName);
+ return true;
+ }
+ if (artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty())) {
+ log.info(
+ " artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() group {} in resource {} ",
+ gr.getName(), resourceName);
+ return true;
+ }
+ if (artifactsUuid.contains(null)) {
+ log.info(" artifactsUuid.contains(null) group {} in resource {} ", gr.getName(), resourceName);
+ return true;
+ }
+
+ for (String artifactId : artifacts) {
+ String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
+ ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel);
+ if (artifactDefinition == null) {
+ log.info(" artifactDefinition == null label {} group {} in resource {} ", artifactlabel, gr.getName(),
+ resourceName);
+ return true;
+ }
+ ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifactDefinition.getArtifactType());
+ if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
+ if (!artifactId.equals(artifactDefinition.getUniqueId())) {
+ log.info(
+ " !artifactId.equals(artifactDefinition.getUniqueId() artifact {} artId {} group {} in resource {} ",
+ artifactlabel, artifactId, gr.getName(), resourceName);
+ return true;
+ }
+ if (!artifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
+ log.info(
+ " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} group {} in resource {} ",
+ artifactlabel, gr.getName(), resourceName);
+ return true;
+ }
+ }
+ }
+ for (String artifactUUID : artifactsUuid) {
+ String label = findArtifactLabelFromArtifactId(artifactUUID);
+ if (label != null && !label.isEmpty() && !label.equals("")) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private boolean isProblematicGroupInstance(GroupInstance gi, String instName, String servicename,
+ Map<String, ArtifactDefinition> deploymentArtifacts, VfModuleArtifactPayloadEx vfModule) {
+ List<String> artifacts = gi.getArtifacts();
+ List<String> artifactsUuid = gi.getArtifactsUuid();
+ List<String> instArtifactsUuid = gi.getGroupInstanceArtifactsUuid();
+ List<String> instArtifactsId = gi.getGroupInstanceArtifacts();
+ Set<String> instArtifatIdSet = new HashSet<>();
+ Set<String> artifactsSet = new HashSet<>();
+
+ log.info("check group {} for instance {} ", gi.getGroupName(), instName);
+ if ((artifactsUuid == null || artifactsUuid.isEmpty()) && (artifacts == null || artifacts.isEmpty())) {
+ log.info("No instance groups for instance {} in service {} ", instName, servicename);
+ return true;
+ }
+ artifactsSet.addAll(artifacts);
+ if (artifactsSet.size() < artifacts.size()) {
+ log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", instName, servicename);
+ return true;
+ }
+
+ if (instArtifactsId != null && !instArtifactsId.isEmpty()) {
+ instArtifatIdSet.addAll(instArtifactsId);
+ }
+
+ if (artifacts.size() < artifactsUuid.size()) {
+ log.info(" artifacts.size() < artifactsUuid.size() inst {} in service {} ", instName, servicename);
+ return true;
+ }
+ if (!artifacts.isEmpty() && (artifactsUuid == null || artifactsUuid.isEmpty())) {
+ log.info(
+ " artifacts.size() > 0 && (artifactsUuid == null || artifactsUuid.isEmpty() inst {} in service {} ",
+ instName, servicename);
+ return true;
+ }
+ if (artifactsUuid.contains(null)) {
+ log.info(" artifactsUuid.contains(null) inst {} in service {} ", instName, servicename);
+ return true;
+ }
+ if (instArtifactsId != null && instArtifatIdSet.size() < instArtifactsId.size()) {
+ log.info(" instArtifatIdSet.size() < instArtifactsId.size() inst {} in service {} ", instName, servicename);
+ return true;
+ }
+
+ if ((instArtifactsId != null && instArtifactsUuid != null)
+ && instArtifactsId.size() != instArtifactsUuid.size()) {
+ log.info(" instArtifactsId.size() != instArtifactsUuid.size() inst {} in service {} ", instName,
+ servicename);
+ return true;
+ }
+
+ for (String artifactId : artifacts) {
+ String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
+ ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel);
+ if (artifactDefinition == null) {
+ log.info(" artifactDefinition == null label {} inst {} in service {} ", artifactlabel, instName,
+ servicename);
+ return true;
+ }
+ ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifactDefinition.getArtifactType());
+ if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
+ if (!artifactId.equals(artifactDefinition.getUniqueId())) {
+ log.info(
+ " !artifactId.equals(artifactDefinition.getUniqueId() artifact {} artId {} inst {} in service {} ",
+ artifactlabel, artifactId, instName, servicename);
+ return true;
+ }
+ if (!artifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
+ log.info(
+ " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ",
+ artifactlabel, instName, servicename);
+ return true;
+ }
+ } else {
+ if (instArtifactsUuid == null || instArtifactsUuid.isEmpty()) {
+ log.info(" instArtifactsUuid empty. label {} inst {} in service {} ", artifactlabel, instName,
+ servicename);
+ return true;
+ }
+ if (!instArtifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
+ log.info(
+ " instArtifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ",
+ artifactlabel, instName, servicename);
+ return true;
+ }
+ }
+ }
+ for (String artifactUUID : artifactsUuid) {
+ String label = findArtifactLabelFromArtifactId(artifactUUID);
+ if (label != null && !label.isEmpty() && !label.equals("")) {
+ return true;
+ }
+ }
+ if(vfModule != null ){
+ return isProblematicVFModule(vfModule, artifactsUuid, instArtifactsUuid);
+ }
+
+ return false;
+ }
+
+ private boolean isProblematicVFModule(VfModuleArtifactPayloadEx vfModule, List<String> artifactsUuid,
+ List<String> instArtifactsUuid) {
+ log.info(" isProblematicVFModule {} ", vfModule.getVfModuleModelName());
+ List<String> vfModuleArtifacts = vfModule.getArtifacts();
+ List<String> allArtifacts = new ArrayList<>();
+ allArtifacts.addAll(artifactsUuid);
+ if (instArtifactsUuid != null)
+ allArtifacts.addAll(instArtifactsUuid);
+ if ((vfModuleArtifacts == null || vfModuleArtifacts.isEmpty()) && !artifactsUuid.isEmpty()) {
+ log.error(" vfModuleArtifacts == null || vfModuleArtifacts.isEmpty()) && !artifactsUuid.isEmpty()");
+ return true;
+ }
+ if (vfModuleArtifacts != null) {
+ if (vfModuleArtifacts.size() != allArtifacts.size()) {
+ log.error(" vfModuleArtifacts.size() != allArtifacts.size()");
+ return true;
+ }
+ for (String vfModuleArtifact : vfModuleArtifacts) {
+ Optional<String> op = allArtifacts.stream().filter(a -> a.equals(vfModuleArtifact)).findAny();
+ if (!op.isPresent()) {
+ log.error("failed to find artifact {} in group artifacts {}", vfModuleArtifact, allArtifacts);
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+
+
+ private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
+ Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
+ boolean res = true;
+ log.info(" Fix started ***** ");
+ if (vfLst != null && !vfLst.isEmpty()) {
+ res = fixVf(vfLst);
+
+ }
+
+ if (res && serviceList != null && !serviceList.isEmpty()) {
+ res = fixServices(serviceList);
+
+ }
+
+ Set<String> fixedIds = new HashSet<>();
+
+ long time = System.currentTimeMillis();
+ String fileName = "FailedGenerateTosca" + "_" + time + ".csv";
+ Writer writer = null;
+ try {
+ writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"));
+ writer.write("componentType, name, version, UID, UUID, invariantUUID, state\n");
+ List<Component> failedList = new ArrayList<>();
+
+ if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) {
+
+ generateAndSaveToscaArtifacts(nodesToFixTosca, fixedIds, null, failedList);
+
+ }
+ if (vfToFixTosca != null && !vfToFixTosca.isEmpty()) {
+
+ generateAndSaveToscaArtifacts(vfToFixTosca, fixedIds, vfLst, failedList);
+
+ }
+
+ for (Component component : vfLst == null ? Collections.<Resource>emptyList() : vfLst) {
+ res = generateToscaPerComponent(fixedIds, component);
+ if (res) {
+ TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(component);
+ Map<String, GroupDataDefinition> groups = topologyTemplate.getGroups();
+ res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.GROUPS, EdgeLabelEnum.GROUPS, groups);
+ if (res) {
+ Map<String, ArtifactDataDefinition> artifacts = topologyTemplate.getDeploymentArtifacts();
+ res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.DEPLOYMENT_ARTIFACTS,
+ EdgeLabelEnum.DEPLOYMENT_ARTIFACTS, artifacts);
+ }
+ if (res) {
+ Map<String, ArtifactDataDefinition> artifacts = topologyTemplate.getToscaArtifacts();
+ res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS,
+ EdgeLabelEnum.TOSCA_ARTIFACTS, artifacts);
+ }
+ titanDao.commit();
+ } else {
+ failedList.add(component);
+ }
+ }
+
+ if (servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) {
+ generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList, failedList);
+
+ }
+
+
+ for (Component component : serviceList == null ? Collections.<Service>emptyList() : serviceList) {
+ res = generateToscaPerComponent(fixedIds, component);
+ if (res) {
+ TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(component);
+ Map<String, MapGroupsDataDefinition> groups = topologyTemplate.getInstGroups();
+ res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_GROUPS, EdgeLabelEnum.INST_GROUPS,
+ groups);
+
+ if (res) {
+ Map<String, MapArtifactDataDefinition> artifacts = topologyTemplate
+ .getInstDeploymentArtifacts();
+ res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_DEPLOYMENT_ARTIFACTS,
+ EdgeLabelEnum.INST_DEPLOYMENT_ARTIFACTS, artifacts);
+ }
+ if (res) {
+ Map<String, ArtifactDataDefinition> artifacts = topologyTemplate.getToscaArtifacts();
+ res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS,
+ EdgeLabelEnum.TOSCA_ARTIFACTS, artifacts);
+ }
+ titanDao.commit();
+ } else {
+ failedList.add(component);
+ }
+
+ }
+ if (!failedList.isEmpty()) {
+ for (Component component : failedList) {
+ StringBuilder sb = new StringBuilder(component.getComponentType().getValue());
+ sb.append(",").append(component.getName()).append(",").append(component.getVersion()).append(",")
+ .append(component.getUniqueId()).append(",").append(component.getUUID()).append(",")
+ .append(component.getInvariantUUID()).append(",").append(component.getLifecycleState());
+
+ sb.append("\n");
+ writer.write(sb.toString());
+ }
+ writer.flush();
+ }
+ } catch (IOException e) {
+ log.error(e.getMessage(), e);
+ } finally {
+ titanDao.commit();
+ try {
+ if (writer != null) {
+ writer.flush();
+ writer.close();
+ }
+ } catch (Exception ex) {
+ /* ignore */
+ }
+ }
+ log.info(" Fix finished with res {} ***** ", res);
+ return res;
+ }
+
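+ /**
+ * Regenerates and persists the TOSCA artifacts for every component in the given map, skipping components
+ * that also appear in the failed-groups list since those are regenerated in the main fix loop.
+ */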
+ private boolean generateAndSaveToscaArtifacts(Map<String, List<Component>> nodesToFixTosca, Set<String> fixedIds,
+ List<? extends Component> componentsWithFailedGroups, List<Component> failedList) {
+ boolean res = true;
+ log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts started ");
+ for (Map.Entry<String, List<Component>> entry : nodesToFixTosca.entrySet()) {
+
+ List<Component> components = entry.getValue();
+ for (Component c : components) {
+ log.debug("Migration1707ArtifactUuidFix fix tosca on component : id {}, name {} ", c.getUniqueId(),
+ c.getName());
+ if (componentsWithFailedGroups != null) {
+ boolean hasFailedGroups = componentsWithFailedGroups.stream()
+ .anyMatch(cg -> cg.getUniqueId().equals(c.getUniqueId()));
+ if (!hasFailedGroups) {
+ res = generateToscaPerComponent(fixedIds, c);
+ }
+ } else {
+ res = generateToscaPerComponent(fixedIds, c);
+ }
+ if (res) {
+ ToscaElement topologyTemplate = ModelConverter.convertToToscaElement(c);
+ Map<String, ArtifactDataDefinition> artifacts = topologyTemplate.getToscaArtifacts();
+ res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS,
+ artifacts);
+ titanDao.commit();
+ } else {
+ failedList.add(c);
+ }
+
+ }
+ }
+ log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts finished with res {} ", res);
+ return res;
+ }
+
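+ /**
+ * Loads the full component, copies the repaired groups/group instances onto it, regenerates the TOSCA
+ * template and CSAR artifacts and stores the result back on the given component; commits or rolls back
+ * the graph transaction per component.
+ */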
+ private boolean generateToscaPerComponent(Set<String> fixedIds, Component c) {
+ boolean res = true;
+ log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent started component name {} id {}",
+ c.getName(), c.getUniqueId());
+ try {
+ Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaFullElement(c.getUniqueId());
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch resources {} {}", c.getUniqueId(), toscaElement.right().value());
+ return false;
+ }
+ Component toscaElementFull = toscaElement.left().value();
+ toscaElementFull.setGroups(c.getGroups());
+ List<ComponentInstance> ciListFull = toscaElementFull.getComponentInstances();
+ List<ComponentInstance> ciList = c.getComponentInstances();
+ if (ciListFull != null && !ciListFull.isEmpty() && ciList != null) {
+ ciListFull.forEach(ciFull -> ciList.stream()
+ .filter(ci -> ci.getUniqueId().equals(ciFull.getUniqueId()))
+ .findAny()
+ .ifPresent(compInst -> ciFull.setGroupInstances(compInst.getGroupInstances())));
+ }
+
+ Either<Component, ToscaError> either = generateToscaArtifact(toscaElementFull);
+
+ if (either.isRight()) {
+ log.error("Couldn't generate and save tosca template component unique id {}, name {} error: {}",
+ toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value());
+ res = false;
+
+ }
+
+ if (res) {
+ c.setToscaArtifacts(either.left().value().getToscaArtifacts());
+ fixedIds.add(toscaElementFull.getUniqueId());
+ }
+ } finally {
+ if (res)
+ titanDao.commit();
+ else
+ titanDao.rollback();
+ }
+ log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent finished component name {} id {} res {}",
+ c.getName(), c.getUniqueId(), res);
+ return res;
+ }
+
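+ /**
+ * Overwrites the JSON of the child data vertex reached by the given edge label with the supplied map.
+ * Returns true when the child vertex does not exist, since there is nothing to fix in that case.
+ */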
+ private <T extends ToscaDataDefinition> boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum,
+ EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
+ log.debug("fixDataOnGraph started: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(),
+ edgeLabelEnum, groups.size());
+ boolean res = true;
+ Either<GraphVertex, TitanOperationStatus> getResponse = titanDao.getVertexById(componentId,
+ JsonParseFlagEnum.NoParse);
+ if (getResponse.isRight()) {
+ log.debug("Couldn't fetch component unique id {}, error: {}", componentId, getResponse.right().value());
+ res = false;
+
+ }
+ if (res) {
+ GraphVertex componentVertex = getResponse.left().value();
+
+ GraphVertex toscaDataVertex = null;
+ Either<GraphVertex, TitanOperationStatus> groupVertexEither = titanDao.getChildVertex(componentVertex,
+ edgeLabelEnum, JsonParseFlagEnum.ParseJson);
+ if (groupVertexEither.isRight() && groupVertexEither.right().value() == TitanOperationStatus.NOT_FOUND) {
+ log.debug("no child {} vertex for component unique id {}, error: {}", edgeLabelEnum, componentId,
+ groupVertexEither.right().value());
+ return true;
+ }
+ if (groupVertexEither.isRight()) {
+ res = false;
+ log.debug("failed to get child {} vertex for component unique id {}, error: {}", edgeLabelEnum,
+ componentId, groupVertexEither.right().value());
+ }
+ if (res) {
+ toscaDataVertex = groupVertexEither.left().value();
+ toscaDataVertex.setJson(groups);
+ Either<GraphVertex, TitanOperationStatus> updatevertexEither = titanDao.updateVertex(toscaDataVertex);
+ if (updatevertexEither.isRight()) {
+ log.debug("failed to update vertex for component unique id {}, error: {}", componentId,
+ updatevertexEither.right().value());
+ titanDao.rollback();
+ return false;
+ }
+ }
+ }
+ log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(),
+ edgeLabelEnum, res);
+ return res;
+ }
+
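+ /**
+ * Fixes the VF module group instances and deployment artifacts of every component instance in each service.
+ */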
+ private boolean fixServices(List<Service> serviceList) {
+ for (Service service : serviceList) {
+ log.debug("Migration1707ArtifactUuidFix fix service: id {}, name {} ", service.getUniqueId(),
+ service.getName());
+ List<ComponentInstance> instances = service.getComponentInstances();
+ for (ComponentInstance instance : instances) {
+ fixComponentInstances(service, instance);
+ }
+
+ }
+ return true;
+
+ }
+
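+ /**
+ * Repairs the VF module group instances of a single component instance, removes empty ones and creates or
+ * refreshes the instance vfModulesMetadata deployment artifact.
+ */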
+ private void fixComponentInstances(Service service, ComponentInstance instance) {
+ Map<String, ArtifactDefinition> artifactsMap = instance.getDeploymentArtifacts();
+ List<GroupInstance> groupsList = instance.getGroupInstances();
+ if (groupsList != null && artifactsMap != null) {
+ List<GroupInstance> groupsToDelete = new ArrayList<>();
+ for (GroupInstance group : groupsList) {
+ fixGroupInstances(service, artifactsMap, groupsToDelete, group);
+
+ }
+
+ if (!groupsToDelete.isEmpty()) {
+ log.debug("Migration1707ArtifactUuidFix delete group: resource id {}, group instance to delete {} ",
+ service.getUniqueId(), groupsToDelete);
+ groupsList.removeAll(groupsToDelete);
+
+ }
+
+ Optional<ArtifactDefinition> optionalVfModuleArtifact = artifactsMap.values().stream()
+ .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.name())).findAny();
+ ArtifactDefinition vfModuleArtifact = null;
+ if (!optionalVfModuleArtifact.isPresent()) {
+ vfModuleArtifact = createVfModuleArtifact(instance, service);
+ artifactsMap.put(vfModuleArtifact.getArtifactLabel(), vfModuleArtifact);
+ } else {
+ vfModuleArtifact = optionalVfModuleArtifact.get();
+ }
+ fillVfModuleInstHeatEnvPayload(service, instance, groupsList, vfModuleArtifact);
+ }
+ }
+
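+ /**
+ * Rebuilds the artifact id/UUID lists of a VF module group instance from the instance deployment artifacts
+ * and marks group instances left without artifacts for deletion.
+ */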
+ private void fixGroupInstances(Service service, Map<String, ArtifactDefinition> artifactsMap,
+ List<GroupInstance> groupsToDelete, GroupInstance group) {
+ if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
+ log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", service.getUniqueId(),
+ group.getName());
+ if(group.getArtifacts() != null){
+ Set<String> groupArtifactsSet = new HashSet<>(group.getArtifacts());
+ if(group.getGroupInstanceArtifacts() != null){
+ List<String> groupInsArtifacts = new ArrayList<>(group.getGroupInstanceArtifacts());
+ groupArtifactsSet.addAll(groupInsArtifacts);
+ }
+ List<String> groupArtifacts = new ArrayList<>(groupArtifactsSet);
+
+ clearGroupInstanceArtifacts(group);
+
+ for (String artifactId : groupArtifacts) {
+ fixArtifactUndergroupInstances(artifactsMap, group, groupArtifacts, artifactId);
+ }
+ }
+ if (group.getArtifacts() == null || group.getArtifacts().isEmpty()) {
+ log.debug(
+ "Migration1707ArtifactUuidFix fix groupInstance add to delete list: resource id {} name {} , group name {} ",
+ service.getUniqueId(), service.getName(), group.getName());
+ groupsToDelete.add(group);
+ }
+ }
+ }
+
+ private void clearGroupInstanceArtifacts(GroupInstance group) {
+ if (group.getArtifacts() != null) {
+ group.getArtifacts().clear();
+ } else {
+ group.setArtifacts(new ArrayList<>());
+ }
+ if (group.getArtifactsUuid() != null) {
+ group.getArtifactsUuid().clear();
+ } else {
+ group.setArtifactsUuid(new ArrayList<>());
+ }
+ if (group.getGroupInstanceArtifacts() != null) {
+ group.getGroupInstanceArtifacts().clear();
+ } else {
+ group.setGroupInstanceArtifacts(new ArrayList<>());
+ }
+ if (group.getGroupInstanceArtifactsUuid() != null) {
+ group.getGroupInstanceArtifactsUuid().clear();
+ } else {
+ group.setGroupInstanceArtifactsUuid(new ArrayList<>());
+ }
+ }
+
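+ /**
+ * Re-points a group instance entry at the correct artifact unique id/UUID from the deployment artifacts:
+ * HEAT_ENV artifacts go to the group instance artifact lists, and a lone HEAT_ARTIFACT is retyped to OTHER
+ * and dropped from the group.
+ */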
+ private void fixArtifactUndergroupInstances(Map<String, ArtifactDefinition> artifactsMap, GroupInstance group,
+ List<String> groupArtifacts, String artifactId) {
+ String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
+ log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ",
+ group.getName(), artifactId, artifactlabel);
+ if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) {
+ ArtifactDefinition artifact = artifactsMap.get(artifactlabel);
+ ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifact.getArtifactType());
+ String correctArtifactId = artifact.getUniqueId();
+ String correctArtifactUUID = artifact.getArtifactUUID();
+ if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
+ boolean isAddToGroup = true;
+ if (groupArtifacts.size() == 1 && artifactType == ArtifactTypeEnum.HEAT_ARTIFACT) {
+ isAddToGroup = false;
+ artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType());
+ }
+ if (isAddToGroup) {
+ log.debug(
+ "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ",
+ group.getName(), correctArtifactId, correctArtifactUUID);
+ group.getArtifacts().add(correctArtifactId);
+ if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
+ group.getArtifactsUuid().add(correctArtifactUUID);
+ }
+ }
+ } else {
+ log.debug(
+ "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ",
+ group.getName(), correctArtifactId, correctArtifactUUID);
+ Set<String> tmpSet = new HashSet<>(group.getGroupInstanceArtifacts());
+ tmpSet.add(correctArtifactId);
+ group.setGroupInstanceArtifacts(new ArrayList<>(tmpSet));
+ if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
+ Set<String> tmpSetUUID = new HashSet<>(group.getGroupInstanceArtifactsUuid());
+ tmpSetUUID.add(correctArtifactUUID);
+ group.setGroupInstanceArtifactsUuid(new ArrayList<>(tmpSetUUID));
+ }
+ }
+ }
+ }
+
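+ /**
+ * Fixes the VF module groups of each VF resource against its deployment artifacts and removes VF module
+ * groups left without artifacts.
+ */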
+ private boolean fixVf(List<Resource> vfLst) {
+ for (Resource resource : vfLst) {
+ log.debug("Migration1707ArtifactUuidFix fix resource: id {}, name {} ", resource.getUniqueId(),
+ resource.getName());
+ Map<String, ArtifactDefinition> artifactsMap = resource.getDeploymentArtifacts();
+ List<GroupDefinition> groupsList = resource.getGroups();
+ List<GroupDefinition> groupsToDelete = new ArrayList<>();
+ if (groupsList != null && artifactsMap != null) {
+ for (GroupDefinition group : groupsList) {
+ if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) {
+ fixVfGroup(resource, artifactsMap, group);
+ }
+ if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)
+ && (group.getArtifacts() == null || group.getArtifacts().isEmpty())) {
+ log.debug(
+ "Migration1707ArtifactUuidFix add group to delete list fix resource: id {}, name {}, group name {} ",
+ resource.getUniqueId(), resource.getName(), group.getName());
+ groupsToDelete.add(group);
+ }
+ }
+
+ if (!groupsToDelete.isEmpty()) {
+ groupsList.removeAll(groupsToDelete);
+
+ }
+ }
+
+ }
+
+ return true;
+ }
+
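+ /**
+ * Rebuilds the artifact and artifact UUID lists of a VF module group from the resource deployment artifacts.
+ */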
+ private void fixVfGroup(Resource resource, Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group) {
+ log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", resource.getUniqueId(),
+ group.getName());
+ Set<String> groupArtifactsSet = new HashSet<>(group.getArtifacts());
+ List<String> groupArtifacts = new ArrayList<>(groupArtifactsSet);
+ group.getArtifacts().clear();
+ group.getArtifactsUuid().clear();
- private boolean isProblematicGroup(GroupDefinition gr, String resourceName,
- Map<String, ArtifactDefinition> deploymentArtifacts) {
- List<String> artifacts = gr.getArtifacts();
- List<String> artifactsUuid = gr.getArtifactsUuid();
+ for (String artifactId : groupArtifacts) {
+ fixArtifactUnderGroup(artifactsMap, group, groupArtifacts, artifactId);
+ }
+ }
- if (artifactsUuid == null || artifactsUuid.isEmpty() || artifacts == null || artifacts.isEmpty()) {
- log.info("No groups in resource {} ", resourceName);
- return true;
- }
- Set<String> artifactsSet = new HashSet<>(artifacts);
- if (artifactsSet.size() < artifacts.size()) {
- log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", gr.getName(), resourceName);
- return true;
- }
- if (artifacts.size() < artifactsUuid.size()) {
- log.info(" artifacts.size() < artifactsUuid.size() group {} in resource {} ", gr.getName(), resourceName);
- return true;
- }
- if (artifactsUuid.contains(null)) {
- log.info(" artifactsUuid.contains(null) group {} in resource {} ", gr.getName(), resourceName);
- return true;
- }
+ private void fixArtifactUnderGroup(Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group,
+ List<String> groupArtifacts, String artifactId) {
- for (String artifactId : artifacts) {
- String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
- ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel);
- if (artifactDefinition == null) {
- log.info(" artifactDefinition == null label {} group {} in resource {} ", artifactlabel, gr.getName(),
- resourceName);
- return true;
- }
- ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifactDefinition.getArtifactType());
- if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
- if (!artifactId.equals(artifactDefinition.getUniqueId())) {
- log.info(
- " !artifactId.equals(artifactDefinition.getUniqueId() artifact {} artId {} group {} in resource {} ",
- artifactlabel, artifactId, gr.getName(), resourceName);
- return true;
- }
- if (!artifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
- log.info(
- " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} group {} in resource {} ",
- artifactlabel, gr.getName(), resourceName);
- return true;
- }
- }
- }
+ String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
+ log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ",
+ group.getName(), artifactId, artifactlabel);
+ if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) {
+ ArtifactDefinition artifact = artifactsMap.get(artifactlabel);
+ String correctArtifactId = artifact.getUniqueId();
+ String correctArtifactUUID = artifact.getArtifactUUID();
+ boolean isAddToGroup = true;
+ if (groupArtifacts.size() == 1) {
+ ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifact.getArtifactType());
+ if (artifactType == ArtifactTypeEnum.HEAT_ARTIFACT) {
+ isAddToGroup = false;
+ artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType());
+ }
+ }
+ if (isAddToGroup) {
+ log.debug(
+ "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ",
+ group.getName(), correctArtifactId, correctArtifactUUID);
+ group.getArtifacts().add(correctArtifactId);
+ if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
+ group.getArtifactsUuid().add(correctArtifactUUID);
+ }
+ }
- for (String artifactUuid : artifactsUuid) {
- String label = findArtifactLabelFromArtifactId(artifactUuid);
- if (label != null && !label.isEmpty()) {
- return true;
- }
- }
- return false;
- }
+ }
+ }
- private boolean isProblematicGroupInstance(GroupInstance gi, String instName, String servicename,
- Map<String, ArtifactDefinition> deploymentArtifacts) {
- List<String> artifacts = gi.getArtifacts();
- List<String> artifactsUuid = gi.getArtifactsUuid();
- List<String> instArtifactsUuid = gi.getGroupInstanceArtifactsUuid();
- List<String> instArtifactsId = gi.getGroupInstanceArtifacts();
- Set<String> instArtifatIdSet = new HashSet<>();
+ private String findArtifactLabelFromArtifactId(String artifactId) {
+ String artifactLabel = "";
- if (artifactsUuid == null || artifactsUuid.isEmpty() || artifacts == null || artifacts.isEmpty()) {
- log.info("No instance groups for instance {} in service {} ", instName, servicename);
- return true;
- }
- Set<String> artifactsSet = new HashSet<>(artifacts);
- if (artifactsSet.size() < artifacts.size()) {
- log.info(" artifactsSet.size() < artifacts.size() group {} in resource {} ", instName, servicename);
- return true;
- }
- if (instArtifactsId != null && !instArtifactsId.isEmpty()) {
- instArtifatIdSet.addAll(instArtifactsId);
- }
- if (artifacts.size() < artifactsUuid.size()) {
- log.info(" artifacts.size() < artifactsUuid.size() inst {} in service {} ", instName, servicename);
- return true;
- }
- if (artifactsUuid.contains(null)) {
- log.info(" artifactsUuid.contains(null) inst {} in service {} ", instName, servicename);
- return true;
- }
- if (instArtifactsId != null && instArtifatIdSet.size() < instArtifactsId.size()) {
- log.info(" instArtifatIdSet.size() < instArtifactsId.size() inst {} in service {} ", instName, servicename);
- return true;
- }
- if ((instArtifactsId != null && instArtifactsUuid != null)
- && instArtifactsId.size() != instArtifactsUuid.size()) {
- log.info(" instArtifactsId.size() != instArtifactsUuid.size() inst {} in service {} ", instName,
- servicename);
- return true;
- }
+ int index = artifactId.lastIndexOf('.');
+ if (index > 0 && index + 1 < artifactId.length())
+ artifactLabel = artifactId.substring(index + 1);
+ return artifactLabel;
+ }
- for (String artifactId : artifacts) {
- String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
- ArtifactDefinition artifactDefinition = deploymentArtifacts.get(artifactlabel);
- if (artifactDefinition == null) {
- log.info(" artifactDefinition == null label {} inst {} in service {} ", artifactlabel, instName,
- servicename);
- return true;
- }
- ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifactDefinition.getArtifactType());
- if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
- if (!artifactId.equals(artifactDefinition.getUniqueId())) {
- log.info(
- " !artifactId.equals(artifactDefinition.getUniqueId() artifact {} artId {} inst {} in service {} ",
- artifactlabel, artifactId, instName, servicename);
- return true;
- }
- if (!artifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
- log.info(
- " artifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ",
- artifactlabel, instName, servicename);
- return true;
- }
- } else {
- if (instArtifactsUuid == null || instArtifactsUuid.isEmpty()) {
- log.info(" instArtifactsUuid empty. label {} inst {} in service {} ", artifactlabel, instName,
- servicename);
- return true;
- }
- if (!instArtifactsUuid.contains(artifactDefinition.getArtifactUUID())) {
- log.info(
- " instArtifactsUuid.contains(artifactDefinition.getArtifactUUID() label {} inst {} in service {} ",
- artifactlabel, instName, servicename);
- return true;
- }
- }
- }
- for (String artifactUuid : artifactsUuid) {
- String label = findArtifactLabelFromArtifactId(artifactUuid);
- if (label != null && !label.isEmpty()) {
- return true;
- }
- }
- return false;
- }
+ private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component,
+ Service service) {
+ try {
+ // component name, unique id, state, version[, service name]
+ StringBuilder sb = new StringBuilder(component.getName());
+ sb.append(",").append(component.getUniqueId()).append(",").append(component.getLifecycleState()).append(",")
+ .append(component.getVersion());
+ if (service != null) {
+ sb.append(",").append(service.getName());
+ }
+ sb.append("\n");
+ writer.write(sb.toString());
+ } catch (IOException e) {
+ log.error(e.getMessage(), e);
+ }
+ }
- private boolean fix(List<Resource> vfLst, List<Service> serviceList, Map<String, List<Component>> nodesToFixTosca,
- Map<String, List<Component>> vfToFixTosca, Map<String, List<Component>> servicesToFixTosca) {
- boolean res = true;
- log.info(" Fix started ***** ");
- if (vfLst != null && !vfLst.isEmpty()) {
- res = fixVf(vfLst);
- }
- if (res && serviceList != null && !serviceList.isEmpty()) {
- res = fixServices(serviceList);
- }
+ private void writeModuleResultToFile(Writer writer, List<Component> components) {
+ try {
+ // component name, unique id, invariant UUID, state, version
+ for (Component component : components) {
+ StringBuilder sb = new StringBuilder(component.getName());
+ sb.append(",").append(component.getUniqueId()).append(",").append(component.getInvariantUUID())
+ .append(",").append(component.getLifecycleState()).append(",").append(component.getVersion());
- Set<String> fixedIds = new HashSet<>();
- if (res && nodesToFixTosca != null && !nodesToFixTosca.isEmpty()) {
- generateAndSaveToscaArtifacts(nodesToFixTosca, fixedIds, null);
+ sb.append("\n");
+ writer.write(sb.toString());
+ }
+ } catch (IOException e) {
- for (Map.Entry<String, List<Component>> entry : nodesToFixTosca.entrySet()) {
- List<Component> components = entry.getValue();
- for (Component c : components) {
+ log.error(e.getMessage(), e);
+ }
+ }
- ToscaElement topologyTemplate = ModelConverter.convertToToscaElement(c);
- Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
- res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS,
- arifacts);
- titanDao.commit();
- }
- }
- }
- if (res && vfToFixTosca != null && !vfToFixTosca.isEmpty()) {
- generateAndSaveToscaArtifacts(vfToFixTosca, fixedIds, vfLst);
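+ /**
+ * Scans certified node types, VFs and services for TOSCA artifacts that share the same esId and fills the
+ * given maps with the components whose TOSCA artifacts need to be regenerated.
+ */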
+ public boolean doFixTosca(Map<String, List<Component>> nodeToFix, Map<String, List<Component>> vfToFix,
+ Map<String, List<Component>> serviceToFix) {
- for (Map.Entry<String, List<Component>> entry : vfToFixTosca.entrySet()) {
- List<Component> components = entry.getValue();
- for (Component c : components) {
- TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(c);
- Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
- res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS,
- arifacts);
- titanDao.commit();
- }
- }
- }
+ Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
+ hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
+ hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
- if (res && servicesToFixTosca != null && !servicesToFixTosca.isEmpty()) {
- generateAndSaveToscaArtifacts(servicesToFixTosca, fixedIds, serviceList);
+ Map<String, List<Component>> vertices = getVerticesToValidate(VertexTypeEnum.NODE_TYPE, hasProps);
+ boolean result = validateTosca(vertices, nodeToFix, "RESOURCE_TOSCA_ARTIFACTS");
- for (Map.Entry<String, List<Component>> entry : servicesToFixTosca.entrySet()) {
- List<Component> components = entry.getValue();
- for (Component c : components) {
- TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(c);
- Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
- res = fixDataOnGraph(c.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS, EdgeLabelEnum.TOSCA_ARTIFACTS,
- arifacts);
- titanDao.commit();
- }
- }
- }
+ hasProps.clear();
+ hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
+ hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF);
+ hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
- if (res) {
- for (Component component : vfLst) {
- generateToscaPerComponent(fixedIds, component);
+ vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
+ result = validateTosca(vertices, vfToFix, "VF_TOSCA_ARTIFACTS") && result;
- TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(component);
- Map<String, GroupDataDefinition> groups = topologyTemplate.getGroups();
- res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.GROUPS, EdgeLabelEnum.GROUPS, groups);
- if (res) {
- Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getDeploymentArtifacts();
- res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.DEPLOYMENT_ARTIFACTS,
- EdgeLabelEnum.DEPLOYMENT_ARTIFACTS, arifacts);
- }
- if (res) {
- Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
- res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS,
- EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
- }
- titanDao.commit();
- }
- }
+ hasProps.clear();
+ hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
+ hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
- if (res) {
- for (Component component : serviceList) {
- generateToscaPerComponent(fixedIds, component);
+ vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
+ result = validateTosca(vertices, serviceToFix, "SERVICE_TOSCA_ARTIFACTS") && result;
- TopologyTemplate topologyTemplate = ModelConverter.convertToToscaElement(component);
- Map<String, MapGroupsDataDefinition> groups = topologyTemplate.getInstGroups();
- res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_GROUPS, EdgeLabelEnum.INST_GROUPS,
- groups);
+ return result;
+ }
- if (res) {
- Map<String, MapArtifactDataDefinition> artifacts = topologyTemplate.getInstDeploymentArtifacts();
- res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.INST_DEPLOYMENT_ARTIFACTS,
- EdgeLabelEnum.INST_DEPLOYMENT_ARTIFACTS, artifacts);
- }
- if (res) {
- Map<String, ArtifactDataDefinition> arifacts = topologyTemplate.getToscaArtifacts();
- res = fixDataOnGraph(component.getUniqueId(), VertexTypeEnum.TOSCA_ARTIFACTS,
- EdgeLabelEnum.TOSCA_ARTIFACTS, arifacts);
- }
- titanDao.commit();
- }
- }
- log.info(" Fix finished with res {} ***** ", res);
- return res;
- }
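+ /**
+ * Fetches all vertices of the given type matching the supplied properties and returns the corresponding
+ * components grouped by invariant UUID.
+ */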
+ public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
+ Map<GraphPropertyEnum, Object> hasProps) {
- private boolean generateAndSaveToscaArtifacts(Map<String, List<Component>> nodesToFixTosca, Set<String> fixedIds,
- List<? extends Component> componentsWithFailedGroups) {
- boolean res = true;
- log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts started ");
- for (Map.Entry<String, List<Component>> entry : nodesToFixTosca.entrySet()) {
+ Map<String, List<Component>> result = new HashMap<>();
+ try {
- List<Component> component = entry.getValue();
- for (Component c : component) {
- log.debug("Migration1707ArtifactUuidFix fix tosca on component : id {}, name {} ", c.getUniqueId(),
- c.getName());
- if (componentsWithFailedGroups != null) {
- Optional<Component> op = (Optional<Component>) componentsWithFailedGroups.stream()
- .filter(cg -> cg.getUniqueId().equals(c.getUniqueId())).findAny();
- if (!op.isPresent()) {
- res = generateToscaPerComponent(fixedIds, c);
- }
- } else {
- res = generateToscaPerComponent(fixedIds, c);
- }
- }
- }
- log.debug("Migration1707ArtifactUuidFix generateAndSaveToscaArtifacts finished with res {} ", res);
- return res;
- }
+ Either<List<GraphVertex>, TitanOperationStatus> resultsEither = titanDao.getByCriteria(type, hasProps);
+ if (resultsEither.isRight()) {
+ log.error("getVerticesToValidate failed {} ",resultsEither.right().value());
+ return result;
+ }
+ log.info("getVerticesToValidate: {} vertices to scan", resultsEither.left().value().size());
+ List<GraphVertex> componentsList = resultsEither.left().value();
+ componentsList.forEach(vertex -> {
+ String invariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID);
+ if (!result.containsKey(invariantUuid)) {
+ result.put(invariantUuid, new ArrayList<>());
+ }
+ List<Component> compList = result.get(invariantUuid);
- private boolean generateToscaPerComponent(Set<String> fixedIds, Component c) {
- boolean res = true;
- log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent started component name {} id {}",
- c.getName(), c.getUniqueId());
- try {
- Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
- .getToscaFullElement(c.getUniqueId());
- if (toscaElement.isRight()) {
- log.info("Failed to fetch resources {} {}", c.getUniqueId(), toscaElement.right().value());
- return false;
- }
- Component toscaElementFull = toscaElement.left().value();
- toscaElementFull.setGroups(c.getGroups());
- List<ComponentInstance> ciListFull = toscaElementFull.getComponentInstances();
- List<ComponentInstance> ciList = c.getComponentInstances();
- if (ciListFull != null && !ciListFull.isEmpty()) {
- ciListFull.forEach(ciFull -> ciList.stream()
- .filter(ci -> ci.getUniqueId().equals(ciFull.getUniqueId()))
- .findAny()
- .ifPresent(compInst -> ciFull.setGroupInstances(compInst.getGroupInstances())));
- }
+ ComponentParametersView filter = new ComponentParametersView(true);
+ filter.setIgnoreArtifacts(false);
- Map<String, ArtifactDefinition> toscaArtifacts = c.getToscaArtifacts();
- log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent tocsa artifacts size {}",
- toscaArtifacts.size());
+ Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaElement(vertex.getUniqueId(), filter);
+ if (toscaElement.isRight()) {
+ log.error("getVerticesToValidate: failed to find element {} status is {}", vertex.getUniqueId(),
+ toscaElement.right().value());
+ } else {
+ compList.add(toscaElement.left().value());
+ }
+ titanDao.commit();
- Either<ArtifactDefinition, ToscaError> either = Either.right(ToscaError.GENERAL_ERROR);
- ArtifactDefinition toscaArtifact = null;
- Optional<ArtifactDefinition> op = toscaArtifacts.values().stream()
- .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_TEMPLATE.getType())).findAny();
+ });
- if (op.isPresent()) {
- toscaArtifact = op.get();
- }
+ } catch (Exception e) {
+ log.info("Failed to fetch vf resources ", e);
- if (toscaArtifact != null) {
- log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent artifact name {} id {} esId {}",
- toscaArtifact.getArtifactName(), toscaArtifact.getUniqueId(), toscaArtifact.getEsId());
- either = generateToscaArtifact(toscaElementFull, toscaArtifact);
- if (either.isRight()) {
- log.error("Couldn't generate and save tosca template component unique id {}, name {} error: {}",
- toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value());
- res = false;
+ } finally {
+ titanDao.commit();
- }
- }
- if (res) {
- ArtifactDefinition csarArtifact = null;
- op = toscaArtifacts.values().stream()
- .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())).findAny();
+ }
+ return result;
+ }
- if (op.isPresent()) {
- csarArtifact = op.get();
- }
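+ /**
+ * Writes a CSV report per category and collects the components whose TOSCA artifacts share an esId
+ * (and therefore need regeneration) into compToFix. Returns false only when the scan itself fails.
+ */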
+ public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix,
+ String name) {
+ boolean result = true;
+ long time = System.currentTimeMillis();
+ String fileName = name + "_" + time + ".csv";
+ Writer writer = null;
+ try {
+ writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"));
+ writer.write("name, UUID, invariantUUID, state, version\n");
+ for (Map.Entry<String, List<Component>> entry : vertices.entrySet()) {
+ List<Component> compList = entry.getValue();
+ Set<String> artifactEsId = new HashSet<>();
+ for (Component component : compList) {
+ Map<String, ArtifactDefinition> toscaArtifacts = component.getToscaArtifacts();
+ Optional<ArtifactDefinition> op = toscaArtifacts.values().stream()
+ .filter(a -> a.getEsId() != null && artifactEsId.contains(a.getEsId())).findAny();
+ if (op.isPresent()) {
+ result = false;
+ writeModuleResultToFile(writer, compList);
+ writer.flush();
+ break;
+ } else {
+ artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId)
+ .collect(Collectors.toList()));
+ }
+ }
+ if (!result) {
+ List<Component> compListfull = new ArrayList<>();
+ for (Component c : compList) {
+ ComponentParametersView filter = new ComponentParametersView(true);
+ filter.setIgnoreComponentInstances(false);
+ filter.setIgnoreArtifacts(false);
+ filter.setIgnoreGroups(false);
- if (csarArtifact != null) {
- log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent artifact name {} id {} esId {}",
- csarArtifact.getArtifactName(), csarArtifact.getUniqueId(), csarArtifact.getEsId());
- either = generateToscaArtifact(toscaElementFull, csarArtifact);
- if (either.isRight()) {
- log.error("Couldn't generate and save tosca csar for component uuid {}, id {}, name {}. error: {}",
- toscaElementFull.getUUID(), toscaElementFull.getUniqueId(), toscaElementFull.getName(), either.right().value());
- res = false;
+ Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
+ .getToscaElement(c.getUniqueId(), filter);
+ if (toscaElement.isRight()) {
+ log.error("validateTosca: failed to find element {} status is {}", c.getUniqueId(),
+ toscaElement.right().value());
+ } else {
+ compListfull.add(toscaElement.left().value());
+ }
+ this.titanDao.commit();
+ }
- }
- }
- }
- c.setToscaArtifacts(toscaArtifacts);
+ compToFix.put(entry.getKey(), compListfull);
+ result = true;
+ }
- if (res) {
- fixedIds.add(toscaElementFull.getUniqueId());
- }
- } finally {
- titanDao.commit();
- }
- log.debug("Migration1707ArtifactUuidFix generateToscaPerComponent finished component name {} id {} res {}",
- c.getName(), c.getUniqueId(), res);
- return res;
- }
+ }
- private <T extends ToscaDataDefinition> boolean fixDataOnGraph(String componentId, VertexTypeEnum vertexTypeEnum,
- EdgeLabelEnum edgeLabelEnum, Map<String, T> groups) {
- log.debug("amount groups to update: VertexTypeEnum {} EdgeLabelEnum {} data size {}", vertexTypeEnum.getName(),
- edgeLabelEnum, groups.size());
- boolean res = true;
- Either<GraphVertex, TitanOperationStatus> getResponse = titanDao.getVertexById(componentId,
- JsonParseFlagEnum.NoParse);
- if (getResponse.isRight()) {
- log.debug("Couldn't fetch component unique id {}, error: {}", componentId, getResponse.right().value());
- res = false;
- }
- if (res) {
- GraphVertex componentVertex = getResponse.left().value();
- GraphVertex toscaDataVertex;
- Either<GraphVertex, TitanOperationStatus> groupVertexEither = titanDao.getChildVertex(componentVertex,
- edgeLabelEnum, JsonParseFlagEnum.ParseJson);
- if (groupVertexEither.isRight() && groupVertexEither.right().value() == TitanOperationStatus.NOT_FOUND) {
- log.debug("no child {} vertex for component unique id {}, error: {}", edgeLabelEnum, componentId,
- groupVertexEither.right().value());
- return true;
- }
- if (groupVertexEither.isRight()) {
- res = false;
- log.debug("failed to get child {} vertex for component unique id {}, error: {}", edgeLabelEnum,
- componentId, groupVertexEither.right().value());
- }
- if (res) {
- toscaDataVertex = groupVertexEither.left().value();
- toscaDataVertex.setJson(groups);
- Either<GraphVertex, TitanOperationStatus> updatevertexEither = titanDao.updateVertex(toscaDataVertex);
- if (updatevertexEither.isRight()) {
- log.debug("failed to update vertex for component unique id {}, error: {}", componentId,
- updatevertexEither.right().value());
- titanDao.rollback();
- return false;
- }
- }
- }
- log.debug("Fix data on graph finished: VertexTypeEnum {} EdgeLabelEnum {} res {}", vertexTypeEnum.getName(),
- res);
- return res;
- }
+ } catch (Exception e) {
+ log.info("Failed to fetch vf resources ", e);
+ return false;
+ } finally {
+ titanDao.commit();
+ try {
+ if (writer != null) {
+ writer.flush();
+ writer.close();
+ }
+ } catch (Exception ex) {
+ /* ignore */
+ }
+ }
+ return result;
+ }
- private boolean fixServices(List<Service> serviceList) {
- for (Service service : serviceList) {
- log.debug("Migration1707ArtifactUuidFix fix service: id {}, name {} ", service.getUniqueId(),
- service.getName());
- List<ComponentInstance> instances = service.getComponentInstances();
- for (ComponentInstance instance : instances) {
- fixComponentInstances(service, instance);
- }
- }
- return true;
- }
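+ /**
+ * Regenerates the TOSCA template (YAML) and CSAR artifacts of the component, assigns new unique/ES ids and
+ * checksums and saves both payloads to Cassandra.
+ */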
+ private Either<Component, ToscaError> generateToscaArtifact(Component parent) {
+ log.debug("tosca artifact generation");
+ try {
+ Map<String, ArtifactDefinition> toscaArtifacts = parent.getToscaArtifacts();
- private void fixComponentInstances(Service service, ComponentInstance instance) {
- Map<String, ArtifactDefinition> artifactsMap = instance.getDeploymentArtifacts();
- List<GroupInstance> groupsList = instance.getGroupInstances();
- if (groupsList != null && artifactsMap != null) {
- List<GroupInstance> groupsToDelete = new ArrayList<>();
- for (GroupInstance group : groupsList) {
- fixGroupInstances(service, artifactsMap, groupsToDelete, group);
- }
+ ArtifactDefinition toscaArtifact = null;
+ Optional<ArtifactDefinition> op = toscaArtifacts.values().stream()
+ .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_TEMPLATE.getType())).findAny();
- if (!groupsToDelete.isEmpty()) {
- log.debug("Migration1707ArtifactUuidFix delete group: resource id {}, group instance to delete {} ",
- service.getUniqueId(), groupsToDelete);
- groupsList.removeAll(groupsToDelete);
- }
+ if (op.isPresent()) {
+ toscaArtifact = op.get();
+ }
+ if (toscaArtifact != null) {
+ log.debug("Migration1707ArtifactUuidFix generateToscaArtifact artifact name {} id {} esId {}",
+ toscaArtifact.getArtifactName(), toscaArtifact.getUniqueId(), toscaArtifact.getEsId());
- Optional<ArtifactDefinition> optionalVfModuleArtifact = artifactsMap.values().stream()
- .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.VF_MODULES_METADATA.name())).findAny();
- if (optionalVfModuleArtifact.isPresent()) {
- ArtifactDefinition vfModuleAertifact = optionalVfModuleArtifact.get();
- fillVfModuleInstHeatEnvPayload(groupsList, vfModuleAertifact);
- }
- }
- }
+ Either<ToscaRepresentation, ToscaError> exportComponent = toscaExportUtils.exportComponent(parent);
+ if (exportComponent.isRight()) {
+ log.debug("Failed to export tosca yaml for component {} error {}", parent.getUniqueId(),
+ exportComponent.right().value());
- private void fixGroupInstances(Service service, Map<String, ArtifactDefinition> artifactsMap,
- List<GroupInstance> groupsToDelete, GroupInstance group) {
- if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)) {
- log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", service.getUniqueId(),
- group.getName());
- List<String> groupArtifacts = new ArrayList<>(group.getArtifacts());
+ return Either.right(exportComponent.right().value());
+ }
+ log.debug("Tosca yaml exported for component {} ", parent.getUniqueId());
+ String payload = exportComponent.left().value().getMainYaml();
- group.getArtifacts().clear();
- group.getArtifactsUuid().clear();
- group.getGroupInstanceArtifacts().clear();
- group.getGroupInstanceArtifactsUuid().clear();
+ toscaArtifact.setPayloadData(payload);
+ byte[] decodedPayload = toscaArtifact.getPayloadData();
- for (String artifactId : groupArtifacts) {
- fixArtifactUndergroupInstances(artifactsMap, group, groupArtifacts, artifactId);
- }
- if (group.getArtifacts() == null || group.getArtifacts().isEmpty()) {
- log.debug(
- "Migration1707ArtifactUuidFix fix groupInstance add to delete list: resource id {} name {} , group name {} ",
- service.getUniqueId(), service.getName(), group.getName());
- groupsToDelete.add(group);
- }
- }
- }
+ String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(parent.getUniqueId(),
+ toscaArtifact.getArtifactLabel());
+ toscaArtifact.setUniqueId(uniqueId);
+ toscaArtifact.setEsId(toscaArtifact.getUniqueId());
- private void fixArtifactUndergroupInstances(Map<String, ArtifactDefinition> artifactsMap, GroupInstance group,
- List<String> groupArtifacts, String artifactId) {
- String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
- log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ",
- group.getName(), artifactId, artifactlabel);
- if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) {
- ArtifactDefinition artifact = artifactsMap.get(artifactlabel);
- ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifact.getArtifactType());
- String correctArtifactId = artifact.getUniqueId();
- String correctArtifactUUID = artifact.getArtifactUUID();
- if (artifactType != ArtifactTypeEnum.HEAT_ENV) {
- boolean isAddToGroup = true;
- if (groupArtifacts.size() == 1 && artifactType == ArtifactTypeEnum.HEAT_ARTIFACT) {
- isAddToGroup = false;
- artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType());
- }
- if (isAddToGroup) {
- log.debug(
- "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ",
- group.getName(), correctArtifactId, correctArtifactUUID);
- group.getArtifacts().add(correctArtifactId);
- if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
- group.getArtifactsUuid().add(correctArtifactUUID);
- }
- }
- } else {
- log.debug(
- "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ",
- group.getName(), correctArtifactId, correctArtifactUUID);
- group.getGroupInstanceArtifacts().add(correctArtifactId);
- if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
- group.getGroupInstanceArtifactsUuid().add(correctArtifactUUID);
- }
- }
- }
- }
+ toscaArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
+ ESArtifactData artifactData = new ESArtifactData(toscaArtifact.getEsId(), decodedPayload);
+ artifactCassandraDao.saveArtifact(artifactData);
- private boolean fixVf(List<Resource> vfLst) {
- for (Resource resource : vfLst) {
- log.debug("Migration1707ArtifactUuidFix fix resource: id {}, name {} ", resource.getUniqueId(),
- resource.getName());
- Map<String, ArtifactDefinition> artifactsMap = resource.getDeploymentArtifacts();
- List<GroupDefinition> groupsList = resource.getGroups();
- List<GroupDefinition> groupsToDelete = new ArrayList<>();
- if (groupsList != null && artifactsMap != null) {
- for (GroupDefinition group : groupsList) {
- if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE) && group.getArtifacts() != null) {
- fixVfGroup(resource, artifactsMap, group);
- }
- if (group.getType().equals(Constants.DEFAULT_GROUP_VF_MODULE)
- && (group.getArtifacts() == null || group.getArtifacts().isEmpty())) {
- log.debug(
- "Migration1707ArtifactUuidFix add group to delete list fix resource: id {}, name {} ",
- resource.getUniqueId(), resource.getName(), group.getName());
- groupsToDelete.add(group);
- }
- }
- if (!groupsToDelete.isEmpty()) {
- groupsList.removeAll(groupsToDelete);
- }
- }
- }
- return true;
- }
+ log.debug("Tosca yaml artifact esId {} ", toscaArtifact.getEsId());
+ }
+ ArtifactDefinition csarArtifact = null;
+ op = toscaArtifacts.values().stream()
+ .filter(p -> p.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())).findAny();
- private void fixVfGroup(Resource resource, Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group) {
- log.debug("Migration1707ArtifactUuidFix fix group: resource id {}, group name {} ", resource.getUniqueId(),
- group.getName());
- List<String> groupArtifacts = new ArrayList<>(group.getArtifacts());
+ if (op.isPresent()) {
+ csarArtifact = op.get();
+ }
- for (String artifactId : groupArtifacts) {
- fixArtifactUnderGroup(artifactsMap, group, groupArtifacts, artifactId);
- }
- }
+ if (csarArtifact != null) {
+ Either<byte[], ResponseFormat> generated = csarUtils.createCsar(parent, true, true);
- private void fixArtifactUnderGroup(Map<String, ArtifactDefinition> artifactsMap, GroupDefinition group,
- List<String> groupArtifacts, String artifactId) {
- group.getArtifacts().clear();
- group.getArtifactsUuid().clear();
- String artifactlabel = findArtifactLabelFromArtifactId(artifactId);
- log.debug("Migration1707ArtifactUuidFix fix group: group name {} artifactId for fix {} artifactlabel {} ",
- group.getName(), artifactId, artifactlabel);
- if (!artifactlabel.isEmpty() && artifactsMap.containsKey(artifactlabel)) {
- ArtifactDefinition artifact = artifactsMap.get(artifactlabel);
- String correctArtifactId = artifact.getUniqueId();
- String correctArtifactUUID = artifact.getArtifactUUID();
- boolean isAddToGroup = true;
- if (groupArtifacts.size() == 1) {
- ArtifactTypeEnum artifactType = ArtifactTypeEnum.findType(artifact.getArtifactType());
- if (artifactType == ArtifactTypeEnum.HEAT_ARTIFACT) {
- isAddToGroup = false;
- artifact.setArtifactType(ArtifactTypeEnum.OTHER.getType());
- }
- }
- if (isAddToGroup) {
- log.debug(
- "Migration1707ArtifactUuidFix fix group: group name {} correct artifactId {} artifactUUID {} ",
- group.getName(), correctArtifactId, correctArtifactUUID);
- group.getArtifacts().add(correctArtifactId);
- if (correctArtifactUUID != null && !correctArtifactUUID.isEmpty()) {
- group.getArtifactsUuid().add(correctArtifactUUID);
- }
- }
- }
- }
+ if (generated.isRight()) {
+ log.debug("Failed to export tosca csar for component {} error {}", parent.getUniqueId(),
+ generated.right().value());
- private String findArtifactLabelFromArtifactId(String artifactId) {
- String artifactLabel = "";
- int index = artifactId.lastIndexOf('.');
- if (index > 0 && index + 1 < artifactId.length()) {
- artifactLabel = artifactId.substring(index + 1);
- }
- return artifactLabel;
- }
+ return Either.right(ToscaError.GENERAL_ERROR);
+ }
+ byte[] value = generated.left().value();
+ csarArtifact.setPayload(value);
+ byte[] decodedPayload = csarArtifact.getPayloadData();
- private void writeModuleResultToFile(Writer writer, org.openecomp.sdc.be.model.Component component,
- Service service) {
- try {
- // "service name, service id, state, version
- StringBuilder sb = new StringBuilder(component.getName());
- sb.append(",").append(component.getUniqueId()).append(",").append(component.getLifecycleState()).append(",")
- .append(component.getVersion());
- if (service != null) {
- sb.append(",").append(service.getName());
- }
- sb.append("\n");
- writer.write(sb.toString());
- } catch (IOException e) {
- log.error(e.getMessage(), e);
- }
- }
+ String uniqueId = UniqueIdBuilder.buildPropertyUniqueId(parent.getUniqueId(),
+ csarArtifact.getArtifactLabel());
+ csarArtifact.setUniqueId(uniqueId);
+ csarArtifact.setEsId(csarArtifact.getUniqueId());
- private void writeModuleResultToFile(Writer writer, List<Component> components) {
- try {
- // "service name, service id, state, version
- for (Component component : components) {
- StringBuilder sb = new StringBuilder(component.getName());
- sb.append(",").append(component.getUniqueId())
- .append(",").append(component.getInvariantUUID())
- .append(",").append(component.getLifecycleState())
- .append(",").append(component.getVersion())
- .append("\n");
- writer.write(sb.toString());
- }
- } catch (IOException e) {
- log.error(e.getMessage(), e);
- }
- }
+ csarArtifact.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
+ ESArtifactData artifactData = new ESArtifactData(csarArtifact.getEsId(), decodedPayload);
+ artifactCassandraDao.saveArtifact(artifactData);
+ log.debug("Tosca csar artifact esId {} ", csarArtifact.getEsId());
- public boolean doFixTosca(Map<String, List<Component>> nodeToFix, Map<String, List<Component>> vfToFix,
- Map<String, List<Component>> serviceToFix) {
+ }
- Map<GraphPropertyEnum, Object> hasProps = new HashMap<>();
- hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
- hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ } catch (Exception ex) {
+ log.error("Failed to generate tosca artifact for component id {} name {}",
+ parent.getUniqueId(), parent.getName(), ex);
- Map<String, List<Component>> vertices = getVerticesToValidate(VertexTypeEnum.NODE_TYPE, hasProps);
- boolean result = validateTosca(vertices, nodeToFix, "RESOURCE_TOSCA_ARTIFACTS");
+ return Either.right(ToscaError.GENERAL_ERROR);
+ }
- hasProps.clear();
- hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.RESOURCE.name());
- hasProps.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.VF);
- hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ return Either.left(parent);
+ }
+
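+ /**
+ * Creates a new vfModulesMetadata deployment artifact definition for the given component instance.
+ */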
+ private ArtifactDefinition createVfModuleArtifact(ComponentInstance currVF, Service service) {
- vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
- result = validateTosca(vertices, vfToFix, "VF_TOSCA_ARTIFACTS");
+ ArtifactDefinition vfModuleArtifactDefinition = new ArtifactDefinition();
- hasProps.clear();
- hasProps.put(GraphPropertyEnum.COMPONENT_TYPE, ComponentTypeEnum.SERVICE.name());
- hasProps.put(GraphPropertyEnum.STATE, LifecycleStateEnum.CERTIFIED.name());
+ vfModuleArtifactDefinition.setDescription("Auto-generated VF Modules information artifact");
+ vfModuleArtifactDefinition.setArtifactDisplayName("Vf Modules Metadata");
+ vfModuleArtifactDefinition.setArtifactType(ArtifactTypeEnum.VF_MODULES_METADATA.getType());
+ vfModuleArtifactDefinition.setArtifactGroupType(ArtifactGroupTypeEnum.DEPLOYMENT);
+ vfModuleArtifactDefinition.setArtifactLabel("vfModulesMetadata");
+ vfModuleArtifactDefinition.setTimeout(0);
+ vfModuleArtifactDefinition.setArtifactName(currVF.getNormalizedName() + "_modules.json");
+
+ return vfModuleArtifactDefinition;
+ }
- vertices = getVerticesToValidate(VertexTypeEnum.TOPOLOGY_TEMPLATE, hasProps);
- result = validateTosca(vertices, serviceToFix, "SERVICE_TOSCA_ARTIFACTS");
- return result;
- }
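+ /**
+ * Rebuilds the VF modules metadata payload from the instance group instances (sorted by group name),
+ * assigns a new unique id, esId and checksum to the vfModulesMetadata artifact and saves the payload to Cassandra.
+ */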
+ private void fillVfModuleInstHeatEnvPayload(Component parent, ComponentInstance instance, List<GroupInstance> groupsForCurrVF,
+ ArtifactDefinition vfModuleArtifact) {
+ log.debug("generate new vf module for component instance. name {}, id {}", instance.getName(), instance.getUniqueId());
+
+ String uniqueId = UniqueIdBuilder.buildInstanceArtifactUniqueId(parent.getUniqueId(), instance.getUniqueId(), vfModuleArtifact.getArtifactLabel());
+
+ vfModuleArtifact.setUniqueId(uniqueId);
+ vfModuleArtifact.setEsId(vfModuleArtifact.getUniqueId());
- public Map<String, List<Component>> getVerticesToValidate(VertexTypeEnum type,
- Map<GraphPropertyEnum, Object> hasProps) {
+ List<VfModuleArtifactPayload> vfModulePayloadForCurrVF = new ArrayList<>();
+ if (groupsForCurrVF != null) {
+ for (GroupInstance groupInstance : groupsForCurrVF) {
+ VfModuleArtifactPayload modulePayload = new VfModuleArtifactPayload(groupInstance);
+ vfModulePayloadForCurrVF.add(modulePayload);
+ }
+ vfModulePayloadForCurrVF.sort(VfModuleArtifactPayload::compareByGroupName);
- Map<String, List<Component>> result = new HashMap<>();
- try {
- Either<List<GraphVertex>, TitanOperationStatus> resultsEither = titanDao.getByCriteria(type, hasProps);
- if (resultsEither.isRight()) {
- log.info("getVerticesToValidate failed " + resultsEither.right().value());
- return result;
- }
- log.info("getVerticesToValidate: " + resultsEither.left().value().size() + " vertices to scan");
- List<GraphVertex> componentsList = resultsEither.left().value();
- componentsList.forEach(vertex -> {
- String ivariantUuid = (String) vertex.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID);
- if (!result.containsKey(ivariantUuid)) {
- result.put(ivariantUuid, new ArrayList<>());
- }
- List<Component> compList = result.get(ivariantUuid);
+ final Gson gson = new GsonBuilder().setPrettyPrinting().create();
- ComponentParametersView filter = new ComponentParametersView(true);
- filter.setIgnoreArtifacts(false);
+ String vfModulePayloadString = gson.toJson(vfModulePayloadForCurrVF);
+ log.debug("vfModulePayloadString {}", vfModulePayloadString);
+ if (vfModulePayloadString != null) {
+ String newCheckSum = GeneralUtility
+ .calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes());
+ vfModuleArtifact.setArtifactChecksum(newCheckSum);
- Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
- .getToscaElement(vertex.getUniqueId(), filter);
- if (toscaElement.isRight()) {
- log.info("getVerticesToValidate: failed to find element" + vertex.getUniqueId()
- + " staus is" + toscaElement.right().value());
- } else {
- compList.add(toscaElement.left().value());
- }
- titanDao.commit();
- });
- } catch (Exception e) {
- log.info("Failed to fetch vf resources ", e);
- } finally {
- titanDao.commit();
- }
- return result;
- }
+ ESArtifactData artifactData = new ESArtifactData(vfModuleArtifact.getEsId(),
+ vfModulePayloadString.getBytes());
+ artifactCassandraDao.saveArtifact(artifactData);
- public boolean validateTosca(Map<String, List<Component>> vertices, Map<String, List<Component>> compToFix,
- String name) {
- boolean result = true;
- long time = System.currentTimeMillis();
- String fileName = name + "_" + time + ".csv";
- try (Writer writer = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "utf-8"))) {
- writer.write("name, UUID, invariantUUID, state, version\n");
- for (Map.Entry<String, List<Component>> entry : vertices.entrySet()) {
- List<Component> compList = entry.getValue();
- Set<String> artifactEsId = new HashSet<>();
- for (Component component : compList) {
- Map<String, ArtifactDefinition> toscaArtifacts = component.getToscaArtifacts();
- Optional<ArtifactDefinition> op = toscaArtifacts.values().stream()
- .filter(a -> artifactEsId.contains(a.getEsId()) && a.getEsId() != null).findAny();
- if (op.isPresent()) {
- result = false;
- writeModuleResultToFile(writer, compList);
- writer.flush();
- break;
- } else {
- artifactEsId.addAll(toscaArtifacts.values().stream().map(ArtifactDefinition::getEsId)
- .collect(Collectors.toList()));
- }
- }
- if (!result) {
- List<Component> compListfull = new ArrayList<>();
- for (Component c : compList) {
- ComponentParametersView filter = new ComponentParametersView(true);
- filter.setIgnoreComponentInstances(false);
- filter.setIgnoreArtifacts(false);
- filter.setIgnoreGroups(false);
+ }
- Either<Component, StorageOperationStatus> toscaElement = toscaOperationFacade
- .getToscaElement(c.getUniqueId(), filter);
- if (toscaElement.isRight()) {
- log.info("getVerticesToValidate: failed to find element" + c.getUniqueId()
- + " staus is" + toscaElement.right().value());
- } else {
- compListfull.add(toscaElement.left().value());
- }
- this.titanDao.commit();
- }
- compToFix.put(entry.getKey(), compListfull);
- result = true;
- }
- }
- } catch (Exception e) {
- log.info("Failed to fetch vf resources ", e);
- return false;
- } finally {
- titanDao.commit();
- }
- return result;
- }
+ }
- private Either<ArtifactDefinition, ToscaError> generateToscaArtifact(Component parent,
- ArtifactDefinition artifactInfo) {
- log.debug("tosca artifact generation");
- try {
- if (artifactInfo.getArtifactType().equals(ArtifactTypeEnum.TOSCA_CSAR.getType())) {
- Either<byte[], ResponseFormat> generated = csarUtils.createCsar(parent, true, true);
+ }
+
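+ // Fetches the VF module JSON artifact from Cassandra by its esId and parses it into a list of VfModuleArtifactPayloadEx entries.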
+ private Either<List<VfModuleArtifactPayloadEx>, StorageOperationStatus> parseVFModuleJson(ArtifactDefinition vfModuleArtifact) {
+ log.info("Try to get vfModule json from cassandra {}", vfModuleArtifact.getEsId());
+ Either<ESArtifactData, CassandraOperationStatus> vfModuleData = artifactCassandraDao.getArtifact(vfModuleArtifact.getEsId());
+
+ if (vfModuleData.isRight()) {
+ CassandraOperationStatus resourceUploadStatus = vfModuleData.right().value();
+ StorageOperationStatus storageResponse = DaoStatusConverter.convertCassandraStatusToStorageStatus(resourceUploadStatus);
+ log.error("failed to fetch vfModule json {} from cassandra. Status is {}", vfModuleArtifact.getEsId(), storageResponse);
+ return Either.right(storageResponse);
+
+ }
- if (generated.isRight()) {
- log.debug("Failed to export tosca csar for component {} error {}", parent.getUniqueId(),
- generated.right().value());
+ ESArtifactData esArtifactData = vfModuleData.left().value();
+ String gsonData = new String(esArtifactData.getDataAsArray());
+ final Gson gson = new GsonBuilder().setPrettyPrinting().create();
+ JsonArray jsonElement = gson.fromJson(gsonData, JsonArray.class);
+ List<VfModuleArtifactPayloadEx> vfModules = new ArrayList<>();
+ jsonElement.forEach(je -> {
+ VfModuleArtifactPayloadEx vfModule = ComponentsUtils.parseJsonToObject(je.toString(), VfModuleArtifactPayloadEx.class);
+ vfModules.add(vfModule);
+ });
+
+ log.debug ("parse vf module finish {}", gsonData);
+ return Either.left(vfModules);
+
+ }
- return Either.right(ToscaError.GENERAL_ERROR);
- }
- byte[] value = generated.left().value();
- artifactInfo.setPayload(value);
- } else {
- Either<ToscaRepresentation, ToscaError> exportComponent = toscaExportUtils.exportComponent(parent);
- if (exportComponent.isRight()) {
- log.debug("Failed export tosca yaml for component {} error {}", parent.getUniqueId(),
- exportComponent.right().value());
+ /*public boolean manualFix() {
- return Either.right(exportComponent.right().value());
- }
- log.debug("Tosca yaml exported for component {} ", parent.getUniqueId());
- String payload = exportComponent.left().value().getMainYaml();
+ Set<String> fixedIds = new HashSet<>();
+ Component component;
+ String componentId = "86683566-20e8-4cc5-872d-12abca1d57f0";//"9f6a6976-18e3-488a-98a4-c1aade480739";
+ Either<Resource, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaFullElement(componentId);
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch resources {} {}", componentId, toscaElement.right().value());
- artifactInfo.setPayloadData(payload);
- }
+ }
+ boolean res = generateToscaPerComponent(fixedIds, toscaElement.left().value());
+ log.info("Fix component return res {} ", res);
+ titanDao.commit();
- byte[] decodedPayload = artifactInfo.getPayloadData();
- artifactInfo.setEsId(artifactInfo.getUniqueId());
- artifactInfo.setArtifactChecksum(GeneralUtility.calculateMD5Base64EncodedByByteArray(decodedPayload));
- ESArtifactData artifactData = new ESArtifactData(artifactInfo.getEsId(), decodedPayload);
- artifactCassandraDao.saveArtifact(artifactData);
- log.debug("Tosca yaml artifact esId ", artifactInfo.getEsId());
- } catch (Exception ex) {
- log.error("Failed to generate tosca atifact id {} component id {} component name {} error {}",
- artifactInfo.getUniqueId(), parent.getUniqueId(), parent.getName(), ex.getMessage(), ex);
- return Either.right(ToscaError.GENERAL_ERROR);
- }
- return Either.left(artifactInfo);
- }
+ return res;
+ }
- private void fillVfModuleInstHeatEnvPayload(List<GroupInstance> groupsForCurrVF,
- ArtifactDefinition vfModuleArtifact) {
+
+ public boolean manualCheck() {
- List<VfModuleArtifactPayload> vfModulePayloadForCurrVF = new ArrayList<>();
- if (groupsForCurrVF != null) {
- for (GroupInstance groupInstance : groupsForCurrVF) {
- VfModuleArtifactPayload modulePayload = new VfModuleArtifactPayload(groupInstance);
- vfModulePayloadForCurrVF.add(modulePayload);
- }
- vfModulePayloadForCurrVF.sort(VfModuleArtifactPayload::compareByGroupName);
+ Set<String> fixedIds = new HashSet<>();
+ Component component;
+ String componentId = "86d50186-7b00-4bfc-abcb-9e4c6892f338";//"9f6a6976-18e3-488a-98a4-c1aade480739";
+ Either<Service, StorageOperationStatus> toscaElement = toscaOperationFacade.getToscaFullElement(componentId);
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch resources {} {}", componentId, toscaElement.right().value());
- final Gson gson = new GsonBuilder().setPrettyPrinting().create();
+ }
+ boolean res = true;
+ try {
+ res = isProblematicService(toscaElement.left().value(), toscaElement.left().value().getName());
+ if(res){
+ List<Service> services = new ArrayList<>();
+ services.add(toscaElement.left().value());
+ this.fix(new ArrayList<Resource>(), services, null, null, null);
+
+ Either<Service, StorageOperationStatus> toscaElementNew = toscaOperationFacade.getToscaFullElement(componentId);
+ if (toscaElement.isRight()) {
+ log.info("Failed to fetch resources {} {}", componentId, toscaElementNew.right().value());
- String vfModulePayloadString = gson.toJson(vfModulePayloadForCurrVF);
- if (vfModulePayloadString != null) {
- String newCheckSum = GeneralUtility
- .calculateMD5Base64EncodedByByteArray(vfModulePayloadString.getBytes());
- vfModuleArtifact.setArtifactChecksum(newCheckSum);
+ }
+ res = isProblematicService(toscaElementNew.left().value(), toscaElementNew.left().value().getName());
+ }
+ titanDao.commit();
- ESArtifactData artifactData = new ESArtifactData(vfModuleArtifact.getEsId(),
- vfModulePayloadString.getBytes());
- artifactCassandraDao.saveArtifact(artifactData);
- }
- }
- }
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ e.printStackTrace();
+ }
+ log.info("Fix component return res {} ", res);
+
+ return res;
+}*/
}
+
+
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
index 7ed8b17..502ddd1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/ProductLogic.java
@@ -44,23 +44,18 @@
public boolean deleteAllProducts(String titanFile, String beHost, String bePort, String adminUser) {
log.debug("retrieving all products from graph");
RestUtils restUtils = null;
- try {
- List<String> productList = getAllProducts(titanFile);
- restUtils = new RestUtils();
- if (productList != null) {
- for (String productUid : productList) {
- Integer status = restUtils.deleteProduct(productUid, beHost, bePort, adminUser);
- }
- return true;
- } else {
- log.error("failed to get products from graph");
- return false;
- }
- } finally {
- if (restUtils != null) {
- restUtils.closeClient();
- }
- }
+ List<String> productList = getAllProducts(titanFile);
+ restUtils = new RestUtils();
+ if (productList != null) {
+ for (String productUid : productList) {
+ restUtils.deleteProduct(productUid, beHost, bePort, adminUser);
+ }
+ return true;
+ } else {
+ log.error("failed to get products from graph");
+ return false;
+ }
}
private List<String> getAllProducts(String titanFile) {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
index 355780c..50781f2 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/RestUtils.java
@@ -20,13 +20,11 @@
package org.openecomp.sdc.asdctool.impl;
-import java.io.IOException;
+import java.util.Properties;
-import org.apache.http.client.methods.CloseableHttpResponse;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.impl.client.CloseableHttpClient;
-import org.apache.http.impl.client.HttpClients;
+import org.apache.http.HttpStatus;
+import org.openecomp.sdc.common.http.client.api.HttpRequest;
+import org.openecomp.sdc.common.http.client.api.HttpResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -35,48 +33,33 @@
*/
public class RestUtils {
- final String DELETE_PRODUCT = "http://%s:%s/sdc2/rest/v1/catalog/products/%s";
- final Integer DELETE_SUCCSES_RESPONSE = 200;
+ static final String DELETE_PRODUCT = "http://%s:%s/sdc2/rest/v1/catalog/products/%s";
private static Logger log = LoggerFactory.getLogger(RestUtils.class.getName());
- CloseableHttpClient httpClient;
public RestUtils() {
- this.httpClient = HttpClients.createDefault();
- }
-
- private CloseableHttpResponse exacuteRequest(HttpUriRequest httpRequest) throws IOException {
- log.debug("received http request: {}", httpRequest.toString());
- return httpClient.execute(httpRequest);
- }
-
- public void closeClient() {
- log.debug("closing http client");
- try {
- this.httpClient.close();
- log.debug("closed http client");
- } catch (IOException e) {
- log.debug("close http client failed", e);
-
- }
}
public Integer deleteProduct(String productUid, String beHost, String bePort, String adminUser) {
String url = String.format(DELETE_PRODUCT, beHost, bePort, productUid);
- HttpDelete deleteRequest = new HttpDelete(url);
- deleteRequest.setHeader("USER_ID", adminUser);
- try (CloseableHttpResponse response = this.httpClient.execute(deleteRequest)) {
- int status = response.getStatusLine().getStatusCode();
- if (DELETE_SUCCSES_RESPONSE.equals(status)) {
- log.debug("Product uid:{} succsesfully deleted", productUid);
- } else {
- log.error("Product uid:{} delete failed status {}", productUid, status);
- }
- return status;
- } catch (IOException e) {
- log.error("Product uid:{} delete failed with exception",productUid, e);
+
+ Properties headers = new Properties();
+ headers.put("USER_ID", adminUser);
+ try {
+ HttpResponse<String> httpResponse = HttpRequest.delete(url, headers);
+ int status = httpResponse.getStatusCode();
+ if (status == HttpStatus.SC_OK) {
+ log.debug("Product uid:{} succsesfully deleted", productUid);
+ }
+ else {
+ log.error("Product uid:{} delete failed status {}", productUid, status);
+ }
+ return status;
}
- return null;
+ catch (Exception e) {
+ log.error("Product uid:{} delete failed with exception", productUid, e);
+ }
+ return null;
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
index 11d5202..ae226f1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/TitanGraphInitializer.java
@@ -28,9 +28,12 @@
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.openecomp.sdc.be.dao.graph.datatype.ActionEnum;
import org.openecomp.sdc.be.dao.graph.datatype.GraphElementTypeEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.jsongraph.utils.IdBuilderUtils;
import org.openecomp.sdc.be.dao.neo4j.GraphEdgePropertiesDictionary;
import org.openecomp.sdc.be.dao.neo4j.GraphPropertiesDictionary;
import org.openecomp.sdc.be.dao.utils.UserStatusEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.datatypes.enums.NodeTypeEnum;
import org.openecomp.sdc.be.resources.data.UserData;
import org.slf4j.Logger;
@@ -50,7 +53,6 @@
private static Logger logger = LoggerFactory.getLogger(TitanGraphInitializer.class.getName());
private static TitanGraph graph;
-
public static boolean createGraph(String titanCfgFile) {
logger.info("** createGraph with {}", titanCfgFile);
try {
@@ -64,9 +66,9 @@
logger.info("createGraph : failed to open Titan graph with configuration file: {}", titanCfgFile, e);
return false;
}
-
+
createIndexesAndDefaults();
-
+
logger.info("** Titan graph created ");
return true;
@@ -100,13 +102,13 @@
checkedProperties.put(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
Map<String, Object> properties = null;
if (!isVertexExist(checkedProperties)) {
- Vertex vertex = graph.addVertex();
- vertex.property(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
- properties = user.toGraphMap();
- for (Map.Entry<String, Object> entry : properties.entrySet()) {
- vertex.property(entry.getKey(), entry.getValue());
- }
- }
+ Vertex vertex = graph.addVertex();
+ vertex.property(GraphPropertiesDictionary.LABEL.getProperty(), NodeTypeEnum.User.getName());
+ properties = user.toGraphMap();
+ for (Map.Entry<String, Object> entry : properties.entrySet()) {
+ vertex.property(entry.getKey(), entry.getValue());
+ }
+ }
}
private static UserData getDefaultUserAdmin() {
@@ -123,7 +125,6 @@
return userData;
}
-
private static void createVertexIndixes() {
logger.info("** createVertexIndixes started");
@@ -186,5 +187,13 @@
createVertexIndixes();
createEdgeIndixes();
createDefaultAdminUser();
+ createRootCatalogVertex();
+ }
+
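+ // Adds a single CATALOG_ROOT vertex with a generated unique id and commits the transaction.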
+ private static void createRootCatalogVertex() {
+ Vertex vertex = graph.addVertex();
+ vertex.property(GraphPropertyEnum.UNIQUE_ID.getProperty(), IdBuilderUtils.generateUniqueId());
+ vertex.property(GraphPropertyEnum.LABEL.getProperty(), VertexTypeEnum.CATALOG_ROOT.getName());
+ graph.tx().commit();
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
index 556d7e0..341a56f 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/ArtifactToolBL.java
@@ -2,6 +2,7 @@
import java.util.List;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ArtifactValidatorExecuter;
import org.openecomp.sdc.asdctool.impl.validator.executers.IArtifactValidatorExecuter;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.slf4j.Logger;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
index 63e95d5..64d6bc1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/config/ValidationToolConfiguration.java
@@ -10,6 +10,10 @@
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ArtifactValidationUtils;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.ServiceArtifactValidationTask;
import org.openecomp.sdc.asdctool.impl.validator.tasks.artifacts.VfArtifactValidationTask;
+import org.openecomp.sdc.asdctool.impl.validator.executers.NodeToscaArtifactsValidatorExecuter;
+import org.openecomp.sdc.asdctool.impl.validator.executers.ServiceToscaArtifactsValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.executers.VFToscaArtifactValidatorExecutor;
+import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
import org.openecomp.sdc.asdctool.impl.validator.tasks.moduleJson.ModuleJsonTask;
import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.be.dao.DAOTitanStrategy;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
index 7f85d9d..ceab832 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ArtifactValidatorExecuter.java
@@ -22,14 +22,16 @@
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
import org.openecomp.sdc.be.model.ArtifactDefinition;
import org.openecomp.sdc.be.model.Component;
import org.openecomp.sdc.be.model.ComponentParametersView;
+import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.springframework.beans.factory.annotation.Autowired;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.annotation.Autowired;
public class ArtifactValidatorExecuter {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java
index 6f9405f..30bfab1 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/IArtifactValidatorExecuter.java
@@ -1,5 +1,14 @@
package org.openecomp.sdc.asdctool.impl.validator.executers;
+import java.util.List;
+import java.util.Map;
+
+import org.openecomp.sdc.asdctool.impl.validator.tasks.TopologyTemplateValidationTask;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
+
public interface IArtifactValidatorExecuter {
boolean executeValidations();
String getName();
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
index 6715c8a..c0a2589 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/NodeToscaArtifactsValidatorExecuter.java
@@ -4,6 +4,7 @@
import java.util.List;
import java.util.Map;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
index 2fe5abe..1dd1779 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/ServiceToscaArtifactsValidatorExecutor.java
@@ -4,6 +4,7 @@
import java.util.List;
import java.util.Map;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
index bc22f2e..69be96c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/executers/VFToscaArtifactValidatorExecutor.java
@@ -4,6 +4,7 @@
import java.util.List;
import java.util.Map;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadEx.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadEx.java
new file mode 100644
index 0000000..398ce6b
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/impl/validator/utils/VfModuleArtifactPayloadEx.java
@@ -0,0 +1,92 @@
+package org.openecomp.sdc.asdctool.impl.validator.utils;
+
+import java.util.List;
+import java.util.Map;
+
+
+
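+// POJO mirroring a single VF module entry of the VF module JSON artifact; used when parsing the artifact payload during validation.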
+public class VfModuleArtifactPayloadEx {
+
+ private String vfModuleModelName, vfModuleModelInvariantUUID, vfModuleModelVersion, vfModuleModelUUID, vfModuleModelCustomizationUUID, vfModuleModelDescription;
+ private Boolean isBase;
+ private List<String> artifacts;
+ private Map< String, Object> properties;
+
+
+
+ public String getVfModuleModelName() {
+ return vfModuleModelName;
+ }
+
+ public void setVfModuleModelName(String vfModuleModelName) {
+ this.vfModuleModelName = vfModuleModelName;
+ }
+
+ public String getVfModuleModelInvariantUUID() {
+ return vfModuleModelInvariantUUID;
+ }
+
+ public void setVfModuleModelInvariantUUID(String vfModuleModelInvariantUUID) {
+ this.vfModuleModelInvariantUUID = vfModuleModelInvariantUUID;
+ }
+
+ public String getVfModuleModelVersion() {
+ return vfModuleModelVersion;
+ }
+
+ public void setVfModuleModelVersion(String vfModuleModelVersion) {
+ this.vfModuleModelVersion = vfModuleModelVersion;
+ }
+
+ public String getVfModuleModelUUID() {
+ return vfModuleModelUUID;
+ }
+
+ public void setVfModuleModelUUID(String vfModuleModelUUID) {
+ this.vfModuleModelUUID = vfModuleModelUUID;
+ }
+
+ public String getVfModuleModelCustomizationUUID() {
+ return vfModuleModelCustomizationUUID;
+ }
+
+ public void setVfModuleModelCustomizationUUID(String vfModuleModelCustomizationUUID) {
+ this.vfModuleModelCustomizationUUID = vfModuleModelCustomizationUUID;
+ }
+
+ public String getVfModuleModelDescription() {
+ return vfModuleModelDescription;
+ }
+
+ public void setVfModuleModelDescription(String vfModuleModelDescription) {
+ this.vfModuleModelDescription = vfModuleModelDescription;
+ }
+
+ public Boolean getIsBase() {
+ return isBase;
+ }
+
+ public void setIsBase(Boolean isBase) {
+ this.isBase = isBase;
+ }
+
+ public List<String> getArtifacts() {
+ return artifacts;
+ }
+
+ public void setArtifacts(List<String> artifacts) {
+ this.artifacts = artifacts;
+ }
+
+ public Map<String, Object> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(Map<String, Object> properties) {
+ this.properties = properties;
+ }
+
+
+
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
index 5ca8e55..a2e5946 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactUUIDFixMenu.java
@@ -27,14 +27,7 @@
boolean isSuccessful = artifactUuidFix.doFix(fixServices, runMode);
if (isSuccessful) {
log.info("Fixing artifacts UUID for 1707 was finished successfully");
- /* isSuccessful = artifactUuidFix.doFixTosca(fixTosca, fixServices, runMode);
- if (isSuccessful) {
- log.info("Fixing tosca artifacts was finished successfully");
- isSuccessful = artifactUuidFix.doFixTosca(fixTosca, fixServices, runMode);
- } else{
- log.info("Fixing tosca artifacts has failed");
- System.exit(2);
- }*/
+
} else{
log.info("Fixing artifacts UUID for 1707 has failed");
System.exit(2);
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
index f7e8f1f..5ae543c 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/ArtifactValidatorTool.java
@@ -3,6 +3,7 @@
import org.openecomp.sdc.asdctool.impl.validator.ArtifactToolBL;
import org.openecomp.sdc.asdctool.impl.validator.config.ValidationConfigManager;
import org.openecomp.sdc.asdctool.impl.validator.config.ValidationToolConfiguration;
+import org.openecomp.sdc.asdctool.impl.validator.utils.ReportManager;
import org.openecomp.sdc.be.config.ConfigurationManager;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.common.impl.ExternalConfiguration;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
new file mode 100644
index 0000000..189348a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/main/GetConsumersMenu.java
@@ -0,0 +1,57 @@
+package org.openecomp.sdc.asdctool.main;
+
+import fj.data.Either;
+import org.openecomp.sdc.asdctool.cli.CLIToolData;
+import org.openecomp.sdc.asdctool.cli.SpringCLITool;
+import org.openecomp.sdc.asdctool.configuration.GetConsumersConfiguration;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.model.operations.impl.ConsumerOperation;
+import org.openecomp.sdc.be.resources.data.ConsumerData;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+
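+// CLI tool that prints all SDC consumers returned by ConsumerOperation, exiting with a non-zero code if the fetch fails.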
+public class GetConsumersMenu extends SpringCLITool {
+
+ private static final Logger LOGGER = LoggerFactory.getLogger(GetConsumersMenu.class);
+
+ public static void main(String[] args) {
+ GetConsumersMenu getConsumersMenu = new GetConsumersMenu();
+ CLIToolData cliToolData = getConsumersMenu.init(args);
+ ConsumerOperation consumersService = cliToolData.getSpringApplicationContext().getBean(ConsumerOperation.class);
+ printConsumers(getConsumersMenu, consumersService);
+ }
+
+ private static void printConsumers(GetConsumersMenu getConsumersMenu, ConsumerOperation consumersService) {
+ Either<List<ConsumerData>, StorageOperationStatus> allConsumers = consumersService.getAll();
+ allConsumers.left().foreachDoEffect(getConsumersMenu::printConsumers);
+ allConsumers.right().foreachDoEffect(getConsumersMenu::printErr);
+ }
+
+ private void printConsumers(List<ConsumerData> consumers) {
+ System.out.println("SDC consumers: ");
+ consumers.forEach(consumer -> {
+ System.out.println("#########################");
+ System.out.println(consumer);
+ });
+ System.exit(0);
+ }
+
+ private void printErr(StorageOperationStatus err) {
+ String errMsg = String.format("failed to fetch consumers. reason: %s", err);
+ LOGGER.error(errMsg);
+ System.err.print(errMsg);
+ System.exit(1);
+ }
+
+ @Override
+ protected String commandName() {
+ return "get-consumers";
+ }
+
+ @Override
+ protected Class<?> getSpringConfigurationClass() {
+ return GetConsumersConfiguration.class;
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
index 19651ec..f192d87 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfig.java
@@ -10,55 +10,11 @@
import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver;
import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
-import org.openecomp.sdc.be.auditing.impl.AuditingManager;
-import org.openecomp.sdc.be.components.ArtifactsResolver;
-import org.openecomp.sdc.be.components.distribution.engine.DistributionEngine;
import org.openecomp.sdc.be.components.distribution.engine.ServiceDistributionArtifactsBuilder;
-import org.openecomp.sdc.be.components.impl.AdditionalInformationBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ArtifactResolverImpl;
-import org.openecomp.sdc.be.components.impl.ArtifactsBusinessLogic;
-import org.openecomp.sdc.be.components.impl.CompositionBusinessLogic;
-import org.openecomp.sdc.be.components.impl.GroupBusinessLogic;
-import org.openecomp.sdc.be.components.impl.InputsBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ProductBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ResourceImportManager;
-import org.openecomp.sdc.be.components.impl.ServiceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.VFComponentInstanceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.generic.GenericTypeBusinessLogic;
-import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
-import org.openecomp.sdc.be.components.merge.heat.HeatEnvArtifactsMergeBusinessLogic;
-import org.openecomp.sdc.be.components.merge.input.InputsValuesMergingBusinessLogic;
-import org.openecomp.sdc.be.components.merge.instance.ComponentInstanceMergeDataBusinessLogic;
-import org.openecomp.sdc.be.components.merge.property.DataDefinitionsValuesMergingBusinessLogic;
-import org.openecomp.sdc.be.dao.DAOTitanStrategy;
-import org.openecomp.sdc.be.dao.TitanClientStrategy;
-import org.openecomp.sdc.be.dao.cassandra.AuditCassandraDao;
-import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.openecomp.sdc.be.config.CatalogModelSpringConfig;
import org.openecomp.sdc.be.dao.config.DAOSpringConfig;
-import org.openecomp.sdc.be.dao.es.ElasticSearchClient;
-import org.openecomp.sdc.be.dao.impl.AuditingDao;
-import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
-import org.openecomp.sdc.be.dao.titan.TitanGenericDao;
-import org.openecomp.sdc.be.dao.titan.TitanGraphClient;
-import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.cache.ComponentCache;
-import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
-import org.openecomp.sdc.be.model.operations.impl.CapabilityTypeOperation;
-import org.openecomp.sdc.be.model.operations.impl.ComponentInstanceOperation;
-import org.openecomp.sdc.be.model.operations.impl.CsarOperation;
-import org.openecomp.sdc.be.model.operations.impl.ElementOperation;
-import org.openecomp.sdc.be.model.operations.impl.GraphLockOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupInstanceOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupOperation;
-import org.openecomp.sdc.be.model.operations.impl.GroupTypeOperation;
-import org.openecomp.sdc.be.model.operations.impl.PropertyOperation;
-import org.openecomp.sdc.be.tosca.CsarUtils;
-import org.openecomp.sdc.be.tosca.ToscaExportHandler;
-import org.openecomp.sdc.be.user.UserBusinessLogic;
+import org.openecomp.sdc.config.CatalogBESpringConfig;
import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.PropertiesFactoryBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
@@ -67,17 +23,10 @@
import org.springframework.core.io.FileSystemResource;
@Configuration
-@Import(DAOSpringConfig.class)
+@Import({DAOSpringConfig.class, CatalogBESpringConfig.class, CatalogModelSpringConfig.class})
@ComponentScan({"org.openecomp.sdc.asdctool.migration.tasks",//migration tasks
- "org.openecomp.sdc.be.model.operations.impl",
- "org.openecomp.sdc.be.model.cache",
- "org.openecomp.sdc.be.dao.titan",
- "org.openecomp.sdc.be.components.validation",
- "org.openecomp.sdc.be.dao.cassandra",
- "org.openecomp.sdc.be.model.jsontitan.operations",
- "org.openecomp.sdc.be.dao.jsongraph",
- "org.openecomp.sdc.be.components.merge",
- "org.openecomp.sdc.be.impl"})
+ "org.openecomp.sdc.asdctool.migration.config.mocks"
+ })
public class MigrationSpringConfig {
@Autowired(required=false)
@@ -106,246 +55,10 @@
return new MigrationTasksDao();
}
- @Bean(name = "cassandra-client")
- public CassandraClient cassandraClient() {
- return new CassandraClient();
- }
-
-
- @Bean(name = "dao-titan-strategy")
- public TitanClientStrategy daoStrategy() {
- return new DAOTitanStrategy();
- }
-
- @Bean(name = "titan-dao")
- public TitanDao titanDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) {
- return new TitanDao(titanGraphClient);
- }
-
- @Bean(name = "titan-client", initMethod = "createGraph")
- public TitanGraphClient titanClient(@Qualifier("dao-titan-strategy") TitanClientStrategy titanClientStrategy) {
- return new TitanGraphClient(titanClientStrategy);
- }
-
- @Bean(name = "resource-business-logic")
- public ResourceBusinessLogic resourceBusinessLogic() {
- return new ResourceBusinessLogic();
- }
-
-// @Bean(name = "healthCheckBusinessLogic")
-// public HealthCheckBusinessLogic healthCheckBusinessLogic() {
-// return new HealthCheckBusinessLogic();
-// }
-//
-// @Bean(name = "distribution-engine-cluster-health")
-// public DistributionEngineClusterHealth distributionEngineClusterHealth() {
-// return new DistributionEngineClusterHealth();
-// }
-//
-// @Bean(name = "cassandra-health-check")
-// public CassandraHealthCheck cassandraHealthCheck() {
-// return new CassandraHealthCheck();
-// }
-
-// @Bean(name = "switchover-detector")
-// public SwitchoverDetector switchoverDetector() {
-// return new SwitchoverDetector();
-// }
-
- @Bean(name = "service-business-logic")
- public ServiceBusinessLogic serviceBusinessLogic() {
- return new ServiceBusinessLogic();
- }
-
- @Bean(name = "capability-type-operation")
- public CapabilityTypeOperation CapabilityTypeOperation() {
- return new CapabilityTypeOperation();
- }
-
- @Bean(name = "lifecycle-business-logic")
- public LifecycleBusinessLogic lifecycleBusinessLogic() {
- return new LifecycleBusinessLogic();
- }
-
- @Bean(name = "property-operation")
- public PropertyOperation propertyOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
- return new PropertyOperation(titanGenericDao);
- }
-
- @Bean(name = "csar-operation")
- public CsarOperation csarOperation() {
- return new CsarOperation();
- }
-
- @Bean(name = "vf-component-instance-business-logic")
- public VFComponentInstanceBusinessLogic vFComponentInstanceBusinessLogic() {
- return new VFComponentInstanceBusinessLogic();
- }
-
- @Bean(name = "resource-import-manager")
- public ResourceImportManager resourceImportManager() {
- return new ResourceImportManager();
- }
-
- @Bean(name = "group-business-logic")
- public GroupBusinessLogic groupBusinessLogic() {
- return new GroupBusinessLogic();
- }
-
- @Bean(name = "inputs-business-logic")
- public InputsBusinessLogic inputsBusinessLogic() {
- return new InputsBusinessLogic();
- }
-
- @Bean(name = "composition-business-logic")
- public CompositionBusinessLogic compositionBusinessLogic() {
- return new CompositionBusinessLogic();
- }
-
- @Bean(name = "artifacts-business-logic")
- public ArtifactsBusinessLogic artifactsBusinessLogic() {
- return new ArtifactsBusinessLogic();
- }
-
- @Bean(name = "component-cache")
- public ComponentCache componentCache() {
- return new ComponentCache();
- }
-
- @Bean(name = "componentUtils")
- public ComponentsUtils componentsUtils() {
- return new ComponentsUtils();
- }
-
- @Bean(name = "user-business-logic")
- public UserBusinessLogic userBusinessLogic() {
- return new UserBusinessLogic();
- }
-
- @Bean(name = "graph-lock-operation")
- public GraphLockOperation graphLockOperation() {
- return new GraphLockOperation();
- }
-
- @Bean(name = "titan-generic-dao")
- public TitanGenericDao titanGenericDao(@Qualifier("titan-client") TitanGraphClient titanGraphClient) {
- return new TitanGenericDao(titanGraphClient);
- }
-
- @Bean(name = "element-operation")
- public ElementOperation elementOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao) {
- return new ElementOperation(titanGenericDao);
- }
-
- @Bean(name = "group-operation")
- public GroupOperation groupOperation() {
- return new GroupOperation();
- }
-
- @Bean(name = "group-instance-operation")
- public GroupInstanceOperation groupInstanceOperation() {
- return new GroupInstanceOperation();
- }
-
- @Bean(name = "group-type-operation")
- public GroupTypeOperation groupTypeOperation(@Qualifier("titan-generic-dao") TitanGenericDao titanGenericDao, @Qualifier("property-operation") PropertyOperation propertyOperation) {
- return new GroupTypeOperation(titanGenericDao, propertyOperation);
- }
-
- @Bean(name = "tosca-operation-facade")
- public ToscaOperationFacade toscaOperationFacade() {
- return new ToscaOperationFacade();
- }
-
- @Bean(name = "distribution-engine")
- public DistributionEngine distributionEngine() {
- return null;
- }
-
- @Bean(name = "audit-cassandra-dao")
- public AuditCassandraDao auditCassandraDao() {
- return new AuditCassandraDao();
- }
-
- @Bean(name = "service-component-instance-business-logic")
- public ServiceComponentInstanceBusinessLogic serviceComponentInstanceBusinessLogic() {
- return new ServiceComponentInstanceBusinessLogic();
- }
-
- @Bean("tosca-export-handler")
- public ToscaExportHandler toscaExportHandler() {
- return new ToscaExportHandler();
- }
-
- @Bean(name = "component-instance-operation")
- public ComponentInstanceOperation componentInstanceOperation() {
- return new ComponentInstanceOperation();
- }
-
- @Bean(name = "additional-information-business-logic")
- public AdditionalInformationBusinessLogic additionalInformationBusinessLogic() {
- return new AdditionalInformationBusinessLogic();
- }
-
- @Bean(name = "auditing-manager")
- public AuditingManager auditingManager() {
- return new AuditingManager();
- }
-
- @Bean(name = "auditing-dao")
- public AuditingDao auditingDao() {
- return new AuditingDao();
- }
-
- @Bean(name = "elasticsearch-client", initMethod = "initialize")
- public ElasticSearchClient elasticSearchClient() {
- return new ElasticSearchClient();
- }
-
- @Bean(name = "csar-utils")
- public CsarUtils csarUtils() {
- return new CsarUtils();
- }
-
- @Bean(name = "service-distribution-artifacts-builder")
+ @Bean(name = "serviceDistributionArtifactsBuilder")
public ServiceDistributionArtifactsBuilder serviceDistributionArtifactsBuilder() {
return new ServiceDistributionArtifactsBuilder();
}
-
- @Bean(name = "product-business-logic")
- public ProductBusinessLogic productBusinessLogic() {
- return null;
- }
-
- @Bean(name = "dataDefinitionsValuesMergingBusinessLogic")
- public DataDefinitionsValuesMergingBusinessLogic dataDefinitionsValuesMergingBusinessLogic() {
- return new DataDefinitionsValuesMergingBusinessLogic();
- }
-
- @Bean(name = "artifacts-resolver")
- public ArtifactsResolver artifactsResolver() {
- return new ArtifactResolverImpl();
- }
-
- @Bean(name = "InputsValuesMergingBusinessLogic")
- public InputsValuesMergingBusinessLogic InputsValuesMergingBusinessLogic(){
- return new InputsValuesMergingBusinessLogic();
- }
-
- @Bean(name = "GenericTypeBusinessLogic")
- public GenericTypeBusinessLogic genericTypeBusinessLogic(){
- return new GenericTypeBusinessLogic();
- }
-
- @Bean(name ="componentInstanceMergeDataBusinessLogic")
- public ComponentInstanceMergeDataBusinessLogic componentInstanceMergeDataBusinessLogic(){
- return new ComponentInstanceMergeDataBusinessLogic();
- }
-
- @Bean(name ="heatEnvArtifactsMergeBusinessLogic")
- public HeatEnvArtifactsMergeBusinessLogic heatEnvArtifactsMergeBusinessLogic(){
- return new HeatEnvArtifactsMergeBusinessLogic();
- }
@Bean(name = "elasticsearchConfig")
public PropertiesFactoryBean mapper() {
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineClusterHealthMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineClusterHealthMock.java
new file mode 100644
index 0000000..4d34f50
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineClusterHealthMock.java
@@ -0,0 +1,16 @@
+package org.openecomp.sdc.asdctool.migration.config.mocks;
+
+import org.openecomp.sdc.be.components.distribution.engine.DistributionEngineClusterHealth;
+import org.springframework.stereotype.Component;
+
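+// No-op mock that keeps distribution engine cluster health checks from starting in the migration Spring context.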
+@Component("distribution-engine-cluster-health")
+public class DistributionEngineClusterHealthMock extends DistributionEngineClusterHealth {
+
+ @Override
+ protected void init() {
+ }
+
+ @Override
+ protected void destroy() {
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
new file mode 100644
index 0000000..36f6814
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DistributionEngineMock.java
@@ -0,0 +1,68 @@
+package org.openecomp.sdc.asdctool.migration.config.mocks;
+
+import fj.data.Either;
+import org.openecomp.sdc.be.components.distribution.engine.IDistributionEngine;
+import org.openecomp.sdc.be.components.distribution.engine.INotificationData;
+import org.openecomp.sdc.be.components.impl.ActivationRequestInformation;
+import org.openecomp.sdc.be.dao.api.ActionStatus;
+import org.openecomp.sdc.be.externalapi.servlet.representation.ServiceDistributionReqInfo;
+import org.openecomp.sdc.be.model.Service;
+import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
+import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
+import org.openecomp.sdc.exception.ResponseFormat;
+import org.springframework.stereotype.Component;
+
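+// Stub IDistributionEngine for the migration context; distribution is never triggered, so all operations are no-ops.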
+@Component("distributionEngine")
+public class DistributionEngineMock implements IDistributionEngine {
+ @Override
+ public boolean isActive() {
+ return false;
+ }
+
+ @Override
+ public ActionStatus notifyService(String distributionId, Service service, INotificationData notificationData, String envName, String userId, String modifierName) {
+ return null;
+ }
+
+ @Override
+ public ActionStatus notifyService(String distributionId, Service service, INotificationData notificationData, String envId, String envName, String userId, String modifierName) {
+ return null;
+ }
+
+ @Override
+ public StorageOperationStatus isEnvironmentAvailable(String envName) {
+ return null;
+ }
+
+ @Override
+ public StorageOperationStatus isEnvironmentAvailable() {
+ return null;
+ }
+
+ @Override
+ public void disableEnvironment(String envName) {
+
+ }
+
+ @Override
+ public StorageOperationStatus isReadyForDistribution(Service service, String envName) {
+ return null;
+ }
+
+ @Override
+ public INotificationData buildServiceForDistribution(Service service, String distributionId, String workloadContext) {
+ return null;
+ }
+
+ @Override
+ public StorageOperationStatus verifyServiceHasDeploymentArtifacts(Service service) {
+ return null;
+ }
+
+ @Override
+ public OperationalEnvironmentEntry getEnvironmentById(String opEnvId) {
+ return null;
+ }
+
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DmaapHealthCheckMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DmaapHealthCheckMock.java
new file mode 100644
index 0000000..7315bad
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/DmaapHealthCheckMock.java
@@ -0,0 +1,16 @@
+package org.openecomp.sdc.asdctool.migration.config.mocks;
+
+import org.openecomp.sdc.be.components.distribution.engine.DmaapHealth;
+import org.springframework.stereotype.Component;
+
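+// Mock DmaapHealth that skips initialization, since DMaaP health checks are not needed by the migration tool.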
+@Component("dmaapHealth")
+public class DmaapHealthCheckMock extends DmaapHealth {
+ @Override
+ public DmaapHealth init() {
+ return null;
+ }
+
+ @Override
+ protected void destroy() {
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java
new file mode 100644
index 0000000..c7ef45e
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/config/mocks/HealthCheckBusinessLogicMock.java
@@ -0,0 +1,21 @@
+package org.openecomp.sdc.asdctool.migration.config.mocks;
+
+import org.openecomp.sdc.be.components.health.HealthCheckBusinessLogic;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+
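+// Mock HealthCheckBusinessLogic with empty init() and destroy(), so no health checks run during migration.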
+@Component("healthCheckBusinessLogic")
+public class HealthCheckBusinessLogicMock extends HealthCheckBusinessLogic {
+
+ @Override
+ @PostConstruct
+ public void init() {
+
+ }
+
+ @Override
+ protected void destroy() {
+
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
index 003a27a..03d5adf 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/DBVersion.java
@@ -13,7 +13,7 @@
/**
* The current db version. should be tested against real db to verify it is compatible to the db version
*/
- public static final DBVersion CURRENT_VERSION = new DBVersion(1710, 0);
+ public static final DBVersion CURRENT_VERSION = new DBVersion(1802, 0);
private DBVersion(BigInteger major, BigInteger minor) {
this.major = major;
@@ -62,8 +62,12 @@
@Override
public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
DBVersion dbVersion = (DBVersion) o;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/MigrationResult.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/MigrationResult.java
index 8c4c090..9024e80 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/MigrationResult.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/core/task/MigrationResult.java
@@ -21,6 +21,19 @@
this.migrationStatus = migrationStatus;
}
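+ /** Creates a result with status COMPLETED. */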
+ public static MigrationResult success() {
+ MigrationResult success = new MigrationResult();
+ success.setMigrationStatus(MigrationResult.MigrationStatus.COMPLETED);
+ return success;
+ }
+
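+ /** Creates a result with status FAILED and the given error message. */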
+ public static MigrationResult error(String msg) {
+ MigrationResult error = new MigrationResult();
+ error.setMigrationStatus(MigrationStatus.FAILED);
+ error.setMsg(msg);
+ return error;
+ }
+
public enum MigrationStatus {
COMPLETED,
COMPLETED_WITH_ERRORS,
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
index dc2114d..de6c66d 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/main/MigrationMenu.java
@@ -1,34 +1,27 @@
package org.openecomp.sdc.asdctool.migration.main;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.DefaultParser;
-import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.openecomp.sdc.asdctool.configuration.ConfigurationUploader;
+import org.openecomp.sdc.asdctool.cli.CLIToolData;
+import org.openecomp.sdc.asdctool.cli.SpringCLITool;
import org.openecomp.sdc.asdctool.migration.config.MigrationSpringConfig;
import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+import org.springframework.context.support.AbstractApplicationContext;
-public class MigrationMenu {
+public class MigrationMenu extends SpringCLITool {
- private final static Logger LOGGER = LoggerFactory.getLogger(MigrationMenu.class);
+ private static final Logger LOGGER = LoggerFactory.getLogger(MigrationMenu.class);
public static void main(String[] args) {
- CommandLine commandLine = initCmdLineOptions(args);
- String appConfigDir = commandLine.getOptionValue("c");
- boolean enforceAll = commandLine.hasOption("e");
- ConfigurationUploader.uploadConfigurationFiles(appConfigDir);
- AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(MigrationSpringConfig.class);
- doMigrate(enforceAll, context);
-
+ MigrationMenu migrationMenu = new MigrationMenu();
+ CLIToolData cliToolData = migrationMenu.init(args);
+ boolean enforceAll = cliToolData.getCommandLine().hasOption("e");
+ migrationMenu.doMigrate(enforceAll, cliToolData.getSpringApplicationContext());
}
- private static void doMigrate(boolean enforceAll, AnnotationConfigApplicationContext context) {
+ private void doMigrate(boolean enforceAll, AbstractApplicationContext context) {
SdcMigrationTool migrationTool = context.getBean(SdcMigrationTool.class);
boolean migrate = migrationTool.migrate(enforceAll);
if (migrate) {
@@ -40,38 +33,19 @@
}
}
- private static CommandLine initCmdLineOptions(String[] args) {
- Options options = buildCmdLineOptions();
- CommandLineParser parser = new DefaultParser();
- try {
- // parse the command line arguments
- return parser.parse( options, args );
- }
- catch( ParseException exp ) {
- // oops, something went wrong
- System.err.println( "Parsing failed. Reason: " + exp.getMessage() );
- usageAndExit(options);
- }
- return null;
- }
-
- private static void usageAndExit(Options options) {
- HelpFormatter formatter = new HelpFormatter();
- formatter.printHelp( "yy", options );
- System.exit(1);
- }
-
- private static Options buildCmdLineOptions() {
- Option configPath = buildConfigPathOption();
-
+ @Override
+ protected Options buildCmdLineOptions() {
+ Options options = super.buildCmdLineOptions();
Option enforceAll = buildEnforceAllOption();
-
- Options options = new Options();
- options.addOption(configPath);
options.addOption(enforceAll);
return options;
}
+ @Override
+ protected String commandName() {
+ return "sdc-migration";
+ }
+
private static Option buildEnforceAllOption() {
return Option.builder("e")
.longOpt("enforceAll")
@@ -79,13 +53,8 @@
.build();
}
- private static Option buildConfigPathOption() {
- return Option.builder("c")
- .longOpt("configFolderPath")
- .required()
- .hasArg()
- .desc("path to sdc configuration folder - required")
- .build();
+ @Override
+ protected Class<?> getSpringConfigurationClass() {
+ return MigrationSpringConfig.class;
}
-
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
index 22add31..9147ac0 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/MigrationResolver.java
@@ -1,6 +1,7 @@
package org.openecomp.sdc.asdctool.migration.resolver;
+import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
import java.util.List;
import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
index 182996f..9f2f600 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/resolver/SpringBeansMigrationResolver.java
@@ -1,6 +1,10 @@
package org.openecomp.sdc.asdctool.migration.resolver;
+import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
+import org.openecomp.sdc.asdctool.migration.core.task.IMigrationStage;
+import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
+import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
index 166512d..ca8c4c8 100644
--- a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710.java
@@ -1,20 +1,15 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1710;
-import java.util.ArrayList;
-import java.util.EnumMap;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.stream.Collectors;
-
+import com.google.common.collect.Lists;
+import fj.data.Either;
import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
import org.openecomp.sdc.asdctool.migration.core.task.PostMigration;
import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
+import org.openecomp.sdc.be.components.impl.ComponentInstanceBusinessLogic;
import org.openecomp.sdc.be.components.impl.ResourceBusinessLogic;
-import org.openecomp.sdc.be.components.impl.ServiceComponentInstanceBusinessLogic;
import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction;
import org.openecomp.sdc.be.components.lifecycle.LifecycleChangeInfoWithAction.LifecycleChanceActionEnum;
@@ -33,6 +28,8 @@
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.impl.ComponentsUtils;
import org.openecomp.sdc.be.model.ComponentInstance;
+import org.openecomp.sdc.be.model.ComponentInstanceProperty;
+import org.openecomp.sdc.be.model.ComponentParametersView;
import org.openecomp.sdc.be.model.LifeCycleTransitionEnum;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
import org.openecomp.sdc.be.model.Resource;
@@ -49,11 +46,22 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
-import fj.data.Either;
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
-@Component
+//@Component
public class UpgradeMigration1710 implements PostMigration {
-
+
+ private static final String SERVICE_UUID_RPOPERTY = "providing_service_uuid";
+
+ private static final String SERVICE_INVARIANT_UUID_RPOPERTY = "providing_service_invariant_uuid";
+
private static final String UNKNOWN = "UNKNOWN";
private static final String CHECKOUT_MESSAGE = "checkout upon upgrade migration";
@@ -70,53 +78,48 @@
@Autowired
private TitanDao titanDao;
-
+
@Autowired
private ToscaOperationFacade toscaOperationFacade;
-
+
@Autowired
private LifecycleBusinessLogic lifecycleBusinessLogic;
-
+
@Autowired
private IUserAdminOperation userAdminOperation;
@Autowired
private ResourceBusinessLogic resourceBusinessLogic;
-
+
@Autowired
private CsarOperation csarOperation;
-
+
@Autowired
- private ServiceComponentInstanceBusinessLogic componentInstanceBusinessLogic;
+ private ComponentInstanceBusinessLogic componentInstanceBusinessLogic;
@Autowired
private ComponentsUtils componentsUtils;
- private final XlsOutputHandler outputHandler = new XlsOutputHandler("COMPONENT TYPE", "COMPONENT NAME", "COMPONENT UUID", "COMPONENT UNIQUE_ID", "UPGRADE STATUS", "DESCRIPTION");
+ private XlsOutputHandler outputHandler = new XlsOutputHandler("COMPONENT TYPE", "COMPONENT NAME", "COMPONENT UUID", "COMPONENT UNIQUE_ID", "UPGRADE STATUS", "DESCRIPTION");
private User user = null;
- private final LifecycleChangeInfoWithAction changeInfo = new LifecycleChangeInfoWithAction(CHECKOUT_MESSAGE, LifecycleChanceActionEnum.UPGRADE_MIGRATION);
+ private final LifecycleChangeInfoWithAction changeInfo = new LifecycleChangeInfoWithAction(CHECKOUT_MESSAGE, LifecycleChanceActionEnum.UPGRADE_MIGRATION);
- private final Map<String,GraphVertex> latestGenericTypes = new HashMap<>();
+ private final Map<String, GraphVertex> latestGenericTypes = new HashMap<>();
+
+ private final Map<String, String> latestOriginResourceVersions = new HashMap<>();
+
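+ // component unique ids collected during the main upgrade pass and upgraded in dedicated phases at the end of the migration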
+ private final List<String> proxyServiceContainers = new ArrayList<>();
+
+ private final List<String> vfAllottedResources = new ArrayList<>();
+
+ private final List<String> allottedVfContainers = new ArrayList<>();
private boolean isVfcUpgradeRequired = false;
private boolean skipIfUpgradeVfFailed = true;
- /** below methods is defined on package level for testing
- * where Spring object injection is not used **/
- void setUserAdminOperation(IUserAdminOperation userAdminOperation) { this.userAdminOperation = userAdminOperation; }
-
- void setTitanDao(TitanDao titanDao) { this.titanDao = titanDao; }
-
- void setTosckaOperationFacade(ToscaOperationFacade toscaOperationFacade) { this.toscaOperationFacade = toscaOperationFacade; }
-
- void setLifecycleBusinessLogic(LifecycleBusinessLogic lifecycleBusinessLogic) { this.lifecycleBusinessLogic = lifecycleBusinessLogic; }
-
- void setComponentsUtils(ComponentsUtils componentsUtils) { this.componentsUtils = componentsUtils; }
-
-
/***********************************************/
@Override
@@ -124,7 +127,7 @@
return "Upgrade migration 1710 - post migration task, which is dedicated to upgrade all latest certified (and not checked out) Node types, VFs and Services. ";
}
- private enum UpgradeStatus{
+ private enum UpgradeStatus {
UPGRADED,
NOT_UPGRADED
}
@@ -134,9 +137,8 @@
LOGGER.info("Starting upgrade migration 1710 process. ");
MigrationResult migrationResult = new MigrationResult();
- try{
- boolean result = true;
-
+ boolean result = true;
+ try {
isVfcUpgradeRequired = !ConfigurationManager.getConfigurationManager().getConfiguration().getSkipUpgradeVSPsFlag();
skipIfUpgradeVfFailed = ConfigurationManager.getConfigurationManager().getConfiguration().getSkipUpgradeFailedVfs();
final String userId = ConfigurationManager.getConfigurationManager().getConfiguration().getAutoHealingOwner();
@@ -144,23 +146,34 @@
Either<User, ActionStatus> userReq = userAdminOperation.getUserData(userId, false);
if (userReq.isRight()) {
result = false;
- LOGGER.error("Upgrade migration was failed. User {} resolve failed: {} ", userId, userReq.right().value());
- }
- else {
+ LOGGER.error("Upgrade migration failed. User {} resolve failed: {} ", userId, userReq.right().value());
+ } else {
user = userReq.left().value();
- LOGGER.info("User {} will perform upgrade operation", user.toString());
+ LOGGER.info("User {} will perform upgrade operation", user.getUserId());
}
-
- if(result){
+ if (result) {
result = upgradeNodeTypes();
}
- if(result){
+ if (result) {
result = upgradeVFs();
}
- if(result){
+ if (result) {
upgradeServices();
}
if(result){
+ upgradeProxyServiceContainers();
+ }
+ if(result){
+ upgradeAllottedVFs();
+ }
+ if(result){
+ upgradeAllottedVfContainers();
+ }
+ } catch (Exception e) {
+ result = false;
+ LOGGER.error("Error occurred {}. ", e);
+ } finally {
+ if (result) {
LOGGER.info("Upgrade migration 1710 has been successfully finished. ");
titanDao.commit();
migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.COMPLETED);
@@ -169,43 +182,56 @@
titanDao.rollback();
migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.FAILED);
}
- } catch(Exception e){
- LOGGER.error("Upgrade migration 1710 was failed. ", e);
- titanDao.rollback();
- migrationResult.setMigrationStatus(MigrationResult.MigrationStatus.FAILED);
- } finally {
outputHandler.writeOutput();
}
return migrationResult;
}
- private StorageOperationStatus upgradeServices() {
+ private void upgradeAllottedVfContainers() {
+ LOGGER.info("Starting upgrade proxy {} service containers upon upgrade migration 1710 process. ", allottedVfContainers.size());
+ for(String currUid : allottedVfContainers){
+ upgradeServiceAndCommitIfNeeded(currUid, component -> true);
+ }
+ }
+
+ private StorageOperationStatus upgradeServices() {
LOGGER.info("Starting upgrade services upon upgrade migration 1710 process. ");
- Map<String, String> latestOriginResourceVersions = new HashMap<>();
Either<List<String>, TitanOperationStatus> getServicesRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.SERVICE);
- if(getServicesRes.isRight()){
+ if (getServicesRes.isRight()) {
return StorageOperationStatus.GENERAL_ERROR;
}
- for(String currUid : getServicesRes.left().value()){
- try{
- if(handleService(currUid, latestOriginResourceVersions)){
- titanDao.commit();
- } else {
- processComponentUpgradeFailure(ComponentTypeEnum.SERVICE.name(), currUid, "");
- }
- } catch(Exception e){
- processComponentUpgradeFailure(ComponentTypeEnum.SERVICE.name(), currUid, e.getMessage());
- }
+ for (String currUid : getServicesRes.left().value()) {
+ upgradeServiceAndCommitIfNeeded(currUid, this::shouldUpgrade);
}
return StorageOperationStatus.OK;
}
- private void processComponentUpgradeFailure(final String name, final String currUid, final String reason) {
- LOGGER.error("Failed to upgrade {} with uniqueId {} due to a reason {}. ", name, currUid, reason);
- titanDao.rollback();
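+ // upgrades a single service and commits the graph transaction on success, otherwise rolls it back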
+ private void upgradeServiceAndCommitIfNeeded(String currUid, Predicate<org.openecomp.sdc.be.model.Component> shouldUpgrade) {
+ boolean result = true;
+ try {
+ result = handleService(currUid, shouldUpgrade);
+ } catch (Exception e) {
+ result = false;
+ LOGGER.error("Failed to upgrade Service with uniqueId {} due to a reason {}. ", currUid, e);
+ }
+ finally {
+ if (result) {
+ titanDao.commit();
+ }
+ else {
+ titanDao.rollback();
+ }
+ }
}
+
+ private void upgradeProxyServiceContainers() {
+ LOGGER.info("Starting upgrade proxy service containers upon upgrade migration 1710 process. ");
+ for(String currUid : proxyServiceContainers){
+ upgradeServiceAndCommitIfNeeded(currUid, component -> true);
+ }
+ }
- private boolean handleService(String uniqueId, Map<String, String> latestOriginResourceVersions) {
+ private boolean handleService(String uniqueId, Predicate<org.openecomp.sdc.be.model.Component> shouldUpgrade) {
LOGGER.info("Starting upgrade Service with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getServiceRes = toscaOperationFacade.getToscaElement(uniqueId);
if(getServiceRes.isRight()){
@@ -231,43 +257,53 @@
if(latestVersionExists(latestGenericTypes.get(derivedFromGenericType), getServiceRes.left().value().getDerivedFromGenericVersion())){
return upgradeService(getServiceRes.left().value());
}
- if(!collectLatestOriginResourceVersions(getServiceRes.left().value(), latestOriginResourceVersions)){
+ if(!collectLatestOriginResourceVersions(getServiceRes.left().value())){
return false;
}
- if(shouldUpgrade(getServiceRes.left().value(), latestOriginResourceVersions)){
+ if(shouldUpgrade.test(getServiceRes.left().value())){
return upgradeService(getServiceRes.left().value());
}
outputHandler.addRecord(getServiceRes.left().value().getComponentType().name(), getServiceRes.left().value().getName(), getServiceRes.left().value().getInvariantUUID(), getServiceRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), UpgradeStatus.NOT_UPGRADED);
return true;
}
- private boolean collectLatestOriginResourceVersions(org.openecomp.sdc.be.model.Component component, Map<String, String> latestOriginResourceVersions) {
- if(CollectionUtils.isNotEmpty(component.getComponentInstances())){
- for(ComponentInstance instance : component.getComponentInstances()){
- if(instance.getOriginType() != OriginTypeEnum.ServiceProxy && !latestOriginResourceVersions.containsKey(instance.getToscaComponentName())){
- VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
- Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
- if(getOriginRes.isRight()){
- LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "toscaOperationFacade.getLatestCertifiedByToscaResourceName", getOriginRes.right().value());
- outputHandler.addRecord( component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getOriginRes.right().value());
- return false;
- }
- latestOriginResourceVersions.put(instance.getToscaComponentName(), getOriginRes.left().value().getVersion());
+ private boolean collectLatestOriginResourceVersions(org.openecomp.sdc.be.model.Component component) {
+ if (CollectionUtils.isNotEmpty(component.getComponentInstances())) {
+ for (ComponentInstance instance : component.getComponentInstances()) {
+ if (instance.getOriginType() != OriginTypeEnum.ServiceProxy && !latestOriginResourceVersions.containsKey(instance.getToscaComponentName()) && !addComponent(component, instance)) {
+ return false;
}
}
}
return true;
}
- private boolean shouldUpgrade(org.openecomp.sdc.be.model.Component component, Map<String, String> latestOriginResources) {
+ private boolean addComponent(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
+ VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
+ Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
+ if (getOriginRes.isRight()) {
+ LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "toscaOperationFacade.getLatestCertifiedByToscaResourceName", getOriginRes.right().value());
+ outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getOriginRes.right().value());
+ return false;
+ }
+ latestOriginResourceVersions.put(instance.getToscaComponentName(), getOriginRes.left().value().getVersion());
+ return true;
+ }
+
+ private boolean shouldUpgrade(org.openecomp.sdc.be.model.Component component) {
boolean shouldUpgrade = false;
if(CollectionUtils.isNotEmpty(component.getComponentInstances())){
for(ComponentInstance instance : component.getComponentInstances()){
if(instance.getOriginType() == OriginTypeEnum.ServiceProxy){
- LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains Service proxy instance {}, than the service should be upgraded. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName());
- shouldUpgrade = true;
+ LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains Service proxy instance {}, than the service should be upgraded in the end of the upgrading proccess. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName());
+ proxyServiceContainers.add(component.getUniqueId());
+ shouldUpgrade = false;
+ break;
}
- if(isGreater(latestOriginResources.get(instance.getToscaComponentName()), instance.getComponentVersion())){
+ if(isAllottedResource(instance.getActualComponentUid())){
+ allottedVfContainers.add(component.getUniqueId());
+ }
+ if(isGreater(latestOriginResourceVersions.get(instance.getToscaComponentName()), instance.getComponentVersion())){
LOGGER.info("The service with name {}, invariantUUID {}, version {}, contains instance {} from outdated version of origin {} {} , than the service should be upgraded. ", component.getName(), component.getInvariantUUID(), component.getVersion(), instance.getName(), instance.getComponentName(), instance.getComponentVersion());
shouldUpgrade = true;
}
@@ -282,19 +318,19 @@
LOGGER.info("Starting upgrade Service with name {}, invariantUUID {}, version {} upon upgrade migration 1710 process. ", serviceName, service.getInvariantUUID(), service.getVersion());
LOGGER.info("Starting to perform check out of service {}. ", serviceName);
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(service.getComponentType(), service.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
- if(checkouRes.isRight()){
+ if (checkouRes.isRight()) {
LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "lifecycleBusinessLogic.changeComponentState", checkouRes.right().value().getFormattedMessage());
outputHandler.addRecord(service.getComponentType().name(), serviceName, serviceUuid, service.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage());
return false;
}
- Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes = updateComposition(checkouRes.left().value());
- if(updateCompositionRes.isRight()){
+ Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes = updateComposition(checkouRes.left().value());
+ if (updateCompositionRes.isRight()) {
LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage());
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage());
return false;
}
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
- if(certifyRes.isRight()){
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
+ if (certifyRes.isRight()) {
LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, service.getComponentType().getValue(), serviceName, service.getInvariantUUID(), service.getVersion(), "performFullCertification", certifyRes.right().value().getFormattedMessage());
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
return false;
@@ -305,9 +341,9 @@
private Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateComposition(org.openecomp.sdc.be.model.Component component) {
Either<ComponentInstance, ResponseFormat> upgradeInstanceRes;
- for(ComponentInstance instance : component.getComponentInstances()){
+ for (ComponentInstance instance : component.getComponentInstances()) {
upgradeInstanceRes = upgradeInstance(component, instance);
- if(upgradeInstanceRes.isRight()) {
+ if (upgradeInstanceRes.isRight()) {
LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion(), "upgradeInstance", upgradeInstanceRes.right().value().getFormattedMessage());
outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), upgradeInstanceRes.right().value().getFormattedMessage());
return Either.right(upgradeInstanceRes.right().value());
@@ -319,43 +355,94 @@
private Either<ComponentInstance, ResponseFormat> upgradeInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance) {
LOGGER.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
ComponentInstance newComponentInstance = new ComponentInstance(instance);
- if(instance.getOriginType() == OriginTypeEnum.ServiceProxy){
+ if (instance.getOriginType() == OriginTypeEnum.ServiceProxy) {
return upgradeServiceProxyInstance(component, instance, newComponentInstance);
}
return upgradeResourceInstance(component, instance, newComponentInstance);
}
- private Either<ComponentInstance, ResponseFormat> upgradeResourceInstance(org.openecomp.sdc.be.model.Component component,
- ComponentInstance instance, ComponentInstance newComponentInstance) {
+ private Either<ComponentInstance, ResponseFormat> upgradeResourceInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
+
LOGGER.info("Starting upgrade {} instance {} upon upgrade migration 1710 process. ", component.getComponentType().getValue(), instance.getName());
+ Either<ComponentInstance, ResponseFormat> upgradeInstanceRes = null;
VertexTypeEnum vertexType = ModelConverter.getVertexType(instance.getOriginType().name());
Either<Resource, StorageOperationStatus> getOriginRes = toscaOperationFacade.getLatestCertifiedByToscaResourceName(instance.getToscaComponentName(), vertexType, JsonParseFlagEnum.ParseMetadata);
if(getOriginRes.isRight()){
LOGGER.info("Upgrade of {} instance {} upon upgrade migration 1710 process failed due to a reason {}. ",
component.getComponentType().getValue(), instance.getName(), getOriginRes.right().value());
- return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
+ upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(getOriginRes.right().value(), instance.getOriginType().getComponentType())));
}
- newComponentInstance.setComponentName(getOriginRes.left().value().getName());
- newComponentInstance.setComponentUid(getOriginRes.left().value().getUniqueId());
- newComponentInstance.setComponentVersion(getOriginRes.left().value().getVersion());
- newComponentInstance.setToscaComponentName(((Resource)getOriginRes.left().value()).getToscaResourceName());
- if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
- return changeAssetVersion(component, instance, newComponentInstance);
+ if(upgradeInstanceRes == null){
+ newComponentInstance.setComponentName(getOriginRes.left().value().getName());
+ newComponentInstance.setComponentUid(getOriginRes.left().value().getUniqueId());
+ newComponentInstance.setComponentVersion(getOriginRes.left().value().getVersion());
+ newComponentInstance.setToscaComponentName(((Resource)getOriginRes.left().value()).getToscaResourceName());
+ if(isGreater(getOriginRes.left().value().getVersion(), instance.getComponentVersion())){
+ upgradeInstanceRes = changeAssetVersion(component, instance, newComponentInstance);
+ }
+ if((upgradeInstanceRes == null || upgradeInstanceRes.isLeft()) && isAllottedResource(instance.getComponentUid()) && MapUtils.isNotEmpty(component.getComponentInstancesProperties())){
+ ComponentInstance instanceToUpdate = upgradeInstanceRes == null ? instance : upgradeInstanceRes.left().value();
+ upgradeInstanceRes = Either.left(updateServiceUuidProperty(component, instanceToUpdate, component.getComponentInstancesProperties().get(instance.getUniqueId())));
+ }
}
-
//upgrade nodes contained by CVFC
- if(isVfcUpgradeRequired && newComponentInstance.getOriginType() == OriginTypeEnum.CVFC &&
- !upgradeVf(getOriginRes.left().value().getUniqueId())) {
- return Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR));
+ if(upgradeInstanceRes == null && isVfcUpgradeRequired && newComponentInstance.getOriginType() == OriginTypeEnum.CVFC &&
+ !upgradeVf(getOriginRes.left().value().getUniqueId(), false)) {
+ upgradeInstanceRes = Either.right(componentsUtils.getResponseFormat(ActionStatus.GENERAL_ERROR));
+ }
+ if(upgradeInstanceRes == null){
+ upgradeInstanceRes = Either.left(instance);
}
LOGGER.info("Upgrade of {} instance {} upon upgrade migration 1710 process finished successfully. ",
- component.getComponentType().getValue(), instance.getName());
- return Either.left(instance);
+ component.getComponentType().getValue(), instance.getName());
+ return upgradeInstanceRes;
}
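+ // for allotted resource instances, refreshes the providing_service_uuid property value to the UUID of the latest certified version of the providing service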
+ private ComponentInstance updateServiceUuidProperty(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, List<ComponentInstanceProperty> instanceProperties){
+ if(isAllottedResource(instance.getComponentUid()) && instanceProperties != null){
+ Optional<ComponentInstanceProperty> propertyUuid = instanceProperties.stream().filter(p -> p.getName().equals(SERVICE_UUID_PROPERTY)).findFirst();
+ Optional<ComponentInstanceProperty> propertyInvariantUuid = instanceProperties.stream().filter(p -> p.getName().equals(SERVICE_INVARIANT_UUID_PROPERTY)).findFirst();
+ if(propertyUuid.isPresent() && propertyInvariantUuid.isPresent()){
+ String serviceInvariantUUID = propertyInvariantUuid.get().getValue();
+ Either<List<GraphVertex>, TitanOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(serviceInvariantUUID);
+ if (getLatestOriginServiceRes.isRight()) {
+ return instance;
+ }
+ propertyUuid.get().setValue((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.UUID));
+ componentInstanceBusinessLogic.createOrUpdatePropertiesValues(component.getComponentType(), component.getUniqueId(), instance.getUniqueId(), Lists.newArrayList(propertyUuid.get()), user.getUserId())
+ .right()
+ .forEach(e -> LOGGER.debug("Failed to update property {} of the instance {} of the component {}. ", SERVICE_UUID_PROPERTY, instance.getUniqueId(), component.getName()));
+ }
+ }
+ return instance;
+ }
+
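+ // a component is considered an allotted resource when its first category is named "Allotted Resource"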
+ private boolean isAllottedResource(String uniqueId){
+ ComponentParametersView filters = new ComponentParametersView(true);
+ filters.setIgnoreCategories(false);
+ Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getResourceRes = toscaOperationFacade.getToscaElement(uniqueId, filters);
+ if(getResourceRes.isRight()){
+ return false;
+ }
+ if(getResourceRes.left().value().getCategories() != null && getResourceRes.left().value().getCategories().get(0)!= null){
+ return "Allotted Resource".equals(getResourceRes.left().value().getCategories().get(0).getName());
+ }
+ return false;
+ }
+
+ private boolean isAllottedVf(org.openecomp.sdc.be.model.Component component){
+ if(component.getComponentType() != ComponentTypeEnum.RESOURCE){
+ return false;
+ }
+ if(((Resource)component).getResourceType() != ResourceTypeEnum.VF){
+ return false;
+ }
+ return isAllottedResource(component.getUniqueId());
+ }
+
private Either<ComponentInstance, ResponseFormat> upgradeServiceProxyInstance(org.openecomp.sdc.be.model.Component component, ComponentInstance instance, ComponentInstance newComponentInstance) {
Either<List<GraphVertex>, TitanOperationStatus> getLatestOriginServiceRes = getLatestCertifiedService(instance.getSourceModelInvariant());
- if(getLatestOriginServiceRes.isRight()){
+ if (getLatestOriginServiceRes.isRight()) {
return Either.right(componentsUtils.getResponseFormat(componentsUtils.convertFromStorageResponse(DaoStatusConverter.convertTitanStatusToStorageStatus(getLatestOriginServiceRes.right().value()), instance.getOriginType().getComponentType())));
}
newComponentInstance.setComponentVersion((String) getLatestOriginServiceRes.left().value().get(0).getJsonMetadataField(JsonPresentationFields.VERSION));
@@ -385,18 +472,18 @@
LOGGER.info("Starting upgrade node types upon upgrade migration 1710 process. ");
String toscaConformanceLevel = ConfigurationManager.getConfigurationManager().getConfiguration().getToscaConformanceLevel();
Map<String, List<String>> resourcesForUpgrade = ConfigurationManager.getConfigurationManager().getConfiguration().getResourcesForUpgrade();
- Map<String, org.openecomp.sdc.be.model.Component> upgradedNodeTypesMap = new HashMap<> ();
+ Map<String, org.openecomp.sdc.be.model.Component> upgradedNodeTypesMap = new HashMap<>();
List<String> nodeTypes;
- if(resourcesForUpgrade.containsKey(toscaConformanceLevel)){
- nodeTypes = resourcesForUpgrade.get(toscaConformanceLevel);
- if(nodeTypes !=null && !nodeTypes.isEmpty()){
+ if (resourcesForUpgrade.containsKey(toscaConformanceLevel)) {
+ nodeTypes = resourcesForUpgrade.get(toscaConformanceLevel);
+ if (nodeTypes != null && !nodeTypes.isEmpty()) {
Either<List<String>, TitanOperationStatus> getRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.NODE_TYPE, ComponentTypeEnum.RESOURCE);
- if(getRes.isRight()){
+ if (getRes.isRight()) {
return false;
}
List<String> allNodeTypes = getRes.left().value();
- for(String toscaResourceName: nodeTypes){
+ for (String toscaResourceName : nodeTypes) {
Either<List<GraphVertex>, StorageOperationStatus> status = getLatestByName(GraphPropertyEnum.TOSCA_RESOURCE_NAME, toscaResourceName);
if (status.isRight()) {
LOGGER.error("Failed to find node type {} ", toscaResourceName);
@@ -416,55 +503,75 @@
}
private boolean upgradeVFs() {
+ return upgradeVFs(false);
+ }
+
+ private boolean upgradeAllottedVFs() {
+ LOGGER.info("Starting upgrade {} allotted Vfs with upon upgrade migration 1710 process. ", vfAllottedResources.size());
+ return upgradeVFs(true);
+ }
+
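+ // when allottedVfsUpgrade is false, allotted VFs are only collected into vfAllottedResources and upgraded later by upgradeAllottedVFs()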
+ private boolean upgradeVFs(boolean allottedVfsUpgrade) {
LOGGER.info("Starting upgrade VFs upon upgrade migration 1710 process. ");
Either<List<String>, TitanOperationStatus> getVfsRes = getAllLatestCertifiedComponentUids(VertexTypeEnum.TOPOLOGY_TEMPLATE, ComponentTypeEnum.RESOURCE);
- if(getVfsRes.isRight()){
+ if (getVfsRes.isRight()) {
LOGGER.info(UPGRADE_VFS_FAILED);
return false;
}
for (String currUid : getVfsRes.left().value()) {
+ boolean result = true;
try {
- if (!upgradeVf(currUid)) {
- processComponentUpgradeFailure(ComponentTypeEnum.RESOURCE.name(), currUid, "");
- if (!skipIfUpgradeVfFailed) {
- LOGGER.info(UPGRADE_VFS_FAILED);
- return false;
- }
- }
- titanDao.commit();
- } catch (Exception e) {
- processComponentUpgradeFailure(ComponentTypeEnum.RESOURCE.name(), currUid, e.getMessage());
- if (!skipIfUpgradeVfFailed) {
- LOGGER.info(UPGRADE_VFS_FAILED);
+ result = upgradeVf(currUid, allottedVfsUpgrade);
+ if (!result && !skipIfUpgradeVfFailed) {
return false;
}
+ } catch (Exception e) {
+ LOGGER.error("The exception {} occured upon upgrade VFs. ", e);
+ result = false;
+ if (!skipIfUpgradeVfFailed) {
+ return false;
+ }
+ }
+ finally {
+ if (!result) {
+ LOGGER.error("Failed to upgrade RESOURCE with uniqueId {} ", currUid);
+ titanDao.rollback();
+ }
+ else {
+ LOGGER.info("RESOURCE upgrade finished successfully: uniqueId {} ", currUid);
+ titanDao.commit();
+ }
}
}
LOGGER.info("Upgrade VFs upon upgrade migration 1710 process finished successfully. ");
return true;
}
- private boolean upgradeVf(String uniqueId) {
+ private boolean upgradeVf(String uniqueId, boolean allottedVfsUpgrade) {
LOGGER.info("Starting upgrade VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
Either<String, StorageOperationStatus> latestVersionRes;
Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getRes = toscaOperationFacade.getToscaElement(uniqueId);
- if(getRes.isRight()){
+ if (getRes.isRight()) {
LOGGER.debug("Failed to fetch VF with uniqueId {} upon upgrade migration 1710 process. ", uniqueId);
outputHandler.addRecord(ComponentTypeEnum.RESOURCE.name(), UNKNOWN, UNKNOWN, uniqueId, MigrationResult.MigrationStatus.FAILED.name(), getRes.right().value());
return false;
}
- if(StringUtils.isNotEmpty(getRes.left().value().getCsarUUID())){
+ if(!allottedVfsUpgrade && isAllottedVf(getRes.left().value())){
+ vfAllottedResources.add(uniqueId);
+ return true;
+ }
+ if (StringUtils.isNotEmpty(getRes.left().value().getCsarUUID())) {
LOGGER.info("Going to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID());
latestVersionRes = csarOperation.getCsarLatestVersion(getRes.left().value().getCsarUUID(), user);
- if(latestVersionRes.isRight()){
+ if (latestVersionRes.isRight()) {
LOGGER.debug("Failed to fetch the latest version of VSP with csarUUID {} upon upgrade migration 1710 process. ", getRes.left().value().getCsarUUID());
- outputHandler.addRecord(getRes.left().value().getComponentType().name(), getRes.left().value().getName(), getRes.left().value().getUUID(), getRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(),latestVersionRes.right().value());
+ outputHandler.addRecord(getRes.left().value().getComponentType().name(), getRes.left().value().getName(), getRes.left().value().getUUID(), getRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), latestVersionRes.right().value());
return false;
}
- if(isGreater(latestVersionRes.left().value(), getRes.left().value().getCsarVersion())){
+ if (isGreater(latestVersionRes.left().value(), getRes.left().value().getCsarVersion())) {
return upgradeVfWithLatestVsp(getRes.left().value(), latestVersionRes);
}
- if (!isVfcUpgradeRequired){
+ if (!isVfcUpgradeRequired) {
LOGGER.warn("Warning: No need to upgrade VF with name {}, invariantUUID {}, version {} and VSP version {}. No new version of VSP. ", getRes.left().value().getName(), getRes.left().value().getInvariantUUID(), getRes.left().value().getVersion(), getRes.left().value().getCsarVersion());
}
}
@@ -473,9 +580,9 @@
private boolean upgradeVfWithLatestVsp(org.openecomp.sdc.be.model.Component vf, Either<String, StorageOperationStatus> latestVersionRes) {
LOGGER.info("Starting upgrade vf with name {}, invariantUUID {}, version {} and latest VSP version {} upon upgrade migration 1710 process. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value());
- LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", vf.getName(),vf.getInvariantUUID(), vf.getVersion());
+ LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion());
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(vf.getComponentType(), vf.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
- if(checkouRes.isRight()){
+ if (checkouRes.isRight()) {
outputHandler.addRecord(vf.getComponentType().name(), vf.getName(), vf.getUUID(), vf.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage());
return false;
}
@@ -485,13 +592,13 @@
resourceToUpdate.setDerivedFromGenericVersion(((Resource) checkouRes.left().value()).getDerivedFromGenericVersion());
resourceToUpdate.setCsarVersion(Double.toString(Double.parseDouble(latestVersionRes.left().value())));
Either<Resource, ResponseFormat> updateResourceFromCsarRes = resourceBusinessLogic.validateAndUpdateResourceFromCsar(resourceToUpdate, user, null, null, resourceToUpdate.getUniqueId());
- if(updateResourceFromCsarRes.isRight()){
+ if (updateResourceFromCsarRes.isRight()) {
outputHandler.addRecord(resourceToUpdate.getComponentType().name(), resourceToUpdate.getName(), resourceToUpdate.getUUID(), resourceToUpdate.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateResourceFromCsarRes.right().value().getFormattedMessage());
LOGGER.info("Failed to update vf with name {}, invariantUUID {}, version {} and latest VSP {}. ", vf.getName(), vf.getInvariantUUID(), vf.getVersion(), latestVersionRes.left().value());
return false;
}
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
- if(certifyRes.isRight()){
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
+ if (certifyRes.isRight()) {
LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), LifeCycleTransitionEnum.CERTIFY);
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
return false;
@@ -502,55 +609,55 @@
}
private boolean upgradeComponentWithLatestGeneric(org.openecomp.sdc.be.model.Component component) {
- String derivedFromGenericType = component.getDerivedFromGenericType();
+ String derivedFromGenericType = component.getDerivedFromGenericType();
String derivedFromGenericVersion = component.getDerivedFromGenericVersion();
org.openecomp.sdc.be.model.Component updatedComponent = component;
- if(StringUtils.isNotEmpty(derivedFromGenericType) && !latestGenericTypes.containsKey(derivedFromGenericType)){
+ if (StringUtils.isNotEmpty(derivedFromGenericType) && !latestGenericTypes.containsKey(derivedFromGenericType)) {
LOGGER.info("Starting upgrade vf with name {}, invariantUUID {}, version {}, latest derived from generic type {}, latest derived from generic version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType, derivedFromGenericVersion);
LOGGER.info("Starting to fetch latest generic node type {}. ", derivedFromGenericType);
Either<List<GraphVertex>, TitanOperationStatus> getDerivedRes = findDerivedResources(derivedFromGenericType);
- if(getDerivedRes.isRight()){
+ if (getDerivedRes.isRight()) {
outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), getDerivedRes.right().value());
LOGGER.info("Failed to upgrade component with name {}, invariantUUID {}, version {} and latest generic. Status is {}. ", component.getName(), component.getInvariantUUID(), component.getVersion(), derivedFromGenericType);
return false;
}
latestGenericTypes.put(derivedFromGenericType, getDerivedRes.left().value().get(0));
}
- if(StringUtils.isEmpty(derivedFromGenericType) ||
+ if (StringUtils.isEmpty(derivedFromGenericType) ||
latestVersionExists(latestGenericTypes.get(derivedFromGenericType), derivedFromGenericVersion) ||
- isVfcUpgradeRequired){
- if(StringUtils.isNotEmpty(derivedFromGenericType))
+ isVfcUpgradeRequired) {
+ if (StringUtils.isNotEmpty(derivedFromGenericType))
LOGGER.info("Newer version {} of derived from generic type {} exists. ", latestGenericTypes.get(derivedFromGenericType).getJsonMetadataField(JsonPresentationFields.VERSION), derivedFromGenericType);
else
LOGGER.info("The vf resource with name {}, invariantUUID {}, version {}, has an empty derivedFromGenericType field. ", component.getName(), component.getInvariantUUID(), component.getVersion());
LOGGER.info("Starting to perform check out of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion());
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
- if(checkouRes.isRight()){
+ if (checkouRes.isRight()) {
LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CHECKOUT);
outputHandler.addRecord(component.getComponentType().name(), component.getName(), component.getInvariantUUID(), component.getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), checkouRes.right().value().getFormattedMessage());
return false;
}
//update included VFCs, if it is required as per configuration
- if(isVfcUpgradeRequired && CollectionUtils.isNotEmpty(checkouRes.left().value().getComponentInstances())){
+ if (CollectionUtils.isNotEmpty(checkouRes.left().value().getComponentInstances())) {
LOGGER.info("VFC upgrade is required: updating components of vf with name {}, invariantUUID {}, version {}. ", component.getName(), component.getInvariantUUID(), component.getVersion());
- Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes =
- updateComposition(checkouRes.left().value());
- if(updateCompositionRes.isRight()){
+ Either<org.openecomp.sdc.be.model.Component, ResponseFormat> updateCompositionRes =
+ updateComposition(checkouRes.left().value());
+ if (updateCompositionRes.isRight()) {
LOGGER.error(FAILED_TO_UPGRADE_COMPONENT, checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getVersion(), "updateComposition", updateCompositionRes.right().value().getFormattedMessage());
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), updateCompositionRes.right().value().getFormattedMessage());
return false;
}
}
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> certifyRes = performFullCertification(checkouRes.left().value());
- if(certifyRes.isRight()){
+ if (certifyRes.isRight()) {
LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
outputHandler.addRecord(checkouRes.left().value().getComponentType().name(), checkouRes.left().value().getName(), checkouRes.left().value().getInvariantUUID(), checkouRes.left().value().getUniqueId(), MigrationResult.MigrationStatus.FAILED.name(), certifyRes.right().value().getFormattedMessage());
return false;
}
updatedComponent = certifyRes.left().value();
} else {
- LOGGER.info("The version {} of derived from generic type {} is up to date. No need to upgrade component with name {}, invariantUUID {} and version {}. ", latestGenericTypes.get(derivedFromGenericType), derivedFromGenericType,component.getName(), component.getInvariantUUID(), component.getVersion());
+ LOGGER.info("The version {} of derived from generic type {} is up to date. No need to upgrade component with name {}, invariantUUID {} and version {}. ", latestGenericTypes.get(derivedFromGenericType), derivedFromGenericType, component.getName(), component.getInvariantUUID(), component.getVersion());
}
LOGGER.info(UPGRADE_COMPONENT_SUCCEEDED, component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
outputHandler.addRecord(updatedComponent.getComponentType().name(), updatedComponent.getName(), updatedComponent.getUUID(), updatedComponent.getUniqueId(), MigrationResult.MigrationStatus.COMPLETED.name(), updatedComponent.equals(component) ? UpgradeStatus.NOT_UPGRADED : UpgradeStatus.UPGRADED);
@@ -562,46 +669,46 @@
LOGGER.info("Starting upgrade node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
LOGGER.info("Starting to find derived to for node type with name {}, invariantUUID {}, version{}. ", nodeTypeV.getMetadataProperty(GraphPropertyEnum.NAME), nodeTypeV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), nodeTypeV.getMetadataProperty(GraphPropertyEnum.VERSION));
Either<List<GraphVertex>, TitanOperationStatus> parentResourceRes = titanDao.getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
- if(parentResourceRes.isRight() && parentResourceRes.right().value() != TitanOperationStatus.NOT_FOUND ){
+ if (parentResourceRes.isRight() && parentResourceRes.right().value() != TitanOperationStatus.NOT_FOUND) {
return DaoStatusConverter.convertTitanStatusToStorageStatus(parentResourceRes.right().value());
}
List<GraphVertex> derivedResourcesUid = new ArrayList<>();
- if(parentResourceRes.isLeft()){
- for(GraphVertex chV: parentResourceRes.left().value()){
- Optional<String> op = allCertifiedUids.stream().filter(id -> id.equals((String)chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny();
- if(op.isPresent()){
+ if (parentResourceRes.isLeft()) {
+ for (GraphVertex chV : parentResourceRes.left().value()) {
+ Optional<String> op = allCertifiedUids.stream().filter(id -> id.equals((String) chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny();
+ if (op.isPresent()) {
derivedResourcesUid.add(chV);
}
}
}
- String uniqueId = (String)nodeTypeV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID);
+ String uniqueId = (String) nodeTypeV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID);
Either<org.openecomp.sdc.be.model.Component, StorageOperationStatus> getRes = toscaOperationFacade.getToscaElement(uniqueId);
- if(getRes.isRight()){
+ if (getRes.isRight()) {
LOGGER.info("failed to fetch element with uniqueId {} ", uniqueId);
return getRes.right().value();
}
- org.openecomp.sdc.be.model.Resource nt = (Resource)getRes.left().value();
+ org.openecomp.sdc.be.model.Resource nt = (Resource) getRes.left().value();
boolean isNeedToUpgrade = true;
- if(upgradedNodeTypesMap.containsKey(nt.getToscaResourceName()) || nodeTypes.stream().filter( p -> p.equals(nt.getToscaResourceName())).findAny().isPresent()){
+ if (upgradedNodeTypesMap.containsKey(nt.getToscaResourceName()) || nodeTypes.stream().anyMatch(p -> p.equals(nt.getToscaResourceName()))) {
isNeedToUpgrade = false;
}
- if(isNeedToUpgrade){
+ if (isNeedToUpgrade) {
LOGGER.info("Starting to perform check out of node type with name {}, invariantUUID {}, version {}. ", nt.getName(), nt.getInvariantUUID(), nt.getVersion());
Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> checkouRes = lifecycleBusinessLogic.changeComponentState(nt.getComponentType(), nt.getUniqueId(), user, LifeCycleTransitionEnum.CHECKOUT, changeInfo, true, false);
- if(checkouRes.isRight()){
+ if (checkouRes.isRight()) {
return StorageOperationStatus.GENERAL_ERROR;
}
org.openecomp.sdc.be.model.Component upgradetComp = checkouRes.left().value();
boolean res = performFullCertification(upgradetComp).isLeft();
- if(!res){
+ if (!res) {
return StorageOperationStatus.GENERAL_ERROR;
}
upgradedNodeTypesMap.put(nt.getToscaResourceName(), upgradetComp);
titanDao.commit();
}
- for(GraphVertex chV: derivedResourcesUid){
+ for (GraphVertex chV : derivedResourcesUid) {
result = upgradeNodeType(chV, upgradedNodeTypesMap, allCertifiedUids, nodeTypes);
LOGGER.info("Upgrade node type with name {}, invariantUUID {}, version {} has been finished with the status {}", chV.getMetadataProperty(GraphPropertyEnum.NAME), chV.getMetadataProperty(GraphPropertyEnum.INVARIANT_UUID), chV.getMetadataProperty(GraphPropertyEnum.VERSION), result);
}
@@ -610,23 +717,22 @@
private Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> performFullCertification(org.openecomp.sdc.be.model.Component component) {
LOGGER.info("Starting to perform full certification of {} with name {}, invariantUUID {}, version {}. ",
- component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
+ component.getComponentType().getValue(), component.getName(), component.getInvariantUUID(), component.getVersion());
- Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFICATION_REQUEST, changeInfo, true, false);
- if(changeStateEither.isRight()){
+ Either<? extends org.openecomp.sdc.be.model.Component, ResponseFormat> changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), component.getUniqueId(), user, LifeCycleTransitionEnum.CERTIFICATION_REQUEST, changeInfo, true, false);
+ if (changeStateEither.isRight()) {
LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFICATION_REQUEST);
return changeStateEither;
}
changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.START_CERTIFICATION, changeInfo, true, false);
- if(changeStateEither.isRight()){
+ if (changeStateEither.isRight()) {
LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.START_CERTIFICATION);
return changeStateEither;
}
changeStateEither = lifecycleBusinessLogic.changeComponentState(component.getComponentType(), changeStateEither.left().value().getUniqueId(), user, LifeCycleTransitionEnum.CERTIFY, changeInfo, true, false);
- if(changeStateEither.isRight()){
+ if (changeStateEither.isRight()) {
LOGGER.info(FAILED_TO_CHANGE_STATE_OF_COMPONENT, component.getName(), component.getInvariantUUID(), component.getVersion(), LifeCycleTransitionEnum.CERTIFY);
- }
- else {
+ } else {
LOGGER.info("Full certification of {} with name {}, invariantUUID {}, version {} finished successfully",
changeStateEither.left().value().getComponentType().getValue(), changeStateEither.left().value().getName(),
changeStateEither.left().value().getInvariantUUID(), changeStateEither.left().value().getVersion());
@@ -645,13 +751,13 @@
}
private boolean latestVersionExists(GraphVertex latestDerivedFrom, String currentVersion) {
- return isGreater((String)latestDerivedFrom.getJsonMetadataField(JsonPresentationFields.VERSION), currentVersion);
+ return isGreater((String) latestDerivedFrom.getJsonMetadataField(JsonPresentationFields.VERSION), currentVersion);
}
private boolean isGreater(String latestVersion, String currentVersion) {
- if(latestVersion != null && currentVersion == null)
+ if (latestVersion != null && currentVersion == null)
return true;
- if(latestVersion == null)
+ if (latestVersion == null)
return false;
return Double.parseDouble(latestVersion) > Double.parseDouble(currentVersion);
}
@@ -661,58 +767,58 @@
Either<List<String>, TitanOperationStatus> result = null;
Map<String, String> latestCertifiedMap = new HashMap<>();
Map<String, String> latestNotCertifiedMap = new HashMap<>();
-
+
Either<List<GraphVertex>, TitanOperationStatus> getComponentsRes = getAllLatestCertifiedComponents(vertexType, componentType);
- if(getComponentsRes.isRight() && getComponentsRes.right().value() != TitanOperationStatus.NOT_FOUND){
+ if (getComponentsRes.isRight() && getComponentsRes.right().value() != TitanOperationStatus.NOT_FOUND) {
LOGGER.error("Failed to fetch all latest certified not checked out components with type {}. Status is {}. ", componentType, getComponentsRes.right().value());
result = Either.right(getComponentsRes.right().value());
}
- if(getComponentsRes.isRight()){
+ if (getComponentsRes.isRight()) {
result = Either.left(new ArrayList<>());
}
- if(result == null){
- for(GraphVertex component : getComponentsRes.left().value()){
- String invariantUUID = (String)component.getJsonMetadataField(JsonPresentationFields.INVARIANT_UUID);
- if(((String)component.getJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE)).equals(LifecycleStateEnum.CERTIFIED.name())){
- latestCertifiedMap.put(invariantUUID, (String)component.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
+ if (result == null) {
+ for (GraphVertex component : getComponentsRes.left().value()) {
+ String invariantUUID = (String) component.getJsonMetadataField(JsonPresentationFields.INVARIANT_UUID);
+ if (((String) component.getJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE)).equals(LifecycleStateEnum.CERTIFIED.name())) {
+ latestCertifiedMap.put(invariantUUID, (String) component.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
} else {
- latestNotCertifiedMap.put(invariantUUID, (String)component.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
+ latestNotCertifiedMap.put(invariantUUID, (String) component.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID));
}
}
- result = Either.left(latestCertifiedMap.entrySet().stream().filter(e->!latestNotCertifiedMap.containsKey(e.getKey())).map(e->e.getValue()).collect(Collectors.toList()));
+ result = Either.left(latestCertifiedMap.entrySet().stream().filter(e -> !latestNotCertifiedMap.containsKey(e.getKey())).map(Map.Entry::getValue).collect(Collectors.toList()));
}
return result;
}
- private Either<List<GraphVertex>, TitanOperationStatus> getAllLatestCertifiedComponents(VertexTypeEnum vertexType, ComponentTypeEnum componentType){
+ private Either<List<GraphVertex>, TitanOperationStatus> getAllLatestCertifiedComponents(VertexTypeEnum vertexType, ComponentTypeEnum componentType) {
Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
propertiesToMatch.put(GraphPropertyEnum.COMPONENT_TYPE, componentType.name());
propertiesToMatch.put(GraphPropertyEnum.IS_HIGHEST_VERSION, true);
-
+
Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
propertiesNotToMatch.put(GraphPropertyEnum.IS_DELETED, true);
- if(vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE)
+ if (vertexType == VertexTypeEnum.TOPOLOGY_TEMPLATE && componentType == ComponentTypeEnum.RESOURCE)
propertiesNotToMatch.put(GraphPropertyEnum.RESOURCE_TYPE, ResourceTypeEnum.CVFC.name());
return titanDao.getByCriteria(vertexType, propertiesToMatch, propertiesNotToMatch, JsonParseFlagEnum.ParseMetadata);
}
protected Either<List<String>, TitanOperationStatus> findResourcesPathRecursively(GraphVertex nodeTypeV, List<String> allCertifiedUids) {
Either<List<GraphVertex>, TitanOperationStatus> parentResourceRes = titanDao.getParentVertecies(nodeTypeV, EdgeLabelEnum.DERIVED_FROM, JsonParseFlagEnum.ParseMetadata);
- if(parentResourceRes.isRight()){
+ if (parentResourceRes.isRight()) {
return Either.right(parentResourceRes.right().value());
}
List<GraphVertex> derivedResourcesUid = new ArrayList<>();
- for(GraphVertex chV: parentResourceRes.left().value()){
- Optional<String> op = allCertifiedUids.stream().filter(id -> id.equals((String)chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny();
- if(op.isPresent()){
+ for (GraphVertex chV : parentResourceRes.left().value()) {
+ Optional<String> op = allCertifiedUids.stream().filter(id -> id.equals((String) chV.getJsonMetadataField(JsonPresentationFields.UNIQUE_ID))).findAny();
+ if (op.isPresent()) {
derivedResourcesUid.add(chV);
}
}
return null;
}
- private Either<List<GraphVertex>, StorageOperationStatus> getLatestByName(GraphPropertyEnum property, String nodeName){
+ private Either<List<GraphVertex>, StorageOperationStatus> getLatestByName(GraphPropertyEnum property, String nodeName) {
Map<GraphPropertyEnum, Object> propertiesToMatch = new EnumMap<>(GraphPropertyEnum.class);
Map<GraphPropertyEnum, Object> propertiesNotToMatch = new EnumMap<>(GraphPropertyEnum.class);
@@ -728,8 +834,8 @@
}
List<GraphVertex> resources = highestResources.left().value();
List<GraphVertex> result = new ArrayList<>();
- for(GraphVertex component:resources){
- if(((String)component.getJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE)).equals(LifecycleStateEnum.CERTIFIED.name())){
+ for (GraphVertex component : resources) {
+ if (((String) component.getJsonMetadataField(JsonPresentationFields.LIFECYCLE_STATE)).equals(LifecycleStateEnum.CERTIFIED.name())) {
result.add(component);
}
}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java
new file mode 100644
index 0000000..9cbb7ec
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/migration/tasks/mig1802/SdcCatalogMigration.java
@@ -0,0 +1,137 @@
+package org.openecomp.sdc.asdctool.migration.tasks.mig1802;
+
+import fj.data.Either;
+import org.apache.commons.collections.ListUtils;
+import org.apache.tinkerpop.gremlin.structure.Direction;
+import org.openecomp.sdc.asdctool.migration.core.DBVersion;
+import org.openecomp.sdc.asdctool.migration.core.task.Migration;
+import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
+import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
+import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.JsonParseFlagEnum;
+import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
+import org.openecomp.sdc.be.dao.jsongraph.utils.IdBuilderUtils;
+import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
+import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
+import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
+import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
+import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementOperation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+@Component
+public class SdcCatalogMigration implements Migration {
+ private static final Logger LOGGER = LoggerFactory.getLogger(SdcCatalogMigration.class);
+ private static final List<ResourceTypeEnum> EXCLUDE_TYPES = Arrays.asList(ResourceTypeEnum.VFCMT, ResourceTypeEnum.Configuration);
+
+ private ToscaElementOperation toscaElementOperation;
+ private TitanDao titanDao;
+
+ public SdcCatalogMigration(TopologyTemplateOperation toscaElementOperation, TitanDao titanDao) {
+ this.toscaElementOperation = toscaElementOperation;
+ this.titanDao = titanDao;
+ }
+
+ @Override
+ public String description() {
+ return "optimize sdc catalog vertices";
+ }
+
+ @Override
+ public DBVersion getVersion() {
+ return DBVersion.from(BigInteger.valueOf(1802), BigInteger.valueOf(0));
+ }
+
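+ // builds or fetches the catalog root vertex and links all highest-version resources and services (excluding the types in EXCLUDE_TYPES) to it; the transaction is committed on success and rolled back on failure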
+ @Override
+ public MigrationResult migrate() {
+ TitanOperationStatus status = null;
+ try {
+ status = getOrCreateCatalogRoot()
+ .either(this::associateCatalogRootToCatalogElements,
+ err -> {LOGGER.error("failed to create catalog root. err: {}", err); return err;});
+ return status == TitanOperationStatus.OK ? MigrationResult.success() : MigrationResult.error("failed to create and associate catalog root. error: " + status);
+ } finally {
+ commitOrRollBack(status);
+ }
+ }
+
+ private void commitOrRollBack(TitanOperationStatus status) {
+ if (status == TitanOperationStatus.OK) {
+ titanDao.commit();
+ } else {
+ titanDao.rollback();
+ }
+ }
+
+ private Either<GraphVertex, TitanOperationStatus> getOrCreateCatalogRoot() {
+ LOGGER.info("creating or getting catalog root vertex");
+ return titanDao.getVertexByLabel(VertexTypeEnum.CATALOG_ROOT)
+ .right()
+ .bind(this::createRootCatalogVertexOrError);
+ }
+
+
+ private Either<GraphVertex, TitanOperationStatus> createRootCatalogVertexOrError(TitanOperationStatus titanOperationStatus) {
+ return titanOperationStatus == TitanOperationStatus.NOT_FOUND ? createRootCatalogVertex() : Either.right(titanOperationStatus);
+ }
+
+ private Either<GraphVertex, TitanOperationStatus> createRootCatalogVertex() {
+ LOGGER.info("Creating root catalog vertex");
+ GraphVertex catalogRootVertex = new GraphVertex(VertexTypeEnum.CATALOG_ROOT);
+ catalogRootVertex.setUniqueId(IdBuilderUtils.generateUniqueId());
+ return titanDao.createVertex(catalogRootVertex);
+ }
+
+ private Either<List<GraphVertex>, TitanOperationStatus> getAllCatalogVertices() {
+ LOGGER.info("fetching all catalog resources");
+ return toscaElementOperation.getListOfHighestComponents(ComponentTypeEnum.RESOURCE, EXCLUDE_TYPES, JsonParseFlagEnum.ParseMetadata)
+ .left()
+ .bind(this::getAllCatalogVertices);
+ }
+
+ @SuppressWarnings("unchecked")
+ private Either<List<GraphVertex>, TitanOperationStatus> getAllCatalogVertices(List<GraphVertex> allResourceCatalogVertices) {
+ LOGGER.info("number of resources: {}", allResourceCatalogVertices.size());
+ LOGGER.info("fetching all catalog services");
+ return toscaElementOperation.getListOfHighestComponents(ComponentTypeEnum.SERVICE, EXCLUDE_TYPES, JsonParseFlagEnum.ParseMetadata)
+ .left()
+ .map(allServiceVertices -> ListUtils.union(allServiceVertices, allResourceCatalogVertices));
+ }
+
+ private TitanOperationStatus associateCatalogRootToCatalogElements(GraphVertex root) {
+ return getAllCatalogVertices()
+ .either(catalogVertices -> associateCatalogRootToCatalogElements(root, catalogVertices),
+ err -> err);
+ }
+
+ private TitanOperationStatus associateCatalogRootToCatalogElements(GraphVertex root, List<GraphVertex> catalogElements) {
+ LOGGER.info("number of catalog elements: {}", catalogElements.size());
+ LOGGER.info("connect all catalog elements to root edge");
+ List<GraphVertex> nonConnectedElements = catalogElements.stream().filter(this::edgeNotAlreadyExists).collect(Collectors.toList());
+ int numOfCreatedEdges = 0;
+ for (GraphVertex catalogElement : nonConnectedElements) {
+ TitanOperationStatus edgeCreationStatus = titanDao.createEdge(root, catalogElement, EdgeLabelEnum.CATALOG_ELEMENT, null);
+ if (edgeCreationStatus != TitanOperationStatus.OK) {
+ LOGGER.error("failed to create edge from catalog element to vertex {}", catalogElement.getUniqueId());
+ return edgeCreationStatus;
+ }
+ LOGGER.debug("created edge from catalog root to element {}", catalogElement.getUniqueId());
+ numOfCreatedEdges++;
+ }
+ LOGGER.info("number edges created: {}", numOfCreatedEdges);
+ return TitanOperationStatus.OK;
+ }
+
+ private boolean edgeNotAlreadyExists(GraphVertex catalogElement) {
+ return !catalogElement.getVertex().edges(Direction.IN, EdgeLabelEnum.CATALOG_ELEMENT.name()).hasNext();
+ }
+
+
+}
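Note on the flow above: the migration is built around fj's Either, with the catalog root vertex on the left and a TitanOperationStatus on the right. getOrCreateCatalogRoot() first looks the root up, and only a NOT_FOUND status on the right projection is bound into a create step; any other status is propagated and becomes the error that migrate() reports. A minimal standalone sketch of that get-or-create pattern, using demo types only (GetOrCreateSketch, lookupRoot and createRoot are illustrative names, not part of this patch):

import fj.data.Either;

public class GetOrCreateSketch {

    enum Status { OK, NOT_FOUND, GENERAL_ERROR }

    // stand-in for titanDao.getVertexByLabel(VertexTypeEnum.CATALOG_ROOT):
    // left carries the vertex, right carries the failure status
    static Either<String, Status> lookupRoot(boolean exists) {
        if (exists) {
            return Either.left("catalog-root");
        }
        return Either.right(Status.NOT_FOUND);
    }

    // stand-in for titanDao.createVertex(...)
    static Either<String, Status> createRoot() {
        return Either.left("new-catalog-root");
    }

    // mirrors getOrCreateCatalogRoot(): only NOT_FOUND triggers creation,
    // every other status is kept as the error
    static Either<String, Status> getOrCreateRoot(boolean exists) {
        return lookupRoot(exists)
                .right()
                .bind(status -> status == Status.NOT_FOUND
                        ? createRoot()
                        : Either.<String, Status>right(status));
    }

    public static void main(String[] args) {
        String outcome = getOrCreateRoot(false)
                .either(root -> "using root: " + root,
                        err -> "failed with status: " + err);
        System.out.println(outcome); // prints: using root: new-catalog-root
    }
}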
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java
new file mode 100644
index 0000000..9ea2d5a
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportCassandraTableTool.java
@@ -0,0 +1,69 @@
+package org.openecomp.sdc.asdctool.simulator.tenant;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Consumer;
+
+import org.openecomp.sdc.be.config.ConfigurationManager;
+import org.openecomp.sdc.common.api.ConfigurationSource;
+import org.openecomp.sdc.common.impl.ExternalConfiguration;
+import org.openecomp.sdc.common.impl.FSConfigurationSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Main class of a utility that imports a CSV file into the specified table.
+ * The existing contents of the table are removed first.
+ *
+ * Accepts 3 mandatory arguments:
+ * 1. Path to configuration folder
+ * 2. Name of the table
+ * 3. Path to the CSV file
+ *
+ * Example of usage:
+ * \src\main\resources\config\ operationalenvironment "C:\Users\dr2032\Documents\env.csv"
+ *
+ * See the relevant import handler for an example of a CSV file line.
+ *
+ * The list of supported tables:
+ * 1. operationalenvironment
+ *
+ *
+ * @author dr2032
+ *
+ */
+public class ImportCassandraTableTool {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ImportCassandraTableTool.class);
+
+ private static Map<String, Consumer<String>> mapHandlers = new HashMap<>();
+
+ static {
+ mapHandlers.put(OperationalEvnironmentImportHandler.getTableName().toLowerCase(), OperationalEvnironmentImportHandler::execute);
+ }
+
+ public static void main(String[] args) {
+ if(args.length == 3) {
+ String appConfigDir = args[0];
+ String tableName = args[1];
+ String fileName = args[2];
+
+ ConfigurationSource configurationSource = new FSConfigurationSource(ExternalConfiguration.getChangeListener(), appConfigDir);
+ new ConfigurationManager(configurationSource);
+
+ Consumer<String> executor = mapHandlers.get(tableName.toLowerCase());
+ if (executor != null) {
+ executor.accept(fileName);
+ }
+ else {
+ LOGGER.warn("Import to table [{}] is not supported yet!", tableName);
+ }
+ }
+ else {
+ LOGGER.warn("Invalid number of arguments. The 1st shoduld be path to config dir, the 2nd - table name and the 3rd - path to CSV file.");
+ }
+
+
+ System.exit(0);
+ }
+
+}
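The tool above dispatches on the lower-cased table name through a Map<String, Consumer<String>>, so supporting another table only requires registering one more handler in the static block. A minimal standalone sketch of that dispatch (TableDispatchSketch and the printing handler are illustrative stand-ins, not part of this patch):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Consumer;

public class TableDispatchSketch {

    // table name (lower case) -> handler that receives the CSV file path,
    // mirroring ImportCassandraTableTool.mapHandlers
    private static final Map<String, Consumer<String>> HANDLERS = new HashMap<>();

    static {
        // the real tool registers OperationalEvnironmentImportHandler::execute here;
        // this stand-in only prints instead of touching Cassandra
        HANDLERS.put("operationalenvironment", file -> System.out.println("importing " + file));
        // one more put(...) is all a newly supported table would need
    }

    public static void main(String[] args) {
        String tableName = "OperationalEnvironment"; // lookup is case-insensitive, as in the tool
        Consumer<String> handler = HANDLERS.get(tableName.toLowerCase());
        if (handler != null) {
            handler.accept("env.csv");
        } else {
            System.out.println("Import to table [" + tableName + "] is not supported yet");
        }
    }
}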
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportTableConfig.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportTableConfig.java
new file mode 100644
index 0000000..5ce4314
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/ImportTableConfig.java
@@ -0,0 +1,19 @@
+package org.openecomp.sdc.asdctool.simulator.tenant;
+
+import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
+import org.openecomp.sdc.be.dao.cassandra.OperationalEnvironmentDao;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class ImportTableConfig {
+ @Bean(name = "cassandra-client")
+ public CassandraClient cassandraClient() {
+ return new CassandraClient();
+ }
+
+ @Bean(name = "operational-environment-dao")
+ public OperationalEnvironmentDao operationalEnvironmentDao() {
+ return new OperationalEnvironmentDao();
+ }
+}
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java
new file mode 100644
index 0000000..1caf073
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEnvironment.java
@@ -0,0 +1,119 @@
+package org.openecomp.sdc.asdctool.simulator.tenant;
+
+import org.openecomp.sdc.be.datatypes.enums.EnvironmentStatusEnum;
+
+import com.opencsv.bean.CsvBindByPosition;
+
+/**
+ * Represents a line in a CSV file to be imported into the "operationalenvironment" table.
+ * @author dr2032
+ *
+ */
+public class OperationalEnvironment {
+ @CsvBindByPosition(position = 0)
+ private String environmentId;
+
+ @CsvBindByPosition(position = 1)
+ private String dmaapUebAddress;
+
+ @CsvBindByPosition(position = 2)
+ private String ecompWorkloadContext;
+
+ @CsvBindByPosition(position = 3)
+ private Boolean isProduction;
+
+ @CsvBindByPosition(position = 4)
+ private String lastModified;
+
+ @CsvBindByPosition(position = 5)
+ private String status;
+
+ @CsvBindByPosition(position = 6)
+ private String tenant;
+
+ @CsvBindByPosition(position = 7)
+ private String uebApikey;
+
+ @CsvBindByPosition(position = 8)
+ private String uebSecretKey;
+
+
+
+ public String getLastModified() {
+ return lastModified;
+ }
+
+ public void setLastModified(String lastModified) {
+ this.lastModified = lastModified;
+ }
+
+
+ public String getEnvironmentId() {
+ return environmentId;
+ }
+
+ public void setEnvironmentId(String environmentId) {
+ this.environmentId = environmentId;
+ }
+
+ public String getTenant() {
+ return tenant;
+ }
+
+ public void setTenant(String tenant) {
+ this.tenant = tenant;
+ }
+
+ public Boolean getIsProduction() {
+ return isProduction;
+ }
+
+ public void setIsProduction(Boolean production) {
+ isProduction = production;
+ }
+
+ public String getEcompWorkloadContext() {
+ return ecompWorkloadContext;
+ }
+
+ public void setEcompWorkloadContext(String ecompWorkloadContext) {
+ this.ecompWorkloadContext = ecompWorkloadContext;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ public void setStatus(String status) {
+ this.status = status;
+ }
+
+ public void setStatus(EnvironmentStatusEnum status) {
+ this.status = status.getName();
+ }
+
+ public String getDmaapUebAddress() {
+ return dmaapUebAddress;
+ }
+
+ public void setDmaapUebAddress(String dmaapUebAddress) {
+ this.dmaapUebAddress = dmaapUebAddress;
+ }
+
+ public String getUebApikey() {
+ return uebApikey;
+ }
+
+ public void setUebApikey(String uebApikey) {
+ this.uebApikey = uebApikey;
+ }
+
+ public String getUebSecretKey() {
+ return uebSecretKey;
+ }
+
+ public void setUebSecretKey(String uebSecretKey) {
+ this.uebSecretKey = uebSecretKey;
+ }
+
+}
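The bean above relies on opencsv positional binding: each @CsvBindByPosition field is filled from the column at the given index, and the CsvToBeanBuilder call in the import handler below performs the mapping. A cut-down standalone sketch, assuming opencsv is on the classpath (Row and CsvPositionBindingSketch are demo names covering only the first two columns, not part of this patch):

import com.opencsv.bean.CsvBindByPosition;
import com.opencsv.bean.CsvToBeanBuilder;

import java.io.StringReader;
import java.util.List;

public class CsvPositionBindingSketch {

    // demo bean: only the first two columns of the real OperationalEnvironment
    public static class Row {
        @CsvBindByPosition(position = 0)
        private String environmentId;

        @CsvBindByPosition(position = 1)
        private String dmaapUebAddress;

        public String getEnvironmentId() { return environmentId; }
        public String getDmaapUebAddress() { return dmaapUebAddress; }
    }

    public static void main(String[] args) {
        // same builder call the import handler uses, fed from a string instead of a file
        List<Row> rows = new CsvToBeanBuilder<Row>(new StringReader("00002,135.42.43.45:5757"))
                .withType(Row.class)
                .build()
                .parse();
        System.out.println(rows.get(0).getEnvironmentId() + " -> " + rows.get(0).getDmaapUebAddress());
    }
}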
diff --git a/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java
new file mode 100644
index 0000000..d4dbddd
--- /dev/null
+++ b/asdctool/src/main/java/org/openecomp/sdc/asdctool/simulator/tenant/OperationalEvnironmentImportHandler.java
@@ -0,0 +1,99 @@
+package org.openecomp.sdc.asdctool.simulator.tenant;
+
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.openecomp.sdc.be.dao.cassandra.OperationalEnvironmentDao;
+import org.openecomp.sdc.be.dao.cassandra.schema.Table;
+import org.openecomp.sdc.be.resources.data.OperationalEnvironmentEntry;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.context.annotation.AnnotationConfigApplicationContext;
+
+import com.opencsv.bean.CsvToBeanBuilder;
+
+/**
+ * Imports a CSV file into the operational environment table.
+ * Example of a line in the file:
+ * 00002,135.42.43.45:5757,Context,FALSE,2017-10-11 12:02:01,INITIAL,personal tenant,abcd123456789,bbbbbbbbbbb
+ * The date format is fixed: yyyy-MM-dd HH:mm:ss
+ * @author dr2032
+ *
+ */
+public class OperationalEvnironmentImportHandler {
+ private static final Logger LOGGER = LoggerFactory.getLogger(OperationalEvnironmentImportHandler.class);
+ private static final String TABLE_NAME = Table.SDC_OPERATIONAL_ENVIRONMENT.getTableDescription().getTableName();
+
+ private OperationalEvnironmentImportHandler() {
+
+ }
+
+ public static void execute(String fileName) {
+ try {
+ List<OperationalEnvironment> beans = new CsvToBeanBuilder<OperationalEnvironment>(new FileReader(fileName))
+ .withType(OperationalEnvironment.class).build().parse();
+
+ List<OperationalEnvironmentEntry> entries = map(beans);
+ modifyDb(entries);
+ LOGGER.info("File {} has been successfully imported into the [{}] table.", fileName, TABLE_NAME);
+ } catch (IllegalStateException | FileNotFoundException e) {
+ String errorMessage = String.format("Failed to import file %s into the [%s] table.", fileName, TABLE_NAME);
+ LOGGER.error(errorMessage, e);
+ }
+ }
+
+ private static List<OperationalEnvironmentEntry> map(List<OperationalEnvironment> beans) {
+ return beans.stream()
+ .map(OperationalEvnironmentImportHandler::map)
+ .collect(Collectors.toList());
+
+ }
+
+ private static OperationalEnvironmentEntry map(OperationalEnvironment operationalEnvironment) {
+ OperationalEnvironmentEntry entry = new OperationalEnvironmentEntry();
+
+ entry.setEnvironmentId(operationalEnvironment.getEnvironmentId());
+ entry.addDmaapUebAddress(operationalEnvironment.getDmaapUebAddress());
+ entry.setEcompWorkloadContext(operationalEnvironment.getEcompWorkloadContext());
+ entry.setIsProduction(operationalEnvironment.getIsProduction());
+
+ SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ try {
+ entry.setLastModified(formatter.parse(operationalEnvironment.getLastModified()));
+ } catch (ParseException e) {
+ LOGGER.error("Failed to parse date, expected format is [yyyy-MM-dd HH:mm:ss].", e);
+ throw new RuntimeException(e);
+ }
+
+ entry.setStatus(operationalEnvironment.getStatus());
+ entry.setTenant(operationalEnvironment.getTenant());
+ entry.setUebApikey(operationalEnvironment.getUebApikey());
+ entry.setUebSecretKey(operationalEnvironment.getUebSecretKey());
+
+ return entry;
+
+ }
+
+ private static OperationalEnvironmentDao createDaoObj() {
+ AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ImportTableConfig.class);
+ return (OperationalEnvironmentDao) context.getBean("operational-environment-dao");
+ }
+
+ private static void modifyDb(List<OperationalEnvironmentEntry> environments) {
+ OperationalEnvironmentDao daoObj = createDaoObj();
+
+ daoObj.deleteAll();
+
+ environments.forEach(daoObj::save);
+ }
+
+ public static String getTableName() {
+ return TABLE_NAME;
+ }
+
+
+}
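As the handler above shows, the lastModified column must match the fixed yyyy-MM-dd HH:mm:ss pattern; anything else aborts the import with a RuntimeException. A quick standalone check of that format, using the timestamp from the example line in the class javadoc:

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class LastModifiedFormatSketch {
    public static void main(String[] args) throws ParseException {
        // same fixed pattern the handler uses
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        Date lastModified = formatter.parse("2017-10-11 12:02:01");
        System.out.println(lastModified);
    }
}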
diff --git a/asdctool/src/main/resources/config/configuration.yaml b/asdctool/src/main/resources/config/configuration.yaml
index 48529a7..c92827e 100644
--- a/asdctool/src/main/resources/config/configuration.yaml
+++ b/asdctool/src/main/resources/config/configuration.yaml
@@ -31,15 +31,12 @@
artifactGeneratorConfig: Artifact-Generator.properties
resourcesForUpgrade:
5.0:
- - org.openecomp.resource.cp.extCP
- - tosca.nodes.network.Network
- - tosca.nodes.network.Port
- - org.openecomp.resource.cp.nodes.network.SubInterface
+ - tosca.nodes.Root
skipUpgradeFailedVfs: true
skipUpgradeVSPs: true
autoHealingOwner: jh0003
-titanCfgFile: C:\Users\im453s\git\sdc\asdctool\src\main\resources\config\titan.properties
+titanCfgFile: src\main\resources\config\titan.properties
titanMigrationKeySpaceCfgFile: src\main\resources\config\titan-migration.properties
titanInMemoryGraph: false
titanLockTimeout: 1800
@@ -206,7 +203,6 @@
type: TOSCA_CSAR
description: TOSCA definition package of the asset
-
#Informational artifacts placeHolder
excludeResourceCategory:
- Generic
@@ -333,7 +329,7 @@
- xml
AAI_VF_INSTANCE_MODEL:
acceptedTypes:
- - xml
+ - xml
OTHER:
acceptedTypes:
diff --git a/asdctool/src/main/resources/config/error-configuration.yaml b/asdctool/src/main/resources/config/error-configuration.yaml
index d33876a..6a4aece 100644
--- a/asdctool/src/main/resources/config/error-configuration.yaml
+++ b/asdctool/src/main/resources/config/error-configuration.yaml
@@ -100,6 +100,12 @@
message: "Error: Invalid USER_ID '%1'.",
messageId: "SVC4008"
}
+#---------SVC ------------------------------
+ INVALID_SERVICE_STATE: {
+ code: 409,
+ message: "Error: Invalid service state. Expected state: %1, actual state: %2",
+ messageId: ""
+ }
#---------SVC4049------------------------------
# %1 - service/resource
COMPONENT_MISSING_CONTACT: {
@@ -593,7 +599,7 @@
}
#---------SVC4301------------------------------
RESTRICTED_OPERATION: {
- code: 409,
+ code: 403,
message: "Error: Restricted operation.",
messageId: "SVC4301"
}
@@ -1669,3 +1675,18 @@
messageId: "SVC4647"
}
+
+#---------SVC4673------------------------------
+ INVALID_SERVICE_STATE: {
+ code: 409,
+ message: "Error: Invalid service state. Expected state: %1, actual state: %2",
+ messageId: "SVC4673"
+ }
+
+#---------SVC4674------------------------------
+ INVALID_RESPONSE_FROM_PROXY: {
+ code: 502,
+ message: "Error: The server was acting as a gateway or proxy and received an invalid response from the upstream server",
+ messageId: "SVC4674"
+ }
+
diff --git a/asdctool/src/main/resources/scripts/getConsumers.sh b/asdctool/src/main/resources/scripts/getConsumers.sh
new file mode 100644
index 0000000..d02aac6
--- /dev/null
+++ b/asdctool/src/main/resources/scripts/getConsumers.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+##############################
+# Get list of SDC consumers
+##############################
+
+
+CURRENT_DIR=`pwd`
+BASEDIR=$(dirname $0)
+
+if [ ${BASEDIR:0:1} = "/" ]
+then
+ FULL_PATH=$BASEDIR
+else
+ FULL_PATH=$CURRENT_DIR/$BASEDIR
+fi
+
+source ${FULL_PATH}/baseOperation.sh
+
+mainClass="org.openecomp.sdc.asdctool.main.GetConsumersMenu"
+
+command="java $JVM_LOG_FILE -Xmx1024M -cp $JARS $mainClass $@"
+echo $command
+
+$command
+result=$?
+
+
+
+echo "***********************************"
+echo "***** $result *********************"
+echo "***********************************"
+
+exit $result
+
+
+
diff --git a/asdctool/src/main/resources/scripts/python/user/exportUsers.py b/asdctool/src/main/resources/scripts/python/user/exportUsers.py
index e32a3b0..9e695ad 100644
--- a/asdctool/src/main/resources/scripts/python/user/exportUsers.py
+++ b/asdctool/src/main/resources/scripts/python/user/exportUsers.py
@@ -4,17 +4,17 @@
import json
-################################################################################################################################################
-# #
-# Export all active users to file - for 1602+ #
-# #
-# activation : #
-# python exportUsers.py [-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <output file> | --ofile=<output file> ] #
-# #
-# shortest activation (be host = localhost, be port = 8080): # #
-# python exportUsers.py [-f <output file> | --ofile=<output file> ] #
-# #
-################################################################################################################################################
+####################################################################################################################################################################################
+# #
+# Export all active users to file - for 1602+ #
+# #
+# activation : #
+# python exportUsers.py [-s <scheme> | --scheme=<scheme> ] [-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <output file> | --ofile=<output file> ] #
+# #
+# shortest activation (be host = localhost, be port = 8080): #
+# python exportUsers.py [-f <output file> | --ofile=<output file> ] #
+# #
+####################################################################################################################################################################################
ALL_USERS_SUFFIX = '/sdc2/rest/v1/user/users'
@@ -25,19 +25,23 @@
print("status=" + str(errorCode))
sys.exit(errorCode)
-def getUsers(beHost, bePort, adminUser):
+def getUsers(scheme, beHost, bePort, adminUser):
try:
buffer = StringIO()
c = pycurl.Curl()
- url = 'http://' + beHost + ':' + bePort + ALL_USERS_SUFFIX
+ url = scheme + '://' + beHost + ':' + bePort + ALL_USERS_SUFFIX
print(url)
c.setopt(c.URL, url)
c.setopt(c.WRITEFUNCTION, buffer.write)
#c.setopt(c.WRITEFUNCTION, lambda x: None)
adminHeader = 'USER_ID: ' + adminUser
c.setopt(pycurl.HTTPHEADER, ['Content-Type: application/json', 'Accept: application/json', adminHeader])
+
+ if scheme == 'https':
+ c.setopt(c.SSL_VERIFYPEER, 0)
+
res = c.perform()
#print(res)
@@ -63,7 +67,7 @@
def usage():
- print sys.argv[0], '[-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <output file> | --ofile=<output file> ]'
+ print sys.argv[0], '[optional -s <scheme> | --scheme=<scheme>, default http] [-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <output file> | --ofile=<output file> ]'
def main(argv):
print 'Number of arguments:', len(sys.argv), 'arguments.'
@@ -72,9 +76,10 @@
beHost = 'localhost'
bePort = '8080'
outputfile = None
+ scheme = 'http'
try:
- opts, args = getopt.getopt(argv,"i:p:f:h:",["ip=","port=","ofile="])
+ opts, args = getopt.getopt(argv,"i:p:f:h:s:",["ip=","port=","ofile=","scheme="])
except getopt.GetoptError:
usage()
errorAndExit(2, 'Invalid input')
@@ -90,14 +95,16 @@
bePort = arg
elif opt in ("-f", "--ofile"):
outputfile = arg
+ elif opt in ("-s", "--scheme"):
+ scheme = arg
- print 'be host =',beHost,', be port =', bePort,', output file =',outputfile
+ print 'scheme =',scheme,', be host =',beHost,', be port =', bePort,', output file =',outputfile
if ( outputfile == None ):
usage()
sys.exit(3)
- users = getUsers(beHost, bePort, adminHeader)
+ users = getUsers(scheme, beHost, bePort, adminHeader)
error = users[1]
body = users[0]
diff --git a/asdctool/src/main/resources/scripts/python/user/importUsers.py b/asdctool/src/main/resources/scripts/python/user/importUsers.py
index 669cbbe..984b75b 100644
--- a/asdctool/src/main/resources/scripts/python/user/importUsers.py
+++ b/asdctool/src/main/resources/scripts/python/user/importUsers.py
@@ -4,20 +4,20 @@
import json
import copy
-################################################################################################################################################
-# #
-# Import all users from a given file #
-# #
-# activation : #
-# python importUsers.py [-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <input file> | --ifile=<input file> ] #
-# #
-# shortest activation (be host = localhost, be port = 8080): # #
-# python importUsers.py [-f <input file> | --ifile=<input file> ] #
-# #
-################################################################################################################################################
+#####################################################################################################################################################################################
+# #
+# Import all users from a given file #
+# #
+# activation : #
+# python importUsers.py [-s <scheme> | --scheme=<scheme> ] [-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <input file> | --ifile=<input file> ] #
+# #
+# shortest activation (be host = localhost, be port = 8080): #
+# python importUsers.py [-f <input file> | --ifile=<input file> ] #
+# #
+#####################################################################################################################################################################################
-def importUsers(beHost, bePort, users, adminUser):
+def importUsers(scheme, beHost, bePort, users, adminUser):
result = []
@@ -25,12 +25,12 @@
#print("Going to add user " + user['userId'])
- getRes = getUser(beHost, bePort, user)
+ getRes = getUser(scheme, beHost, bePort, user)
userId = getRes[0]
error = getRes[1]
#print error
if ( error != None and error == 404 ):
- res = createUser(beHost, bePort, user ,adminUser)
+ res = createUser(scheme, beHost, bePort, user ,adminUser)
result.append(res)
else:
if ( error == 200 ):
@@ -54,7 +54,7 @@
return cloneUsers
-def getUser(beHost, bePort, user):
+def getUser(scheme, beHost, bePort, user):
userId = user['userId']
try:
@@ -62,12 +62,16 @@
c = pycurl.Curl()
#print type(userId)
- url = 'http://' + beHost + ':' + bePort + '/sdc2/rest/v1/user/' + str(userId)
+ url = scheme + '://' + beHost + ':' + bePort + '/sdc2/rest/v1/user/' + str(userId)
c.setopt(c.URL, url)
#adminHeader = 'USER_ID: ' + adminUser
c.setopt(pycurl.HTTPHEADER, ['Content-Type: application/json', 'Accept: application/json'])
c.setopt(c.WRITEFUNCTION, lambda x: None)
+
+ if scheme == 'https':
+ c.setopt(c.SSL_VERIFYPEER, 0)
+
res = c.perform()
#print("Before get response code")
@@ -87,14 +91,14 @@
-def createUser(beHost, bePort, user, adminUser):
+def createUser(scheme, beHost, bePort, user, adminUser):
userId = user['userId']
try:
buffer = StringIO()
c = pycurl.Curl()
- url = 'http://' + beHost + ':' + bePort + '/sdc2/rest/v1/user'
+ url = scheme + '://' + beHost + ':' + bePort + '/sdc2/rest/v1/user'
c.setopt(c.URL, url)
c.setopt(c.POST, 1)
@@ -105,6 +109,10 @@
c.setopt(c.POSTFIELDS, data)
c.setopt(c.WRITEFUNCTION, lambda x: None)
+
+ if scheme == 'https':
+ c.setopt(c.SSL_VERIFYPEER, 0)
+
#print("before perform")
res = c.perform()
#print(res)
@@ -133,7 +141,7 @@
sys.exit(errorCode)
def usage():
- print sys.argv[0], '[-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <input file> | --ifile=<input file> ]'
+ print sys.argv[0], '[optional -s <scheme> | --scheme=<scheme>, default http] [-i <be host> | --ip=<be host>] [-p <be port> | --port=<be port> ] [-f <input file> | --ifile=<input file> ]'
def main(argv):
print 'Number of arguments:', len(sys.argv), 'arguments.'
@@ -141,11 +149,11 @@
beHost = 'localhost'
bePort = '8080'
inputfile = None
-
+ scheme = 'http'
adminUser = 'jh0003'
try:
- opts, args = getopt.getopt(argv,"i:p:f:h:",["ip=","port=","ifile="])
+ opts, args = getopt.getopt(argv,"i:p:f:h:s:",["ip=","port=","ifile=","scheme="])
except getopt.GetoptError:
usage()
errorAndExit(2, 'Invalid input')
@@ -161,8 +169,10 @@
bePort = arg
elif opt in ("-f", "--ifile"):
inputfile = arg
+ elif opt in ("-s", "--scheme"):
+ scheme = arg
- print 'be host =',beHost,', be port =', bePort,', users file =',inputfile
+ print 'scheme =',scheme,', be host =',beHost,', be port =', bePort,', users file =',inputfile
if ( inputfile == None ):
usage()
@@ -182,7 +192,7 @@
#print activeUsers
- resultTable = importUsers(beHost, bePort, activeUsers, adminUser)
+ resultTable = importUsers(scheme, beHost, bePort, activeUsers, adminUser)
g = lambda x: x[1] != 201 and x[1] != 409
diff --git a/asdctool/src/main/resources/scripts/sdc-migration.sh b/asdctool/src/main/resources/scripts/sdc-migration.sh
index bbdd6f0..15e6d6b 100644
--- a/asdctool/src/main/resources/scripts/sdc-migration.sh
+++ b/asdctool/src/main/resources/scripts/sdc-migration.sh
@@ -5,7 +5,7 @@
##############################
# in 1802E we do not want to execute automatic post process
-exit 0
+#exit 0
CURRENT_DIR=`pwd`
BASEDIR=$(dirname $0)
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java
deleted file mode 100644
index c97bffc..0000000
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/config/MigrationSpringConfigTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package org.openecomp.sdc.asdctool.migration.config;
-
-import org.junit.Test;
-import org.openecomp.sdc.asdctool.migration.core.SdcMigrationTool;
-import org.openecomp.sdc.asdctool.migration.dao.MigrationTasksDao;
-import org.openecomp.sdc.asdctool.migration.resolver.MigrationResolver;
-import org.openecomp.sdc.asdctool.migration.resolver.SpringBeansMigrationResolver;
-import org.openecomp.sdc.asdctool.migration.service.SdcRepoService;
-import org.openecomp.sdc.be.dao.cassandra.CassandraClient;
-
-
-public class MigrationSpringConfigTest {
-
- private MigrationSpringConfig createTestSubject() {
- return new MigrationSpringConfig();
- }
-
-
- @Test
- public void testSdcMigrationTool() throws Exception {
- MigrationSpringConfig testSubject;
- MigrationResolver migrationResolver = null;
- SdcRepoService sdcRepoService = null;
- SdcMigrationTool result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.sdcMigrationTool(migrationResolver, sdcRepoService);
- }
-
-
- @Test
- public void testMigrationResolver() throws Exception {
- MigrationSpringConfig testSubject;
- SdcRepoService sdcRepoService = null;
- SpringBeansMigrationResolver result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.migrationResolver(sdcRepoService);
- }
-
-
- @Test
- public void testSdcRepoService() throws Exception {
- MigrationSpringConfig testSubject;
- MigrationTasksDao migrationTasksDao = null;
- SdcRepoService result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.sdcRepoService(migrationTasksDao);
- }
-
-
- @Test
- public void testMigrationTasksDao() throws Exception {
- MigrationSpringConfig testSubject;
- MigrationTasksDao result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.migrationTasksDao();
- }
-
-
- @Test
- public void testCassandraClient() throws Exception {
- MigrationSpringConfig testSubject;
- CassandraClient result;
-
- // default test
- testSubject = createTestSubject();
- result = testSubject.cassandraClient();
- }
-}
\ No newline at end of file
diff --git a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
index 2cb5e29..0c1b32e 100644
--- a/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
+++ b/asdctool/src/test/java/org/openecomp/sdc/asdctool/migration/tasks/mig1710/UpgradeMigration1710Test.java
@@ -1,26 +1,17 @@
package org.openecomp.sdc.asdctool.migration.tasks.mig1710;
-import static org.junit.Assert.assertEquals;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.ArgumentMatchers.anyString;
-import static org.mockito.ArgumentMatchers.eq;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.when;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
+import com.google.common.collect.Lists;
+import fj.data.Either;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import org.openecomp.sdc.asdctool.migration.core.task.MigrationResult;
+import org.openecomp.sdc.asdctool.migration.tasks.handlers.XlsOutputHandler;
import org.openecomp.sdc.be.components.lifecycle.LifecycleBusinessLogic;
import org.openecomp.sdc.be.config.Configuration;
import org.openecomp.sdc.be.config.ConfigurationManager;
@@ -31,21 +22,19 @@
import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
import org.openecomp.sdc.be.datatypes.enums.OriginTypeEnum;
import org.openecomp.sdc.be.impl.ComponentsUtils;
-import org.openecomp.sdc.be.model.Component;
-import org.openecomp.sdc.be.model.ComponentInstance;
-import org.openecomp.sdc.be.model.LifecycleStateEnum;
-import org.openecomp.sdc.be.model.Resource;
-import org.openecomp.sdc.be.model.User;
+import org.openecomp.sdc.be.model.*;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaOperationFacade;
import org.openecomp.sdc.be.model.operations.api.IUserAdminOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.common.api.ConfigurationSource;
import org.openecomp.sdc.exception.ResponseFormat;
-import com.google.common.collect.Lists;
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
-import fj.data.Either;
-
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.*;
@RunWith(MockitoJUnitRunner.class)
public class UpgradeMigration1710Test {
@@ -53,7 +42,9 @@
private final static String CONF_LEVEL = "5.0";
private final User user = new User();
- private UpgradeMigration1710 migration;
+
+ @InjectMocks
+ private UpgradeMigration1710 migration = new UpgradeMigration1710();
@Mock
private IUserAdminOperation userAdminOperation;
@Mock
@@ -66,6 +57,9 @@
private ComponentsUtils componentUtils;
@Mock
private ConfigurationSource configurationSource;
+ @Mock
+ private XlsOutputHandler outputHandler;
+
private static ConfigurationManager configurationManager;
private static List<String> resources = Stream.of("org.openecomp.resource.cp.extCP").collect(Collectors.toList());
private static Map<String, List<String>> resourcesForUpgrade;
@@ -78,12 +72,6 @@
@Before
public void setUp() {
- migration = new UpgradeMigration1710();
- migration.setUserAdminOperation(userAdminOperation);
- migration.setTitanDao(titanDao);
- migration.setTosckaOperationFacade(toscaOperationFacade);
- migration.setLifecycleBusinessLogic(lifecycleBusinessLogic);
-
user.setUserId(USER);
configurationManager = new ConfigurationManager(configurationSource);
configurationManager.setConfiguration(new Configuration());
@@ -106,11 +94,13 @@
final boolean failOnVfUpgrade = true;
final boolean upgradeServices = false;
final boolean exceptionOnVfUpgrade = false;
- final boolean upgradeFVC = false;
+ final boolean upgradeVFC = false;
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(false);
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeFVC);
+ upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeVFC);
assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(1)).commit();
+ verify(titanDao, times(2)).rollback();
}
@Test
@@ -118,11 +108,13 @@
final boolean failOnVfUpgrade = false;
final boolean upgradeServices = false;
final boolean exceptionOnVfUpgrade = true;
- final boolean upgradeFVC = false;
+ final boolean upgradeVFC = false;
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(false);
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeFVC);
+ upgradeRules(failOnVfUpgrade, exceptionOnVfUpgrade, upgradeServices, upgradeVFC);
assertEquals(MigrationResult.MigrationStatus.FAILED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(1)).commit();
+ verify(titanDao, times(2)).rollback();
}
@Test
@@ -130,11 +122,13 @@
final boolean failOnVfUpgrade = false;
final boolean upgradeServices = true;
final boolean exceptionOnFvUpgrade = true;
- final boolean upgradeFVC = false;
+ final boolean upgradeVFC = false;
configurationManager.getConfiguration().setSkipUpgradeFailedVfs(true);
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeFVC);
+ upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC);
assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(2)).commit();
+ verify(titanDao, times(3)).rollback();
}
@@ -143,10 +137,12 @@
final boolean failOnVfUpgrade = false;
final boolean upgradeServices = true;
final boolean exceptionOnFvUpgrade = false;
- final boolean upgradeFVC = false;
+ final boolean upgradeVFC = false;
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeFVC);
+ upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC);
assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
+ verify(titanDao, times(2)).commit();
+ verify(titanDao, times(3)).rollback();
}
@Test
@@ -154,11 +150,11 @@
final boolean failOnVfUpgrade = false;
final boolean upgradeServices = true;
final boolean exceptionOnFvUpgrade = false;
- final boolean upgradeFVC = true;
+ final boolean upgradeVFC = true;
resolveUserAndDefineUpgradeLevel();
- upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeFVC);
+ upgradeRules(failOnVfUpgrade, exceptionOnFvUpgrade, upgradeServices, upgradeVFC);
configurationManager.getConfiguration().setSkipUpgradeVSPs(false);
- migration.setComponentsUtils(componentUtils);
+// migration.setComponentsUtils(componentUtils);
assertEquals(MigrationResult.MigrationStatus.COMPLETED, migration.migrate().getMigrationStatus());
}
@@ -187,7 +183,7 @@
when(titanDao.getByCriteria(any(), any(), any(), any()))
.thenReturn(Either.left(components));
- when(titanDao.getParentVertecies(any(), any(), any()))
+ when(titanDao.getParentVertecies(any(GraphVertex.class), any(), any()))
//1th node to upgrade
.thenReturn(Either.left(components))
//parent of the 1th node - stop recursion