Improve Test coverage of DFC
Issue-ID: NONRTRIC-874
Change-Id: Ifaa71b7352bdff50c4c8e6af18ab5655036c4f57
Signed-off-by: ambrishest <ambrish.singh@est.tech>
diff --git a/datafilecollector/pom.xml b/datafilecollector/pom.xml
index b3e7472..37b39d7 100644
--- a/datafilecollector/pom.xml
+++ b/datafilecollector/pom.xml
@@ -40,7 +40,9 @@
<springdoc.version>2.0.2</springdoc.version>
<springdoc.openapi-ui.version>1.6.14</springdoc.openapi-ui.version>
<exec.skip>true</exec.skip>
+
<sonar-maven-plugin.version>3.7.0.1746</sonar-maven-plugin.version>
+
</properties>
<dependencies>
<dependency>
@@ -138,6 +140,11 @@
<scope>test</scope>
</dependency>
<dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-inline</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
<groupId>commons-net</groupId>
<artifactId>commons-net</artifactId>
<version>3.9.0</version>
diff --git a/datafilecollector/src/main/java/org/oran/datafile/configuration/AppConfig.java b/datafilecollector/src/main/java/org/oran/datafile/configuration/AppConfig.java
index 6689f5d..6282cd4 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/configuration/AppConfig.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/configuration/AppConfig.java
@@ -21,6 +21,7 @@
import lombok.Getter;
+import lombok.Setter;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
@@ -55,6 +56,7 @@
@Value("${app.collected-files-path}")
@Getter
+ @Setter
private String collectedFilesPath;
@Value("${app.sftp.strict-host-key-checking:false}")
@@ -77,22 +79,27 @@
private String clientTrustStorePassword;
@Getter
+ @Setter
@Value("${app.s3.endpointOverride:}")
private String s3EndpointOverride;
@Getter
+ @Setter
@Value("${app.s3.accessKeyId:}")
private String s3AccessKeyId;
@Getter
+ @Setter
@Value("${app.s3.secretAccessKey:}")
private String s3SecretAccessKey;
@Getter
+ @Setter
@Value("${app.s3.bucket:}")
private String s3Bucket;
@Value("${app.s3.locksBucket:}")
+ @Setter
private String s3LocksBucket;
@Value("${app.number-of-worker-treads:200}")
diff --git a/datafilecollector/src/main/java/org/oran/datafile/configuration/CertificateConfig.java b/datafilecollector/src/main/java/org/oran/datafile/configuration/CertificateConfig.java
index 889eb98..3de2b3b 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/configuration/CertificateConfig.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/configuration/CertificateConfig.java
@@ -26,11 +26,15 @@
@Builder
public class CertificateConfig {
+ @SuppressWarnings("java:S1104")
public String keyCert;
+ @SuppressWarnings("java:S1104")
public String keyPasswordPath;
+ @SuppressWarnings("java:S1104")
public String trustedCa;
+ @SuppressWarnings("java:S1104")
public String trustedCaPasswordPath;
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/configuration/SftpConfig.java b/datafilecollector/src/main/java/org/oran/datafile/configuration/SftpConfig.java
index 559e64e..0eee07d 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/configuration/SftpConfig.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/configuration/SftpConfig.java
@@ -25,7 +25,9 @@
@Builder
public class SftpConfig {
+ @SuppressWarnings("java:S1104")
public boolean strictHostKeyChecking;
+ @SuppressWarnings("java:S1104")
public String knownHostsFilePath;
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/datastore/FileStore.java b/datafilecollector/src/main/java/org/oran/datafile/datastore/FileStore.java
index 6d98afc..a6711c6 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/datastore/FileStore.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/datastore/FileStore.java
@@ -138,7 +138,7 @@
return Mono.just("OK");
}
- private Path path(String name) {
+ public Path path(String name) {
return Path.of(appConfig.getCollectedFilesPath(), name);
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/datastore/S3ObjectStore.java b/datafilecollector/src/main/java/org/oran/datafile/datastore/S3ObjectStore.java
index 5d1400d..5da27b0 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/datastore/S3ObjectStore.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/datastore/S3ObjectStore.java
@@ -73,6 +73,12 @@
getS3AsynchClient(applicationConfig);
}
+ @SuppressWarnings({"java:S3010", "java:S2209"})
+ public S3ObjectStore(AppConfig applicationConfig, S3AsyncClient s3AsynchClient) {
+ this.applicationConfig = applicationConfig;
+ this.s3AsynchClient = s3AsynchClient;
+ }
+
private static synchronized S3AsyncClient getS3AsynchClient(AppConfig applicationConfig) {
if (applicationConfig.isS3Enabled() && s3AsynchClient == null) {
s3AsynchClient = getS3AsyncClientBuilder(applicationConfig).build();
@@ -209,13 +215,9 @@
oids.add(oid);
}
- Delete delete = Delete.builder() //
- .objects(oids) //
- .build();
-
DeleteObjectsRequest request = DeleteObjectsRequest.builder() //
.bucket(bucket(bucket)) //
- .delete(delete) //
+ .delete(Delete.builder().objects(oids).build()) //NOSONAR
.build();
CompletableFuture<DeleteObjectsResponse> future = s3AsynchClient.deleteObjects(request);
diff --git a/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpClient.java b/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpClient.java
index f941155..fd56ee9 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpClient.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpClient.java
@@ -92,12 +92,13 @@
try {
latch.await();
} catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
throw new DatafileTaskException("Interrupted exception after datafile download - ", e);
}
if (isDownloadFailed(errorMessage)) {
- if (errorMessage.get() instanceof NonRetryableDatafileTaskException) {
- throw (NonRetryableDatafileTaskException) errorMessage.get();
+ if (errorMessage.get() instanceof NonRetryableDatafileTaskException nonRetryableException) {
+ throw nonRetryableException;
}
throw (DatafileTaskException) errorMessage.get();
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpsClient.java b/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpsClient.java
index 5cd0a31..f79082d 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpsClient.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/http/DfcHttpsClient.java
@@ -82,6 +82,7 @@
}
@Override
+ @SuppressWarnings("java:S2139")
public void collectFile(String remoteFile, Path localFile) throws DatafileTaskException {
logger.trace("Prepare to collectFile {}", localFile);
HttpGet httpGet = new HttpGet(HttpUtils.prepareHttpsUri(fileServerData, remoteFile));
@@ -97,12 +98,13 @@
HttpResponse httpResponse = makeCall(httpGet);
processResponse(httpResponse, localFile);
} catch (IOException e) {
- logger.error("marker", e);
+ logger.error("Error downloading file from server. Details: {}", e.getMessage());
throw new DatafileTaskException("Error downloading file from server. ", e);
}
logger.trace("HTTPS collectFile OK");
}
+ @SuppressWarnings("java:S2139")
HttpResponse makeCall(HttpGet httpGet) throws IOException, DatafileTaskException {
try {
HttpResponse httpResponse = executeHttpClient(httpGet);
diff --git a/datafilecollector/src/main/java/org/oran/datafile/http/HttpAsyncClientBuilderWrapper.java b/datafilecollector/src/main/java/org/oran/datafile/http/HttpAsyncClientBuilderWrapper.java
deleted file mode 100644
index 92877a5..0000000
--- a/datafilecollector/src/main/java/org/oran/datafile/http/HttpAsyncClientBuilderWrapper.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*-
- * ============LICENSE_START=======================================================
- * Copyright (C) 2019-2023 Nordix Foundation.
- * ================================================================================
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * SPDX-License-Identifier: Apache-2.0
- * ============LICENSE_END=========================================================
- */
-
-package org.oran.datafile.http;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.SSLContext;
-
-import org.apache.http.client.RedirectStrategy;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
-import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
-import org.apache.http.impl.nio.client.HttpAsyncClients;
-
-public class HttpAsyncClientBuilderWrapper {
- HttpAsyncClientBuilder builder = HttpAsyncClients.custom();
-
- public HttpAsyncClientBuilderWrapper setRedirectStrategy(RedirectStrategy redirectStrategy) {
- builder.setRedirectStrategy(redirectStrategy);
- return this;
- }
-
- public HttpAsyncClientBuilderWrapper setSslContext(SSLContext sslcontext) {
- builder.setSSLContext(sslcontext);
- return this;
- }
-
- public HttpAsyncClientBuilderWrapper setSslHostnameVerifier(HostnameVerifier hostnameVerifier) {
- builder.setSSLHostnameVerifier(hostnameVerifier);
- return this;
- }
-
- public HttpAsyncClientBuilderWrapper setDefaultRequestConfig(RequestConfig config) {
- builder.setDefaultRequestConfig(config);
- return this;
- }
-
- public CloseableHttpAsyncClient build() {
- return builder.build();
- }
-
-}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/http/HttpUtils.java b/datafilecollector/src/main/java/org/oran/datafile/http/HttpUtils.java
index 308b47e..3d085d4 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/http/HttpUtils.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/http/HttpUtils.java
@@ -21,16 +21,12 @@
import java.util.Base64;
import java.util.List;
-
import org.apache.hc.core5.http.NameValuePair;
import org.apache.http.HttpStatus;
import org.oran.datafile.model.FileServerData;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
public final class HttpUtils implements HttpStatus {
- private static final Logger logger = LoggerFactory.getLogger(HttpUtils.class);
public static final int HTTP_DEFAULT_PORT = 80;
public static final int HTTPS_DEFAULT_PORT = 443;
diff --git a/datafilecollector/src/main/java/org/oran/datafile/model/FileData.java b/datafilecollector/src/main/java/org/oran/datafile/model/FileData.java
index 4b013be..33a9327 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/model/FileData.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/model/FileData.java
@@ -86,8 +86,10 @@
private static final Logger logger = LoggerFactory.getLogger(FileData.class);
+ @SuppressWarnings("java:S1104")
public FileReadyMessage.ArrayOfNamedHashMap fileInfo;
+ @SuppressWarnings("java:S1104")
public FileReadyMessage.MessageMetaData messageMetaData;
public static Iterable<FileData> createFileData(FileReadyMessage msg) {
diff --git a/datafilecollector/src/main/java/org/oran/datafile/model/FileReadyMessage.java b/datafilecollector/src/main/java/org/oran/datafile/model/FileReadyMessage.java
index 05e332c..05b55ea 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/model/FileReadyMessage.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/model/FileReadyMessage.java
@@ -33,24 +33,43 @@
@Builder
public static class MessageMetaData {
+ @SuppressWarnings("java:S1104")
public String eventId;
+ @SuppressWarnings("java:S1104")
public String priority;
+
+ @SuppressWarnings("java:S1104")
public String version;
+
+ @SuppressWarnings("java:S1104")
public String reportingEntityName;
+
+ @SuppressWarnings("java:S1104")
public int sequence;
+
+ @SuppressWarnings("java:S1104")
public String domain;
+ @SuppressWarnings("java:S1104")
public String eventName;
+
+ @SuppressWarnings("java:S1104")
public String vesEventListenerVersion;
+ @SuppressWarnings("java:S1104")
public String sourceName;
+ @SuppressWarnings("java:S1104")
public long lastEpochMicrosec;
+
+ @SuppressWarnings("java:S1104")
public long startEpochMicrosec;
+ @SuppressWarnings("java:S1104")
public String timeZoneOffset;
+ @SuppressWarnings("java:S1104")
public String changeIdentifier;
/**
@@ -59,6 +78,7 @@
* example: Noti_RnNode-Ericsson_FileReady
*
*/
+ @SuppressWarnings("java:S6035")
public String productName() {
String[] eventArray = eventName.split("_|-");
if (eventArray.length >= 2) {
@@ -68,6 +88,7 @@
}
}
+ @SuppressWarnings("java:S6035")
public String vendorName() {
String[] eventArray = eventName.split("_|-");
if (eventArray.length >= 3) {
@@ -80,32 +101,53 @@
@Builder
public static class FileInfo {
+ @SuppressWarnings("java:S1104")
public String fileFormatType;
+
+ @SuppressWarnings("java:S1104")
public String location;
+
+ @SuppressWarnings("java:S1104")
public String fileFormatVersion;
+
+ @SuppressWarnings("java:S1104")
public String compression;
}
@Builder
public static class ArrayOfNamedHashMap {
+ @SuppressWarnings("java:S1104")
public String name;
+
+ @SuppressWarnings("java:S1104")
public FileInfo hashMap;
}
@Builder
public static class NotificationFields {
+ @SuppressWarnings("java:S1104")
public String notificationFieldsVersion;
+
+ @SuppressWarnings("java:S1104")
public String changeType;
+
+ @SuppressWarnings("java:S1104")
public String changeIdentifier;
+
+ @SuppressWarnings("java:S1104")
public List<ArrayOfNamedHashMap> arrayOfNamedHashMap;
}
@Builder
public static class Event {
+ @SuppressWarnings("java:S1104")
public MessageMetaData commonEventHeader;
+
+ @SuppressWarnings("java:S1104")
public NotificationFields notificationFields;
}
+ @SuppressWarnings("java:S1104")
public Event event;
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/model/FileServerData.java b/datafilecollector/src/main/java/org/oran/datafile/model/FileServerData.java
index 0bb41a2..187d2b2 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/model/FileServerData.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/model/FileServerData.java
@@ -21,6 +21,7 @@
import java.util.List;
import lombok.Builder;
+import lombok.Getter;
import lombok.ToString;
import org.apache.hc.core5.http.NameValuePair;
@@ -34,10 +35,16 @@
@ToString
public class FileServerData {
+ @SuppressWarnings("java:S1104")
+ @Getter
public String serverAddress;
+
+ @SuppressWarnings("java:S1104")
+ @Getter
public String userId;
@ToString.Exclude
+ @Getter
public String password;
@Builder.Default
@@ -47,5 +54,7 @@
@Builder.Default
public String uriRawFragment = "";
+ @SuppressWarnings("java:S1104")
+ @Getter
public Integer port;
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthBearerTokenJwt.java b/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthBearerTokenJwt.java
index 24e7608..c93a36b 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthBearerTokenJwt.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthBearerTokenJwt.java
@@ -1,35 +1,33 @@
-// ============LICENSE_START===============================================
-// Copyright (C) 2023 Nordix Foundation. All rights reserved.
-// ========================================================================
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// ============LICENSE_END=================================================
-//
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
package org.oran.datafile.oauth2;
import java.util.Base64;
import java.util.HashSet;
import java.util.Set;
-
import lombok.ToString;
-
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
import org.oran.datafile.exceptions.DatafileTaskException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
public class OAuthBearerTokenJwt implements OAuthBearerToken {
- private static final Logger logger = LoggerFactory.getLogger(OAuthBearerTokenJwt.class);
private static final com.google.gson.Gson gson = new com.google.gson.GsonBuilder().disableHtmlEscaping().create();
private final String jwtTokenRaw;
diff --git a/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandler.java b/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandler.java
index 54911dc..a0664c2 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandler.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandler.java
@@ -1,19 +1,21 @@
-// ============LICENSE_START===============================================
-// Copyright (C) 2023 Nordix Foundation. All rights reserved.
-// ========================================================================
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// ============LICENSE_END=================================================
-//
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * ============LICENSE_END=========================================================
+ */
package org.oran.datafile.oauth2;
@@ -51,6 +53,8 @@
@Override
public void close() {
+ /*This method intentionally left empty.
+ Close functionality will be implemented later.*/
}
@Override
@@ -59,11 +63,11 @@
if (!this.isConfigured)
throw new IllegalStateException("Callback handler not configured");
for (Callback callback : callbacks) {
- logger.debug("callback " + callback.toString());
- if (callback instanceof OAuthBearerTokenCallback) {
- handleCallback((OAuthBearerTokenCallback) callback);
- } else if (callback instanceof SaslExtensionsCallback) {
- handleCallback((SaslExtensionsCallback) callback);
+ logger.debug("callback {}", callback);
+ if (callback instanceof OAuthBearerTokenCallback oauthBearerTokenCallback) {
+ handleCallback(oauthBearerTokenCallback);
+ } else if (callback instanceof SaslExtensionsCallback saslExtensionsCallback) {
+ handleCallback(saslExtensionsCallback);
} else {
logger.error("Unsupported callback: {}", callback);
throw new UnsupportedCallbackException(callback);
@@ -90,4 +94,7 @@
}
}
+ public boolean isConfigured() {
+ return isConfigured;
+ }
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/oauth2/SecurityContext.java b/datafilecollector/src/main/java/org/oran/datafile/oauth2/SecurityContext.java
index 578c111..aa13ca1 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/oauth2/SecurityContext.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/oauth2/SecurityContext.java
@@ -49,10 +49,11 @@
private static SecurityContext instance;
@Setter
+ @Getter
private Path authTokenFilePath;
public SecurityContext(@Value("${app.auth-token-file:}") String authTokenFilename) {
- instance = this;
+ instance = this; //NOSONAR
if (!authTokenFilename.isEmpty()) {
this.authTokenFilePath = Path.of(authTokenFilename);
}
diff --git a/datafilecollector/src/main/java/org/oran/datafile/tasks/CollectAndReportFiles.java b/datafilecollector/src/main/java/org/oran/datafile/tasks/CollectAndReportFiles.java
index 93b9a71..9ea9d57 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/tasks/CollectAndReportFiles.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/tasks/CollectAndReportFiles.java
@@ -258,7 +258,8 @@
private Mono<FilePublishInformation> handleFetchFileFailure(FileData fileData, Throwable t) {
Path localFilePath = fileData.getLocalFilePath(this.appConfig);
- logger.error("File fetching failed, path {}, reason: {}", fileData.remoteFilePath(), t.getMessage());
+ String remoteFilePath = fileData.remoteFilePath();
+ logger.error("File fetching failed, path {}, reason: {}", remoteFilePath, t.getMessage());
deleteFile(localFilePath);
if (FileData.Scheme.isFtpScheme(fileData.scheme())) {
counters.incNoOfFailedFtp();
diff --git a/datafilecollector/src/main/java/org/oran/datafile/tasks/FileCollector.java b/datafilecollector/src/main/java/org/oran/datafile/tasks/FileCollector.java
index c36bd49..999d2e8 100644
--- a/datafilecollector/src/main/java/org/oran/datafile/tasks/FileCollector.java
+++ b/datafilecollector/src/main/java/org/oran/datafile/tasks/FileCollector.java
@@ -169,7 +169,7 @@
return new SftpClient(fileData.fileServerData(), new SftpClientSettings(appConfig.getSftpConfiguration()));
}
- protected FtpesClient createFtpesClient(FileData fileData) throws DatafileTaskException {
+ protected FtpesClient createFtpesClient(FileData fileData) {
CertificateConfig config = appConfig.getCertificateConfiguration();
Path trustedCa = config.trustedCa.isEmpty() ? null : Paths.get(config.trustedCa);
diff --git a/datafilecollector/src/test/java/org/oran/datafile/Integration.java b/datafilecollector/src/test/java/org/oran/datafile/Integration.java
index 27b36c1..c8ef111 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/Integration.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/Integration.java
@@ -239,11 +239,6 @@
}
@Test
- void clear() {
-
- }
-
- @Test
void testKafka() throws InterruptedException {
waitForKafkaListener();
@@ -278,7 +273,7 @@
while (kafkaReceiver.count < NO_OF_OBJECTS) {
logger.info("sleeping {}", kafkaReceiver.count);
- Thread.sleep(1000 * 1);
+ Thread.sleep(1000); //NOSONAR
}
String rec = kafkaReceiver.lastValue();
diff --git a/datafilecollector/src/test/java/org/oran/datafile/configuration/AppConfigTest.java b/datafilecollector/src/test/java/org/oran/datafile/configuration/AppConfigTest.java
new file mode 100644
index 0000000..549cc03
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/configuration/AppConfigTest.java
@@ -0,0 +1,168 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.configuration;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.lang.reflect.Field;
+import java.util.HashMap;
+import java.util.Map;
+import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.common.config.SaslConfigs;
+import org.apache.kafka.common.config.SslConfigs;
+import org.apache.kafka.common.security.auth.SecurityProtocol;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.MockitoAnnotations;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.oran.datafile.oauth2.OAuthKafkaAuthenticateLoginCallbackHandler;
+import org.springframework.test.context.ContextConfiguration;
+
+@ContextConfiguration(classes = {AppConfig.class})
+@ExtendWith(MockitoExtension.class)
+class AppConfigTest {
+
+ @InjectMocks
+ private AppConfig appConfig;
+
+ @BeforeEach
+ void setup() {
+ MockitoAnnotations.initMocks(this);
+ }
+ @Test
+ void testGetS3LocksBucket_WhenEmptyLocksBucket_ReturnsS3Bucket() {
+ injectFieldValue(appConfig, "s3Bucket", "test-bucket");
+ injectFieldValue(appConfig, "s3LocksBucket", "");
+
+ String result = appConfig.getS3LocksBucket();
+ assertEquals("test-bucket", result);
+ }
+
+ @Test
+ void testGetS3LocksBucket_WhenNonEmptyLocksBucket_ReturnsLocksBucket() {
+ injectFieldValue(appConfig, "s3Bucket", "test-bucket");
+ injectFieldValue(appConfig, "s3LocksBucket", "locks");
+
+ String result = appConfig.getS3LocksBucket();
+ assertEquals("locks", result);
+ }
+
+ @Test
+ void testIsS3Enabled_WhenS3EndpointAndBucketSet_ReturnsTrue() {
+ injectFieldValue(appConfig, "s3Bucket", "test-bucket");
+ injectFieldValue(appConfig, "s3EndpointOverride", "s3.endpoint");
+ boolean result = appConfig.isS3Enabled();
+ assertTrue(result);
+ }
+
+ @Test
+ void testIsS3Enabled_WhenS3EndpointNotSet_ReturnsFalse() {
+ injectFieldValue(appConfig, "s3Bucket", "test-bucket");
+ injectFieldValue(appConfig, "s3EndpointOverride", "");
+ boolean result = appConfig.isS3Enabled();
+ assertFalse(result);
+ }
+
+ @Test
+ void testGetKafkaBootStrapServers() {
+ assertNull((new AppConfig()).getKafkaBootStrapServers());
+ }
+
+ @Test
+ void testAddKafkaSecurityProps_UseOAuthToken() {
+ Map<String, Object> props = new HashMap<>();
+ injectFieldValue(appConfig, "useOathToken", true);
+ injectFieldValue(appConfig, "kafkaKeyStoreLocation", "key-store-location");
+ injectFieldValue(appConfig, "kafkTrustStoreLocation", "trust-store-location");
+ injectFieldValue(appConfig, "kafkaKeyStorePassword", "key-store-password");
+
+ appConfig.addKafkaSecurityProps(props);
+
+ assertEquals(SecurityProtocol.SASL_SSL.name, props.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG));
+ assertEquals("OAUTHBEARER", props.get(SaslConfigs.SASL_MECHANISM));
+ assertEquals(OAuthKafkaAuthenticateLoginCallbackHandler.class.getName(),
+ props.get(SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS));
+ assertEquals(
+ "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required unsecuredLoginStringClaim_sub=\"alice\"; ",
+ props.get(SaslConfigs.SASL_JAAS_CONFIG));
+ }
+
+ @Test
+ void testAddKafkaSecurityProps_SslConfig() {
+ Map<String, Object> props = new HashMap<>();
+ injectFieldValue(appConfig, "useOathToken", false);
+ injectFieldValue(appConfig, "kafkaKeyStoreLocation", "key-store-location");
+ injectFieldValue(appConfig, "kafkaKeyStoreType", "JKS");
+ injectFieldValue(appConfig, "kafkaKeyStorePassword", "key-store-password");
+ injectFieldValue(appConfig, "kafkTrustStoreLocation", "trust-store-location");
+ injectFieldValue(appConfig, "kafkaTrustStoreType", "JKS");
+
+ appConfig.addKafkaSecurityProps(props);
+
+ assertEquals(SecurityProtocol.SASL_SSL.name, props.get(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG));
+ assertEquals("JKS", props.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG));
+ assertEquals("key-store-location", props.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG));
+ assertEquals("key-store-password", props.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG));
+ assertEquals("JKS", props.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG));
+ assertEquals("trust-store-location", props.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG));
+ }
+
+ @Test
+ void testGetCertificateConfiguration() {
+ injectFieldValue(appConfig, "clientTrustStore", "trust-store");
+ injectFieldValue(appConfig, "clientTrustStorePassword", "trust-store-password");
+ injectFieldValue(appConfig, "clientKeyStore", "key-store");
+ injectFieldValue(appConfig, "clientKeyStorePassword", "key-store-password");
+
+ CertificateConfig certificateConfig = appConfig.getCertificateConfiguration();
+
+ assertEquals("trust-store", certificateConfig.trustedCa);
+ assertEquals("trust-store-password", certificateConfig.trustedCaPasswordPath);
+ assertEquals("key-store", certificateConfig.keyCert);
+ assertEquals("key-store-password", certificateConfig.keyPasswordPath);
+ }
+
+ @Test
+ void testGetSftpConfiguration() {
+ injectFieldValue(appConfig, "knownHostsFilePath", "/path/to/known_hosts");
+ injectFieldValue(appConfig, "strictHostKeyChecking", true);
+
+ SftpConfig sftpConfig = appConfig.getSftpConfiguration();
+
+ assertEquals("/path/to/known_hosts", sftpConfig.knownHostsFilePath);
+ assertTrue(sftpConfig.strictHostKeyChecking);
+ }
+
+ private void injectFieldValue(Object target, String fieldName, Object value) {
+ try {
+ Field field = target.getClass().getDeclaredField(fieldName);
+ field.setAccessible(true);
+ field.set(target, value);
+ } catch (NoSuchFieldException | IllegalAccessException e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/configuration/CertificateConfigTest.java b/datafilecollector/src/test/java/org/oran/datafile/configuration/CertificateConfigTest.java
new file mode 100644
index 0000000..b030b84
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/configuration/CertificateConfigTest.java
@@ -0,0 +1,39 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.configuration;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class CertificateConfigTest {
+ @Test
+ void testConstructor() {
+ CertificateConfig actualCertificateConfig = new CertificateConfig("Key Cert", "Key Password Path", "Trusted Ca",
+ "Trusted Ca Password Path");
+
+ assertEquals("Key Cert", actualCertificateConfig.keyCert);
+ assertEquals("Trusted Ca Password Path", actualCertificateConfig.trustedCaPasswordPath);
+ assertEquals("Trusted Ca", actualCertificateConfig.trustedCa);
+ assertEquals("Key Password Path", actualCertificateConfig.keyPasswordPath);
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/controllers/StatusControllerTest.java b/datafilecollector/src/test/java/org/oran/datafile/controllers/StatusControllerTest.java
index 1826096..39d4dc1 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/controllers/StatusControllerTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/controllers/StatusControllerTest.java
@@ -21,6 +21,7 @@
package org.oran.datafile.controllers;
+import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.doReturn;
@@ -32,11 +33,12 @@
import org.oran.datafile.model.Counters;
import org.oran.datafile.tasks.CollectAndReportFiles;
import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpStatusCode;
import org.springframework.http.ResponseEntity;
import reactor.core.publisher.Mono;
@ExtendWith(MockitoExtension.class)
-public class StatusControllerTest {
+class StatusControllerTest {
@Mock
CollectAndReportFiles scheduledTasksMock;
@@ -48,7 +50,7 @@
}
@Test
- public void heartbeat_success() {
+ void heartbeat_success() {
HttpHeaders httpHeaders = new HttpHeaders();
Mono<ResponseEntity<String>> result = controllerUnderTest.heartbeat(httpHeaders);
@@ -58,7 +60,7 @@
}
@Test
- public void status() {
+ void status() {
Counters counters = new Counters();
doReturn(counters).when(scheduledTasksMock).getCounters();
@@ -67,6 +69,8 @@
Mono<ResponseEntity<String>> result = controllerUnderTest.status(httpHeaders);
String body = result.block().getBody();
+ HttpStatusCode httpStatusCode = result.block().getStatusCode();
+ assertEquals(200, httpStatusCode.value());
System.out.println(body);
}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/datastore/DataStoreTest.java b/datafilecollector/src/test/java/org/oran/datafile/datastore/DataStoreTest.java
new file mode 100644
index 0000000..e2e800e
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/datastore/DataStoreTest.java
@@ -0,0 +1,54 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.datastore;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.when;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.oran.datafile.configuration.AppConfig;
+
+@ExtendWith(MockitoExtension.class)
+class DataStoreTest {
+
+ @Mock
+ private AppConfig mockAppConfig;
+
+ @Test
+ void testCreateWithS3Enabled() {
+ when(mockAppConfig.isS3Enabled()).thenReturn(true);
+ when(mockAppConfig.getS3EndpointOverride()).thenReturn("https://dummy-s3-bucket.s3.amazonaws.com");
+ when(mockAppConfig.getS3AccessKeyId()).thenReturn("test-access-key-id");
+ when(mockAppConfig.getS3SecretAccessKey()).thenReturn("test-access-key-secret");
+ DataStore dataStore = DataStore.create(mockAppConfig);
+ assertTrue(dataStore instanceof S3ObjectStore);
+ }
+
+ @Test
+ void testCreateWithS3Disabled() {
+ when(mockAppConfig.isS3Enabled()).thenReturn(false);
+ DataStore dataStore = DataStore.create(mockAppConfig);
+ assertTrue(dataStore instanceof FileStore);
+ }
+}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/datastore/FileStoreTest.java b/datafilecollector/src/test/java/org/oran/datafile/datastore/FileStoreTest.java
new file mode 100644
index 0000000..21ff1c1
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/datastore/FileStoreTest.java
@@ -0,0 +1,257 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.datastore;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyInt;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mockStatic;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import java.nio.file.FileVisitor;
+import java.nio.file.Files;
+import java.nio.file.LinkOption;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.MockedStatic;
+import org.mockito.Mockito;
+import org.mockito.MockitoAnnotations;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.oran.datafile.configuration.AppConfig;
+import org.springframework.test.context.ContextConfiguration;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+
+@ContextConfiguration(classes = { FileStore.class })
+@ExtendWith(MockitoExtension.class)
+class FileStoreTest {
+
+ @Mock
+ private AppConfig appConfig;
+
+ private FileStore fileStore;
+
+ @Mock
+ private Path mockPath;
+
+ @BeforeEach
+ void setup() {
+ MockitoAnnotations.initMocks(this);
+ fileStore = new FileStore(appConfig);
+
+ when(appConfig.getCollectedFilesPath()).thenReturn("/path/to/collected/files");
+ }
+
+ @Test
+ void testListObjects() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.listObjects(DataStore.Bucket.FILES, "Prefix");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testListObjects3() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.listObjects(DataStore.Bucket.LOCKS, "Prefix");
+ verify(appConfig).getCollectedFilesPath();
+ }
+
+ @Test
+ void testListObjects_WithExistingFiles() {
+ List<Path> fileList = new ArrayList<>();
+ fileList.add(Path.of("/path/to/collected/files/file1.txt"));
+ fileList.add(Path.of("/path/to/collected/files/file2.txt"));
+
+ when(appConfig.getCollectedFilesPath()).thenReturn("/path/to/collected/files");
+
+ // Mock Files.walk() to return the prepared stream
+ try (MockedStatic<Files> filesMockedStatic = mockStatic(Files.class)) {
+ filesMockedStatic.when(() -> Files.walk(any(), anyInt())).
+ thenReturn(fileList.stream());
+
+ StepVerifier.create(fileStore.listObjects(DataStore.Bucket.FILES, "")).
+ expectNext("file1.txt").
+ expectNext("file2.txt").
+ expectComplete();
+ }
+ }
+ @Test
+ void testReadObject() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.readObject(DataStore.Bucket.FILES, "foo.txt");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testReadObject2() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.readObject(DataStore.Bucket.LOCKS, "foo.txt");
+ verify(appConfig).getCollectedFilesPath();
+ }
+
+ @Test
+ void testReadObject_WithExistingFile() {
+ byte[] content = "Hello, world!".getBytes();
+ Path filePath = Path.of("/path/to/collected/files/test.txt");
+
+ try (MockedStatic<Files> filesMockedStatic = mockStatic(Files.class)) {
+ filesMockedStatic.when(() -> Files.readAllBytes(eq(filePath))).
+ thenReturn(content);
+
+ StepVerifier.create(fileStore.readObject(DataStore.Bucket.FILES, "test.txt")).
+ expectNext(content).
+ verifyComplete();
+ }
+ }
+ @Test
+ void testCreateLock() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.createLock("Name");
+ verify(appConfig, atLeast(1)).getCollectedFilesPath();
+ }
+ @Test
+ void testCreateLock3() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("");
+ fileStore.createLock("/");
+ verify(appConfig, atLeast(1)).getCollectedFilesPath();
+ }
+ @Test
+ void testDeleteLock() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.deleteLock("Name");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testDeleteLock2() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("");
+ fileStore.deleteLock("//");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testDeleteObject() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.deleteObject(DataStore.Bucket.FILES, "Name");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testDeleteObject2() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.deleteObject(DataStore.Bucket.LOCKS, "Name");
+ verify(appConfig).getCollectedFilesPath();
+ }
+
+ @Test
+ void testPath() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.path("Name");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testFileExists() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.fileExists(DataStore.Bucket.FILES, "Key");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testFileExists2() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.fileExists(DataStore.Bucket.LOCKS, "Key");
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testDeleteBucket() {
+ when(appConfig.getCollectedFilesPath()).thenReturn("Collected Files Path");
+ fileStore.deleteBucket(DataStore.Bucket.FILES);
+ verify(appConfig).getCollectedFilesPath();
+ }
+ @Test
+ void testDeleteBucket2() throws IOException {
+ try (MockedStatic<Files> mockFiles = mockStatic(Files.class)) {
+ mockFiles.when(() -> Files.walkFileTree(Mockito.<Path>any(), Mockito.<FileVisitor<Path>>any())).
+ thenReturn(Paths.get(System.getProperty("java.io.tmpdir"), "test.txt"));
+ mockFiles.when(() -> Files.exists(Mockito.<Path>any(), (LinkOption[]) any())).thenReturn(true);
+ when(appConfig.getCollectedFilesPath()).thenReturn("");
+ fileStore.deleteBucket(DataStore.Bucket.LOCKS);
+ mockFiles.verify(() -> Files.exists(Mockito.<Path>any(), (LinkOption[]) any()));
+ mockFiles.verify(() -> Files.walkFileTree(Mockito.<Path>any(), Mockito.<FileVisitor<Path>>any()));
+ verify(appConfig).getCollectedFilesPath();
+ }
+ }
+ @Test
+ void testDeleteBucket3() throws IOException {
+ try (MockedStatic<Files> mockFiles = mockStatic(Files.class)) {
+ mockFiles.when(() -> Files.walkFileTree(Mockito.<Path>any(), Mockito.<FileVisitor<Path>>any())).
+ thenThrow(new IOException("OK"));
+ mockFiles.when(() -> Files.exists(Mockito.<Path>any(), (LinkOption[]) any())).thenReturn(true);
+ when(appConfig.getCollectedFilesPath()).thenReturn("");
+ fileStore.deleteBucket(DataStore.Bucket.LOCKS);
+ mockFiles.verify(() -> Files.exists(Mockito.<Path>any(), (LinkOption[]) any()));
+ mockFiles.verify(() -> Files.walkFileTree(Mockito.<Path>any(), Mockito.<FileVisitor<Path>>any()));
+ verify(appConfig, atLeast(1)).getCollectedFilesPath();
+ }
+ }
+
+ @Test
+ void testCreateLock_Success() throws IOException {
+ Path lockPath = Path.of("/path/to/collected/files/locks/lock.txt");
+
+ when(appConfig.getCollectedFilesPath()).thenReturn("/path/to/collected/files");
+
+ try (MockedStatic<Files> filesMockedStatic = mockStatic(Files.class)) {
+ filesMockedStatic.when(() -> Files.createDirectories(lockPath.getParent())).
+ thenReturn(lockPath.getParent());
+
+ try (MockedStatic<Path> pathMockedStatic = mockStatic(Path.class)) {
+ filesMockedStatic.when(() -> Files.createFile(any(Path.class))).thenReturn(lockPath);
+
+ String name = "test.txt";
+ String[] pathComponents = { "collectedFiles", name };
+
+ when(fileStore.path(Arrays.toString(pathComponents))).thenReturn(mockPath);
+ Path path = fileStore.path(Arrays.toString(pathComponents));
+ assertEquals(mockPath, path);
+ }
+ }
+ }
+
+ @Test
+ void testCopyFileTo_Failure() {
+ // Define dummy values for testing
+ Path from = Paths.get("non-existent-file.txt");
+ String to = "destination-folder";
+
+ // Use StepVerifier to test the method
+ Mono<String> resultMono = fileStore.copyFileTo(from, to);
+
+ StepVerifier.create(resultMono).
+ expectError(IOException.class).
+ verify();
+ }
+}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/datastore/S3ObjectStoreTest.java b/datafilecollector/src/test/java/org/oran/datafile/datastore/S3ObjectStoreTest.java
new file mode 100644
index 0000000..298e9b4
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/datastore/S3ObjectStoreTest.java
@@ -0,0 +1,321 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.datastore;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.atLeast;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.net.URISyntaxException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.oran.datafile.configuration.AppConfig;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+import reactor.test.StepVerifier;
+import software.amazon.awssdk.core.ResponseBytes;
+import software.amazon.awssdk.core.async.AsyncRequestBody;
+import software.amazon.awssdk.core.async.AsyncResponseTransformer;
+import software.amazon.awssdk.services.s3.S3AsyncClient;
+import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
+import software.amazon.awssdk.services.s3.model.CreateBucketResponse;
+import software.amazon.awssdk.services.s3.model.DeleteBucketRequest;
+import software.amazon.awssdk.services.s3.model.DeleteBucketResponse;
+import software.amazon.awssdk.services.s3.model.DeleteObjectRequest;
+import software.amazon.awssdk.services.s3.model.DeleteObjectResponse;
+import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest;
+import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse;
+import software.amazon.awssdk.services.s3.model.GetObjectRequest;
+import software.amazon.awssdk.services.s3.model.GetObjectResponse;
+import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
+import software.amazon.awssdk.services.s3.model.HeadObjectResponse;
+import software.amazon.awssdk.services.s3.model.ListObjectsRequest;
+import software.amazon.awssdk.services.s3.model.ListObjectsResponse;
+import software.amazon.awssdk.services.s3.model.PutObjectRequest;
+import software.amazon.awssdk.services.s3.model.PutObjectResponse;
+import software.amazon.awssdk.services.s3.model.S3Object;
+
+
+@ExtendWith(MockitoExtension.class)
+class S3ObjectStoreTest {
+
+ @Mock
+ private AppConfig appConfig;
+
+ @Mock
+ private S3AsyncClient s3AsynchClient;
+
+ private S3ObjectStore s3ObjectStore;
+
+ @BeforeEach
+ void setup() {
+ Mockito.lenient().when(appConfig.getS3EndpointOverride()).thenReturn("https://dummy-s3-bucket.s3.amazonaws.com");
+ Mockito.lenient().when(appConfig.getS3AccessKeyId()).thenReturn("test-access-key-id");
+ Mockito.lenient().when(appConfig.getS3SecretAccessKey()).thenReturn("test-access-key-secret");
+
+ Mockito.lenient().when(appConfig.getS3Bucket()).thenReturn("test-bucket");
+ Mockito.lenient().when(appConfig.getS3LocksBucket()).thenReturn("test-lock-bucket");
+ Mockito.lenient().when(appConfig.isS3Enabled()).thenReturn(true);
+
+ s3ObjectStore = new S3ObjectStore(appConfig, s3AsynchClient);
+ }
+
+ @Test
+ void createS3Bucket() {
+ CreateBucketRequest request = CreateBucketRequest.builder()
+ .bucket("test-bucket")
+ .build();
+
+ when(s3AsynchClient.createBucket(any(CreateBucketRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(CreateBucketResponse.builder().build()));
+
+ Mono<String> result = s3ObjectStore.create(DataStore.Bucket.FILES);
+
+ verify(s3AsynchClient, atLeast(1)).createBucket(any(CreateBucketRequest.class));
+
+ StepVerifier.create(result).expectNext("test-bucket").verifyComplete();
+
+ assertThat(result.block()).isEqualTo("test-bucket");
+ }
+
+ @Test
+ void listObjects() {
+ String prefix = "prefix/";
+
+ ListObjectsResponse response1 = ListObjectsResponse.builder()
+ .contents(createS3Object("object1"))
+ .isTruncated(true)
+ .nextMarker("marker1")
+ .build();
+
+ ListObjectsResponse response2 = ListObjectsResponse.builder()
+ .contents(createS3Object("object2"))
+ .isTruncated(false)
+ .build();
+
+ when(s3AsynchClient.listObjects(any(ListObjectsRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(response1),
+ CompletableFuture.completedFuture(response2));
+
+ Flux<String> result = s3ObjectStore.listObjects(DataStore.Bucket.FILES, prefix);
+
+ verify(s3AsynchClient, atLeast(1)).listObjects(any(ListObjectsRequest.class));
+
+ StepVerifier.create(result)
+ .expectNext("object1")
+ .expectNext("object2")
+ .verifyComplete();
+
+ // Collect the results into a list
+ List<String> resultList = result.collectList().block();
+
+ assertEquals(Arrays.asList("object1", "object2"), resultList);
+ }
+
+ @Test
+ void testCreateLockWithExistingHead() {
+ HeadObjectResponse headObjectResponse = HeadObjectResponse.builder().build();
+
+ when(s3AsynchClient.headObject(any(HeadObjectRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(headObjectResponse));
+
+ Mono<Boolean> result = s3ObjectStore.createLock("lockName");
+
+ StepVerifier.create(result)
+ .expectNext(false)
+ .verifyComplete();
+
+ assertThat(result.block()).isFalse();
+ }
+
+ @Test
+ void testCreateLockWithoutExistingHead() {
+ HeadObjectResponse headObjectResponse = null;
+ Mockito.doReturn(CompletableFuture.completedFuture(headObjectResponse))
+ .when(s3AsynchClient)
+ .headObject(any(HeadObjectRequest.class));
+
+ Mono<Boolean> result = s3ObjectStore.createLock("lockName");
+
+ StepVerifier.create(result)
+ .expectComplete()
+ .verify();
+
+ Boolean resultVal = result.block();
+
+ assertThat(resultVal).isNull();
+ }
+
+
+ @Test
+ void deleteLock() {
+ when(s3AsynchClient.deleteObject(any(DeleteObjectRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(DeleteObjectResponse.builder().build()));
+
+ Mono<Boolean> result = s3ObjectStore.deleteLock("lock-name");
+
+ StepVerifier.create(result)
+ .expectNext(true)
+ .verifyComplete();
+
+ assertThat(result.block()).isTrue();
+ }
+
+ @Test
+ void testDeleteObject() {
+ when(s3AsynchClient.deleteObject(any(DeleteObjectRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(DeleteObjectResponse.builder().build()));
+
+ Mono<Boolean> result = s3ObjectStore.deleteObject(DataStore.Bucket.LOCKS, "objectName");
+
+ StepVerifier.create(result)
+ .expectNext(true)
+ .verifyComplete();
+
+ assertThat(result.block()).isTrue();
+ }
+
+ @Test
+ void testDeleteBucket_Success() {
+ DeleteBucketRequest request = DeleteBucketRequest.builder() //
+ .bucket("test-bucket")
+ .build();
+
+ when(s3AsynchClient.deleteBucket(any(DeleteBucketRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(DeleteBucketResponse.builder().build()));
+
+ DeleteObjectsRequest objectRequest = DeleteObjectsRequest.builder() //
+ .bucket("test-bucket")
+ .build();
+
+ when(s3AsynchClient.deleteObjects(any(DeleteObjectsRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(DeleteObjectsResponse.builder().build()));
+
+ String prefix = "prefix/";
+
+ ListObjectsResponse response1 = ListObjectsResponse.builder()
+ .contents(createS3Object("object1"))
+ .isTruncated(true)
+ .nextMarker("marker1")
+ .build();
+
+ ListObjectsResponse response2 = ListObjectsResponse.builder()
+ .contents(createS3Object("object2"))
+ .isTruncated(false)
+ .build();
+
+ when(s3AsynchClient.listObjects(any(ListObjectsRequest.class)))
+ .thenReturn(CompletableFuture.completedFuture(response1),
+ CompletableFuture.completedFuture(response2));
+
+ Mono<String> result = s3ObjectStore.deleteBucket(DataStore.Bucket.FILES);
+
+ StepVerifier.create(result)
+ .expectNext("OK")
+ .verifyComplete();
+ }
+
+ @Test
+ void testCopyFileTo_Success() throws URISyntaxException {
+ PutObjectRequest request = PutObjectRequest.builder() //
+ .bucket("test-bucket") //
+ .key("test-access-key-id") //
+ .build();
+
+ when(s3AsynchClient.putObject(any(PutObjectRequest.class), any(AsyncRequestBody.class)))
+ .thenAnswer(invocation -> {
+ CompletableFuture<PutObjectResponse> future = CompletableFuture.completedFuture(
+ PutObjectResponse.builder().build()
+ );
+ return future;
+ });
+
+ Path testFile = Paths.get(getClass().getResource("/org/oran/datafile/datastore/file.txt").toURI());
+
+ Mono<String> result = s3ObjectStore.copyFileTo(testFile, "test-key");
+
+ StepVerifier.create(result)
+ .expectNext("test-key")
+ .verifyComplete();
+ }
+
+ @Test
+ void testReadObject() {
+ // Mock the getObject method to return a CompletableFuture with ResponseBytes
+ when(s3AsynchClient.getObject(any(GetObjectRequest.class), any(AsyncResponseTransformer.class)))
+ .thenAnswer(invocation -> {
+ ResponseBytes<GetObjectResponse> responseBytes = ResponseBytes.fromByteArray(
+ GetObjectResponse.builder().build(),
+ "Hello, World!".getBytes(StandardCharsets.UTF_8)
+ );
+ CompletableFuture<ResponseBytes<GetObjectResponse>> future = CompletableFuture.completedFuture(
+ responseBytes
+ );
+ return future;
+ });
+
+ // Call the method under test
+ Mono<byte[]> result = s3ObjectStore.readObject(DataStore.Bucket.FILES, "test-key");
+
+ byte[] expectedBytes = "Hello, World!".getBytes(StandardCharsets.UTF_8);
+ StepVerifier.create(result)
+ .consumeNextWith(actualBytes -> Assertions.assertArrayEquals(expectedBytes, actualBytes))
+ .verifyComplete();
+ }
+
+ @Test
+ void testPutObject() {
+ // Mock the putObject method to return a CompletableFuture with PutObjectResponse
+ when(s3AsynchClient.putObject(any(PutObjectRequest.class), any(AsyncRequestBody.class)))
+ .thenAnswer(invocation -> {
+ CompletableFuture<PutObjectResponse> future = CompletableFuture.completedFuture(
+ PutObjectResponse.builder().build()
+ );
+ return future;
+ });
+
+ // Call the method under test
+ Mono<String> result = s3ObjectStore.putObject(DataStore.Bucket.FILES, "test-key", "Hello, World!");
+
+ // Verify the Mono's behavior using StepVerifier
+ StepVerifier.create(result)
+ .expectNext("test-key")
+ .verifyComplete();
+ }
+
+ private S3Object createS3Object(String key) {
+ return S3Object.builder().key(key).build();
+ }
+}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/ftp/FtpesClientTest.java b/datafilecollector/src/test/java/org/oran/datafile/ftp/FtpesClientTest.java
index ef3310a..7effe75 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/ftp/FtpesClientTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/ftp/FtpesClientTest.java
@@ -18,7 +18,11 @@
package org.oran.datafile.ftp;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
@@ -31,21 +35,25 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
+import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
-
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.cert.CertificateException;
import javax.net.ssl.KeyManager;
import javax.net.ssl.TrustManager;
-
import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPSClient;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatchers;
+import org.oran.datafile.exceptions.DatafileTaskException;
+import org.oran.datafile.exceptions.NonRetryableDatafileTaskException;
import org.oran.datafile.model.FileServerData;
import org.springframework.http.HttpStatus;
-public class FtpesClientTest {
+class FtpesClientTest {
private static final String REMOTE_FILE_PATH = "/dir/sample.txt";
private static final Path LOCAL_FILE_PATH = Paths.get("target/sample.txt");
@@ -85,7 +93,7 @@
private void verifyFtpsClientMock_openOk() throws Exception {
doReturn(outputStreamMock).when(clientUnderTestSpy).createOutputStream(LOCAL_FILE_PATH);
- when(ftpsClientMock.retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH),
+ when(ftpsClientMock.retrieveFile(eq(REMOTE_FILE_PATH),
ArgumentMatchers.any(OutputStream.class))).thenReturn(true);
verify(ftpsClientMock).setNeedClientAuth(true);
verify(ftpsClientMock).setKeyManager(keyManagerMock);
@@ -101,7 +109,7 @@
}
@Test
- public void collectFile_allOk() throws Exception {
+ void collectFile_allOk() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doReturn(trustManagerMock).when(clientUnderTestSpy).getTrustManager(TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD);
@@ -121,12 +129,12 @@
verify(ftpsClientMock, times(1)).isConnected();
verify(ftpsClientMock, times(1)).logout();
verify(ftpsClientMock, times(1)).disconnect();
- verify(ftpsClientMock, times(1)).retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), any());
+ verify(ftpsClientMock, times(1)).retrieveFile(eq(REMOTE_FILE_PATH), any());
verifyNoMoreInteractions(ftpsClientMock);
}
@Test
- public void collectFileFaultyOwnKey_shouldFail() throws Exception {
+ void collectFileFaultyOwnKey_shouldFail() throws Exception {
doReturn(outputStreamMock).when(clientUnderTestSpy).createOutputStream(LOCAL_FILE_PATH);
assertThatThrownBy(() -> clientUnderTestSpy.open()).hasMessageContaining("Could not open connection:");
@@ -140,7 +148,7 @@
}
@Test
- public void collectFileFaultTrustedCA_shouldFail_no_trustedCA_file() throws Exception {
+ void collectFileFaultTrustedCA_shouldFail_no_trustedCA_file() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doThrow(new IOException("problem")).when(clientUnderTestSpy).createInputStream(TRUSTED_CA_PATH);
@@ -150,7 +158,7 @@
}
@Test
- public void collectFileFaultTrustedCA_shouldFail_empty_trustedCA_file() throws Exception {
+ void collectFileFaultTrustedCA_shouldFail_empty_trustedCA_file() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doReturn(inputStreamMock).when(clientUnderTestSpy).createInputStream(TRUSTED_CA_PATH);
@@ -159,7 +167,7 @@
}
@Test
- public void collectFileFaultyLogin_shouldFail() throws Exception {
+ void collectFileFaultyLogin_shouldFail() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doReturn(trustManagerMock).when(clientUnderTestSpy).getTrustManager(TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD);
@@ -176,7 +184,7 @@
}
@Test
- public void collectFileBadRequestResponse_shouldFail() throws Exception {
+ void collectFileBadRequestResponse_shouldFail() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doReturn(trustManagerMock).when(clientUnderTestSpy).getTrustManager(TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD);
doReturn(outputStreamMock).when(clientUnderTestSpy).createOutputStream(LOCAL_FILE_PATH);
@@ -196,7 +204,7 @@
}
@Test
- public void collectFile_shouldFail() throws Exception {
+ void collectFile_shouldFail() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doReturn(trustManagerMock).when(clientUnderTestSpy).getTrustManager(TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD);
doReturn(outputStreamMock).when(clientUnderTestSpy).createOutputStream(LOCAL_FILE_PATH);
@@ -210,12 +218,12 @@
.hasMessageContaining(REMOTE_FILE_PATH).hasMessageContaining("No retry");
verifyFtpsClientMock_openOk();
- verify(ftpsClientMock, times(1)).retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), any());
+ verify(ftpsClientMock, times(1)).retrieveFile(eq(REMOTE_FILE_PATH), any());
verifyNoMoreInteractions(ftpsClientMock);
}
@Test
- public void collectFile_shouldFail_ioexception() throws Exception {
+ void collectFile_shouldFail_ioexception() throws Exception {
doReturn(keyManagerMock).when(clientUnderTestSpy).getKeyManager(Paths.get(FTP_KEY_PATH), FTP_KEY_PASSWORD_PATH);
doReturn(trustManagerMock).when(clientUnderTestSpy).getTrustManager(TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD);
doReturn(outputStreamMock).when(clientUnderTestSpy).createOutputStream(LOCAL_FILE_PATH);
@@ -230,7 +238,33 @@
.hasMessage("Could not fetch file: java.io.IOException: problem");
verifyFtpsClientMock_openOk();
- verify(ftpsClientMock, times(1)).retrieveFile(ArgumentMatchers.eq(REMOTE_FILE_PATH), any());
+ verify(ftpsClientMock, times(1)).retrieveFile(eq(REMOTE_FILE_PATH), any());
verifyNoMoreInteractions(ftpsClientMock);
}
+
+ @Test
+ void testCreateInputStream() throws IOException, URISyntaxException {
+ Path trustCaPath = Paths.get(getClass().getResource("/org/oran/datafile/datastore/file.txt").toURI());
+ InputStream actualCreateInputStreamResult = clientUnderTestSpy.createInputStream(trustCaPath);
+ assertNotNull(actualCreateInputStreamResult);
+ }
+
+ @Test
+ void testCreateOutputStream() throws IOException, URISyntaxException, DatafileTaskException {
+ Path trustCaPath = Paths.get(getClass().getResource("/org/oran/datafile/datastore/file.txt").toURI());
+ assertThrows(NonRetryableDatafileTaskException.class, () -> clientUnderTestSpy.createOutputStream(trustCaPath));
+ }
+
+ @Test
+ void testGetTrustManager2() throws IOException, KeyStoreException, NoSuchAlgorithmException, CertificateException {
+ FileServerData fileServerData = FileServerData.builder()
+ .password("password123")
+ .port(8080)
+ .serverAddress("42 Main St")
+ .userId("42")
+ .build();
+ assertNull((new FtpesClient(fileServerData, Paths.get(System.getProperty("java.io.tmpdir"), "test.txt"),
+ "Key Cert Password Path", Paths.get(System.getProperty("java.io.tmpdir"), "test.txt"),
+ "Trusted Ca Password Path")).getTrustManager(null, "foo"));
+ }
}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientSettingsTest.java b/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientSettingsTest.java
index bbce5ef..d8400bf 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientSettingsTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientSettingsTest.java
@@ -26,10 +26,10 @@
import org.junit.jupiter.api.io.TempDir;
import org.oran.datafile.configuration.SftpConfig;
-public class SftpClientSettingsTest {
+class SftpClientSettingsTest {
@Test
- public void shouldUseFtpStrictHostChecking(@TempDir Path tempDir) throws Exception {
+ void shouldUseFtpStrictHostChecking(@TempDir Path tempDir) throws Exception {
File knowHostsFile = new File(tempDir.toFile(), "known_hosts");
knowHostsFile.createNewFile();
@@ -40,7 +40,7 @@
}
@Test
- public void shouldNotUseFtpStrictHostChecking_whenFileDoesNotExist() {
+ void shouldNotUseFtpStrictHostChecking_whenFileDoesNotExist() {
SftpConfig config = createSampleSftpConfigWithStrictHostChecking("unknown_file");
SftpClientSettings sftpClient = new SftpClientSettings(config);
@@ -49,7 +49,7 @@
}
@Test
- public void shouldNotUseFtpStrictHostChecking_whenExplicitlySwitchedOff() {
+ void shouldNotUseFtpStrictHostChecking_whenExplicitlySwitchedOff() {
SftpClientSettings sftpClient = new SftpClientSettings(createSampleSftpConfigNoStrictHostChecking());
sftpClient.shouldUseStrictHostChecking();
assertThat(sftpClient.shouldUseStrictHostChecking()).isFalse();
diff --git a/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientTest.java b/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientTest.java
index 5268839..f29989a 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/ftp/SftpClientTest.java
@@ -47,7 +47,7 @@
import org.oran.datafile.model.FileServerData;
@ExtendWith(MockitoExtension.class)
-public class SftpClientTest {
+class SftpClientTest {
private static final String HOST = "127.0.0.1";
private static final int SFTP_PORT = 1021;
@@ -64,7 +64,7 @@
private ChannelSftp channelMock;
@Test
- public void openWithPort_success() throws Exception {
+ void openWithPort_success() throws Exception {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -92,7 +92,7 @@
}
@Test
- public void openWithoutPort_success() throws Exception {
+ void openWithoutPort_success() throws Exception {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -112,7 +112,7 @@
}
@Test
- public void open_throwsExceptionWithRetry() throws Exception {
+ void open_throwsExceptionWithRetry() throws Exception {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -130,7 +130,7 @@
}
@Test
- public void openAuthFail_throwsExceptionWithoutRetry() throws Exception {
+ void openAuthFail_throwsExceptionWithoutRetry() throws Exception {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -152,7 +152,7 @@
@SuppressWarnings("resource")
@Test
- public void collectFile_success() throws DatafileTaskException, SftpException {
+ void collectFile_success() throws DatafileTaskException, SftpException {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -170,7 +170,7 @@
}
@Test
- public void collectFile_throwsExceptionWithRetry() throws SftpException {
+ void collectFile_throwsExceptionWithRetry() throws SftpException {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -190,7 +190,7 @@
}
@Test
- public void collectFileFileMissing_throwsExceptionWithoutRetry() throws SftpException {
+ void collectFileFileMissing_throwsExceptionWithoutRetry() throws SftpException {
FileServerData expectedFileServerData = FileServerData.builder() //
.serverAddress(HOST) //
.userId(USERNAME) //
@@ -211,7 +211,7 @@
}
@Test
- public void close_success() {
+ void close_success() {
SftpClient sftpClient = new SftpClient(null, createSampleSftpClientSettings());
sftpClient.session = sessionMock;
diff --git a/datafilecollector/src/test/java/org/oran/datafile/http/HttpsClientConnectionManagerUtilTest.java b/datafilecollector/src/test/java/org/oran/datafile/http/HttpsClientConnectionManagerUtilTest.java
index af77349..615a76e 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/http/HttpsClientConnectionManagerUtilTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/http/HttpsClientConnectionManagerUtilTest.java
@@ -25,7 +25,7 @@
import org.oran.datafile.exceptions.DatafileTaskException;
@ExtendWith(MockitoExtension.class)
-public class HttpsClientConnectionManagerUtilTest {
+class HttpsClientConnectionManagerUtilTest {
private static final String KEY_PATH = "src/test/resources/keystore.p12";
private static final String KEY_PASSWORD = "src/test/resources/keystore.pass";
@@ -34,19 +34,19 @@
private static final String TRUSTED_CA_PASSWORD = "src/test/resources/trust.pass";
@Test
- public void emptyManager_shouldThrowException() {
+ void emptyManager_shouldThrowException() {
assertThrows(DatafileTaskException.class, () -> HttpsClientConnectionManagerUtil.instance());
}
@Test
- public void creatingManager_successfulCase() throws Exception {
+ void creatingManager_successfulCase() throws Exception {
HttpsClientConnectionManagerUtil.setupOrUpdate(KEY_PATH, KEY_PASSWORD, TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD, //
true);
assertNotNull(HttpsClientConnectionManagerUtil.instance());
}
@Test
- public void creatingManager_improperSecretShouldThrowException() {
+ void creatingManager_improperSecretShouldThrowException() {
assertThrows(DatafileTaskException.class, () -> HttpsClientConnectionManagerUtil.setupOrUpdate(KEY_PATH, //
KEY_IMPROPER_PASSWORD, TRUSTED_CA_PATH, TRUSTED_CA_PASSWORD, true));
assertThrows(DatafileTaskException.class, () -> HttpsClientConnectionManagerUtil.instance());
diff --git a/datafilecollector/src/test/java/org/oran/datafile/model/CountersTest.java b/datafilecollector/src/test/java/org/oran/datafile/model/CountersTest.java
new file mode 100644
index 0000000..d2e8b37
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/model/CountersTest.java
@@ -0,0 +1,71 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019-2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.model;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class CountersTest {
+ @Test
+ void testIncNoOfReceivedEvents() {
+ Counters counters = new Counters();
+ counters.incNoOfReceivedEvents();
+ assertEquals(1L, counters.getTotalReceivedEvents());
+ }
+
+ @Test
+ void testIncNoOfCollectedFiles() {
+ Counters counters = new Counters();
+ counters.incNoOfCollectedFiles();
+ counters.incNoOfFailedFtp();
+ counters.incNoOfFailedFtpAttempts();
+ counters.incNoOfFailedHttp();
+ counters.incNoOfFailedHttpAttempts();
+ counters.incNoOfFailedPublish();
+ counters.incNoOfFailedPublishAttempts();
+ String actualToStringResult = counters.toString();
+ long actualNoOfCollectedFiles = counters.getNoOfCollectedFiles();
+ long actualNoOfFailedFtp = counters.getNoOfFailedFtp();
+ long actualNoOfFailedFtpAttempts = counters.getNoOfFailedFtpAttempts();
+ long actualNoOfFailedHttp = counters.getNoOfFailedHttp();
+ long actualNoOfFailedHttpAttempts = counters.getNoOfFailedHttpAttempts();
+ long actualNoOfFailedPublish = counters.getNoOfFailedPublish();
+ long actualNoOfFailedPublishAttempts = counters.getNoOfFailedPublishAttempts();
+ long actualTotalPublishedFiles = counters.getTotalPublishedFiles();
+ assertEquals(1L, actualNoOfCollectedFiles);
+ assertEquals(1L, actualNoOfFailedFtp);
+ assertEquals(1L, actualNoOfFailedFtpAttempts);
+ assertEquals(1L, actualNoOfFailedHttp);
+ assertEquals(1L, actualNoOfFailedHttpAttempts);
+ assertEquals(1L, actualNoOfFailedPublish);
+ assertEquals(1L, actualNoOfFailedPublishAttempts);
+ assertEquals(0L, actualTotalPublishedFiles);
+ assertEquals(0L, counters.getTotalReceivedEvents());
+ }
+ @Test
+ void testIncTotalPublishedFiles() {
+ Counters counters = new Counters();
+ counters.incTotalPublishedFiles();
+ assertEquals(1L, counters.getTotalPublishedFiles());
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/model/FileDataTest.java b/datafilecollector/src/test/java/org/oran/datafile/model/FileDataTest.java
new file mode 100644
index 0000000..f52cfcc
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/model/FileDataTest.java
@@ -0,0 +1,256 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019-2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.model;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Collections;
+import org.junit.jupiter.api.Test;
+import org.oran.datafile.configuration.AppConfig;
+import org.oran.datafile.exceptions.DatafileTaskException;
+
+class FileDataTest {
+ @Test
+ void testSchemeGetSchemeFromString() throws DatafileTaskException {
+ assertThrows(DatafileTaskException.class, () -> FileData.Scheme.getSchemeFromString("Scheme String"));
+ assertEquals(FileData.Scheme.FTPES, FileData.Scheme.getSchemeFromString("FTPES"));
+ assertEquals(FileData.Scheme.SFTP, FileData.Scheme.getSchemeFromString("SFTP"));
+ assertEquals(FileData.Scheme.HTTP, FileData.Scheme.getSchemeFromString("HTTP"));
+ assertEquals(FileData.Scheme.HTTPS, FileData.Scheme.getSchemeFromString("HTTPS"));
+ }
+
+ @Test
+ void testSchemeIsFtpScheme() {
+ assertTrue(FileData.Scheme.isFtpScheme(FileData.Scheme.FTPES));
+ assertTrue(FileData.Scheme.isFtpScheme(FileData.Scheme.SFTP));
+ assertFalse(FileData.Scheme.isFtpScheme(FileData.Scheme.HTTP));
+ assertFalse(FileData.Scheme.isFtpScheme(FileData.Scheme.HTTPS));
+ }
+
+ @Test
+ void testSourceName() {
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ assertEquals("field8", fileData.sourceName());
+ }
+
+ @Test
+ void testName() {
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "location", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ assertEquals("field8/someString", fileData.name());
+ }
+
+ @Test
+ void testRemoteFilePath() {
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "ftp://example.com/remote/file.txt", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ assertEquals("/remote/file.txt", fileData.remoteFilePath());
+ }
+
+ @Test
+ void testScheme() {
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "http://example.com/file.txt", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ assertEquals(FileData.Scheme.HTTP, fileData.scheme());
+ }
+
+ @Test
+ void testGetLocalFilePath() {
+ AppConfig config = new AppConfig();
+ config.setCollectedFilesPath("/local/path");
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "http://example.com/file.txt", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ Path expectedPath = Paths.get("/local/path/field8/someString");
+ Path actualPath = fileData.getLocalFilePath(config);
+ assertEquals(expectedPath, actualPath);
+ }
+
+ @Test
+ void testFileServerDataWithUserInfo() throws Exception {
+ // Arrange
+ AppConfig config = new AppConfig();
+ config.setCollectedFilesPath("/local/path");
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "http://username:password@example.com:8080/path?query1=value1&query2=value2", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ // Act
+ FileServerData result = fileData.fileServerData();
+
+ // Assert
+ assertEquals("username", result.userId);
+ assertEquals("password", result.password);
+ }
+
+ @Test
+ void testFileServerDataWithFragment() throws Exception {
+ // Arrange
+ AppConfig config = new AppConfig();
+ config.setCollectedFilesPath("/local/path");
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "http://username@example.com:8080/path?query1=value1&query2=value2#rawFragment", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ // Act
+ FileServerData result = fileData.fileServerData();
+
+ // Assert
+ assertEquals("rawFragment", result.uriRawFragment);
+ }
+
+ @Test
+ void testFileServerDataWithoutUserInfo() throws Exception {
+ // Arrange
+ AppConfig config = new AppConfig();
+ config.setCollectedFilesPath("/local/path");
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "http://example.com:8080/path?query1=value1&query2=value2", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ FileServerData result = fileData.fileServerData();
+ assertEquals("example.com", result.getServerAddress());
+ }
+
+ @Test
+ void testInvalidScheme() throws Exception {
+ // Arrange
+ AppConfig config = new AppConfig();
+ config.setCollectedFilesPath("/local/path");
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+ FileData fileData = FileData.builder().messageMetaData(metaData).build();
+
+ FileReadyMessage.FileInfo fileInfo = new FileReadyMessage.FileInfo("name", "abcxyz://example.com:8080/path?query1=value1&query2=value2", "hashMapField", "");
+ FileReadyMessage.ArrayOfNamedHashMap arrayOfNamedHashMap = new FileReadyMessage.ArrayOfNamedHashMap("someString", fileInfo);
+ fileData.fileInfo = arrayOfNamedHashMap;
+
+ // Act
+ FileData.Scheme result = fileData.scheme();
+ assertEquals("FTPES", result.name());
+ }
+
+ @Test
+ void testCreateFileData(){
+
+ FileReadyMessage.MessageMetaData metaData = new FileReadyMessage.MessageMetaData(
+ "sourceName", "otherField1", "otherField2", "otherField3", 42, "field5", "field6",
+ "field7", "field8", 123456789L, 987654321L, "field11", "field12"
+ );
+
+ FileReadyMessage fileReadyMessage = FileReadyMessage.builder()
+ .event(
+ FileReadyMessage.Event.builder()
+ .commonEventHeader(metaData)
+ .notificationFields(
+ FileReadyMessage.NotificationFields.builder()
+ .notificationFieldsVersion("1.0")
+ .changeType("Add")
+ .changeIdentifier("Change123")
+ .arrayOfNamedHashMap(
+ Collections.singletonList(
+ FileReadyMessage.ArrayOfNamedHashMap.builder()
+ .name("File1")
+ .hashMap(
+ FileReadyMessage.FileInfo.builder()
+ .fileFormatType("Text")
+ .location("ftp://example.com/files/file.txt")
+ .fileFormatVersion("1.0")
+ .compression("None")
+ .build()
+ )
+ .build()
+ )
+ )
+ .build()
+ )
+ .build()
+ )
+ .build();
+
+ Iterable<FileData> fileDataIterable = FileData.createFileData(fileReadyMessage);
+ FileReadyMessage.MessageMetaData messageMetaData = fileDataIterable.iterator().next().messageMetaData;
+
+ assertEquals("field8", messageMetaData.sourceName);
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/model/FilePublishInformationTest.java b/datafilecollector/src/test/java/org/oran/datafile/model/FilePublishInformationTest.java
new file mode 100644
index 0000000..58fe722
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/model/FilePublishInformationTest.java
@@ -0,0 +1,907 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019-2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.model;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import org.junit.jupiter.api.Test;
+
+class FilePublishInformationTest {
+ @Test
+ void testCanEqual() {
+ assertFalse(
+ (new FilePublishInformation("Product Name", "Vendor Name", 1L, "Source Name", 1L, "UTC", "Compression",
+ "File Format Type", "1.0.2", "Name", "42", "s3://bucket-name/object-key")).canEqual("Other"));
+ }
+
+ @Test
+ void testCanEqual2() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertTrue(buildResult.canEqual(buildResult2));
+ }
+
+ @Test
+ void testConstructor() {
+ FilePublishInformation actualFilePublishInformation =
+ new FilePublishInformation("Product Name", "Vendor Name", 1L,
+ "Source Name", 1L, "UTC", "Compression", "File Format Type", "1.0.2", "Name", "42",
+ "s3://bucket-name/object-key");
+
+ assertEquals("Name", actualFilePublishInformation.getName());
+ assertEquals("Vendor Name", actualFilePublishInformation.vendorName);
+ assertEquals("UTC", actualFilePublishInformation.timeZoneOffset);
+ assertEquals(1L, actualFilePublishInformation.startEpochMicrosec);
+ assertEquals("Product Name", actualFilePublishInformation.productName);
+ assertEquals("s3://bucket-name/object-key", actualFilePublishInformation.objectStoreBucket);
+ assertEquals(1L, actualFilePublishInformation.lastEpochMicrosec);
+ assertEquals("1.0.2", actualFilePublishInformation.fileFormatVersion);
+ assertEquals("File Format Type", actualFilePublishInformation.fileFormatType);
+ assertEquals("Compression", actualFilePublishInformation.compression);
+ assertEquals("42", actualFilePublishInformation.changeIdentifier);
+ assertEquals("Source Name", actualFilePublishInformation.getSourceName());
+ }
+
+ @Test
+ void testEquals() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(null, buildResult);
+ }
+ @Test
+ void testEquals2() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals("Different type to FilePublishInformation", buildResult );
+ }
+ @Test
+ void testEquals3() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertEquals(buildResult, buildResult);
+ int expectedHashCodeResult = buildResult.hashCode();
+ assertEquals(expectedHashCodeResult, buildResult.hashCode());
+ }
+ @Test
+ void testEquals4() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertEquals(buildResult, buildResult2);
+ int expectedHashCodeResult = buildResult.hashCode();
+ assertEquals(expectedHashCodeResult, buildResult2.hashCode());
+ }
+ @Test
+ void testEquals5() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("Product Name")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals6() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier(null)
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals7() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Product Name")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals8() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression(null)
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals9() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("Product Name")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals10() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType(null)
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals11() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("Product Name")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals12() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion(null)
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals13() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(3L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals14() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Product Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals15() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name(null)
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals16() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("Product Name")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals17() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket(null)
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals18() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Vendor Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals19() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName(null)
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals20() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Product Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals21() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName(null)
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals22() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(3L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals23() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("Europe/London")
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals24() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset(null)
+ .vendorName("Vendor Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals25() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Product Name")
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testEquals26() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName(null)
+ .build();
+ FilePublishInformation buildResult2 = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ assertNotEquals(buildResult, buildResult2);
+ }
+ @Test
+ void testGetName() {
+ FilePublishInformation buildResult = FilePublishInformation.builder()
+ .changeIdentifier("42")
+ .compression("Compression")
+ .fileFormatType("File Format Type")
+ .fileFormatVersion("1.0.2")
+ .lastEpochMicrosec(1L)
+ .name("Name")
+ .objectStoreBucket("s3://bucket-name/object-key")
+ .productName("Product Name")
+ .sourceName("Source Name")
+ .startEpochMicrosec(1L)
+ .timeZoneOffset("UTC")
+ .vendorName("Vendor Name")
+ .build();
+ String actualName = buildResult.getName();
+ assertEquals("Name", actualName);
+ assertEquals("Source Name", buildResult.getSourceName());
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/model/FileReadyMessageTest.java b/datafilecollector/src/test/java/org/oran/datafile/model/FileReadyMessageTest.java
new file mode 100644
index 0000000..29a7236
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/model/FileReadyMessageTest.java
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019-2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.model;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.jupiter.api.Test;
+
+class FileReadyMessageTest {
+ @Test
+ void testMessageMetaDataProductName() {
+ assertEquals("Event Name",
+ (new FileReadyMessage.MessageMetaData("42", "Priority", "1.0.2", "Reporting Entity Name",
+ 1, "Domain", "Event Name", "1.0.2", "Source Name", 1L, 1L, "UTC", "42")).productName());
+ assertEquals("|", (new FileReadyMessage.MessageMetaData("42", "Priority", "1.0.2", "Reporting Entity Name", 1,
+ "Domain", "_|-", "1.0.2", "Source Name", 1L, 1L, "UTC", "42")).productName());
+ }
+ @Test
+ void testMessageMetaDataVendorName() {
+ assertEquals("Event Name",
+ (new FileReadyMessage.MessageMetaData("42", "Priority", "1.0.2", "Reporting Entity Name", 1, "Domain",
+ "Event Name", "1.0.2", "Source Name", 1L, 1L, "UTC", "42")).vendorName());
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/model/FileServerDataTest.java b/datafilecollector/src/test/java/org/oran/datafile/model/FileServerDataTest.java
new file mode 100644
index 0000000..863be8a
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/model/FileServerDataTest.java
@@ -0,0 +1,40 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2019-2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.model;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.util.ArrayList;
+import org.junit.jupiter.api.Test;
+
+class FileServerDataTest {
+ @Test
+ void testConstructor() {
+ FileServerData actualFileServerData = new FileServerData("42 Main St", "42", "password", new ArrayList<>(),
+ "Uri Raw Fragment", 8080);
+ assertEquals("FileServerData(serverAddress=42 Main St, userId=42, uriRawFragment=Uri Raw Fragment, port=8080)",
+ actualFileServerData.toString());
+ assertEquals(8080, actualFileServerData.port.intValue());
+ assertTrue(actualFileServerData.queryParameters.isEmpty());
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/model/SchemeTest.java b/datafilecollector/src/test/java/org/oran/datafile/model/SchemeTest.java
index 3b42ede..5085798 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/model/SchemeTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/model/SchemeTest.java
@@ -26,10 +26,10 @@
import org.oran.datafile.exceptions.DatafileTaskException;
import org.oran.datafile.model.FileData.Scheme;
-public class SchemeTest {
+class SchemeTest {
@Test
- public void shouldReturnSchemeForSupportedProtocol() throws DatafileTaskException {
+ void shouldReturnSchemeForSupportedProtocol() throws DatafileTaskException {
assertEquals(Scheme.FTPES, Scheme.getSchemeFromString("FTPES"));
assertEquals(Scheme.SFTP, Scheme.getSchemeFromString("SFTP"));
assertEquals(Scheme.HTTP, Scheme.getSchemeFromString("HTTP"));
@@ -37,12 +37,12 @@
}
@Test
- public void shouldThrowExceptionForUnsupportedProtocol() {
+ void shouldThrowExceptionForUnsupportedProtocol() {
assertThrows(DatafileTaskException.class, () -> Scheme.getSchemeFromString("FTPS"));
}
@Test
- public void shouldThrowExceptionForInvalidProtocol() {
+ void shouldThrowExceptionForInvalidProtocol() {
assertThrows(DatafileTaskException.class, () -> Scheme.getSchemeFromString("invalid"));
}
}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/oauth2/OAuthBearerTokenJwtTest.java b/datafilecollector/src/test/java/org/oran/datafile/oauth2/OAuthBearerTokenJwtTest.java
new file mode 100644
index 0000000..f8afa64
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/oauth2/OAuthBearerTokenJwtTest.java
@@ -0,0 +1,109 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.oauth2;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.oran.datafile.exceptions.DatafileTaskException;
+import org.springframework.test.context.ContextConfiguration;
+
+@ContextConfiguration(classes = {OAuthBearerTokenJwtTest.class})
+@ExtendWith(MockitoExtension.class)
+class OAuthBearerTokenJwtTest {
+
+ private OAuthBearerTokenJwt token;
+
+ @BeforeEach
+ void setUp() throws DatafileTaskException {
+ String validJwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c"; // Replace with a valid JWT token for testing
+ token = OAuthBearerTokenJwt.create(validJwt);
+ }
+
+ @Test
+ void testCreateValidToken() {
+ assertNotNull(token);
+ }
+
+ @Test
+ void testCreateInvalidToken() {
+ assertThrows(DatafileTaskException.class, () -> OAuthBearerTokenJwt.create("invalid_token"));
+ }
+
+ @Test
+ void testTokenValue() {
+ assertEquals("eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c", token.value());
+ }
+
+ @Test
+ void testTokenScope() {
+ assertEquals(0, token.scope().size());
+ assertFalse(token.scope().contains(""));
+ }
+
+ @Test
+ void testTokenLifetimeMs() {
+ assertEquals(Long.MAX_VALUE, token.lifetimeMs());
+ }
+
+ @Test
+ void testTokenPrincipalName() {
+ assertEquals("1234567890", token.principalName());
+ }
+
+ @Test
+ void testTokenStartTimeMs() {
+ assertEquals(1516239022L, token.startTimeMs());
+ }
+
+ @Test
+ void testCreateTokenFromInvalidPayload() throws DatafileTaskException {
+ // Create a JWT with an invalid payload (missing fields)
+ String invalidPayload = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9";
+ assertThrows(DatafileTaskException.class, () -> OAuthBearerTokenJwt.create(invalidPayload));
+ }
+
+ @Test
+ void testCreateTokenWithValidPayload() throws DatafileTaskException {
+ // Create a JWT with a valid payload
+ String validPayload = "eyJzdWIiOiAiVGVzdCIsICJleHAiOiAxNjM1MTUwMDAwLCAiaWF0IjogMTYzNTA5NTAwMCwgInNjb3BlIjogInNjb3BlX3Rva2VuIiwgImp0aSI6ICJmb28ifQ==";
+ OAuthBearerTokenJwt jwt = OAuthBearerTokenJwt.create("header." + validPayload + ".signature");
+
+ assertNotNull(jwt);
+ assertEquals("header." + validPayload + ".signature", jwt.value());
+ assertEquals(1, jwt.scope().size());
+ assertEquals("scope_token", jwt.scope().iterator().next());
+ assertEquals("Test", jwt.principalName());
+ assertEquals(1635095000, jwt.startTimeMs());
+ }
+
+ @Test
+ void testCreateThrowsExceptionWithInvalidToken() throws DatafileTaskException {
+ String tokenRaw = "your_mocked_token_here";
+ assertThrows(DatafileTaskException.class, () -> OAuthBearerTokenJwt.create(tokenRaw));
+ }
+}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandlerTest.java b/datafilecollector/src/test/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandlerTest.java
new file mode 100644
index 0000000..bba9539
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/oauth2/OAuthKafkaAuthenticateLoginCallbackHandlerTest.java
@@ -0,0 +1,128 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.oauth2;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import javax.security.auth.callback.Callback;
+import javax.security.auth.callback.UnsupportedCallbackException;
+import javax.security.auth.login.AppConfigurationEntry;
+import org.apache.kafka.common.security.auth.SaslExtensionsCallback;
+import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule;
+import org.apache.kafka.common.security.oauthbearer.OAuthBearerTokenCallback;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+
+class OAuthKafkaAuthenticateLoginCallbackHandlerTest {
+
+ private OAuthKafkaAuthenticateLoginCallbackHandler callbackHandler;
+
+ @BeforeEach
+ void setUp() {
+ callbackHandler = new OAuthKafkaAuthenticateLoginCallbackHandler();
+ }
+
+ @Test
+ void testConfigureWithValidSaslMechanismAndConfigEntry() {
+ String saslMechanism = OAuthBearerLoginModule.OAUTHBEARER_MECHANISM;
+ List<AppConfigurationEntry> jaasConfigEntries = Collections.singletonList(Mockito.mock(AppConfigurationEntry.class));
+
+ callbackHandler.configure(new HashMap<>(), saslMechanism, jaasConfigEntries);
+
+ assertTrue(callbackHandler.isConfigured());
+ }
+
+ @SuppressWarnings("java:S5778")
+ @Test
+ void testConfigureWithInvalidSaslMechanism() {
+ String invalidSaslMechanism = "InvalidMechanism";
+ List<AppConfigurationEntry> jaasConfigEntries = Collections.singletonList(Mockito.mock(AppConfigurationEntry.class));
+
+ assertThrows(IllegalArgumentException.class, () -> callbackHandler.configure(new HashMap<>(), invalidSaslMechanism, jaasConfigEntries));
+
+ assertFalse(callbackHandler.isConfigured());
+ }
+
+ @SuppressWarnings("java:S5778")
+ @Test
+ void testConfigureWithEmptyJaasConfigEntries() {
+ String saslMechanism = OAuthBearerLoginModule.OAUTHBEARER_MECHANISM;
+ List<AppConfigurationEntry> emptyJaasConfigEntries = Collections.emptyList();
+
+ assertThrows(IllegalArgumentException.class, () -> callbackHandler.configure(new HashMap<>(), saslMechanism, emptyJaasConfigEntries));
+
+ assertFalse(callbackHandler.isConfigured());
+ }
+
+ @Test
+ void testHandleSaslExtensionsCallback() throws IOException, UnsupportedCallbackException {
+ String saslMechanism = OAuthBearerLoginModule.OAUTHBEARER_MECHANISM;
+ List<AppConfigurationEntry> jaasConfigEntries = Collections.singletonList(Mockito.mock(AppConfigurationEntry.class));
+
+ callbackHandler.configure(new HashMap<>(), saslMechanism, jaasConfigEntries);
+ SaslExtensionsCallback callback = mock(SaslExtensionsCallback.class);
+
+ callbackHandler.handle(new Callback[]{callback});
+ verify(callback).extensions(any());
+ }
+
+ @Test
+ void testHandleUnsupportedCallback() {
+ Callback unsupportedCallback = mock(Callback.class);
+ String saslMechanism = OAuthBearerLoginModule.OAUTHBEARER_MECHANISM;
+ List<AppConfigurationEntry> jaasConfigEntries = Collections.singletonList(Mockito.mock(AppConfigurationEntry.class));
+
+ callbackHandler.configure(new HashMap<>(), saslMechanism, jaasConfigEntries);
+ assertThrows(UnsupportedCallbackException.class, () -> callbackHandler.handle(new Callback[]{unsupportedCallback}));
+ }
+
+ @Test
+ void testHandleOAuthBearerTokenCallback() throws IOException, UnsupportedCallbackException {
+
+ String saslMechanism = OAuthBearerLoginModule.OAUTHBEARER_MECHANISM;
+ List<AppConfigurationEntry> jaasConfigEntries = Collections.singletonList(Mockito.mock(AppConfigurationEntry.class));
+ String validJwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c";
+
+ callbackHandler.configure(new HashMap<>(), saslMechanism, jaasConfigEntries);
+
+ OAuthBearerTokenCallback oauthBearerTokenCallback = Mockito.mock(OAuthBearerTokenCallback.class);
+ SecurityContext securityContextMock = Mockito.mock(SecurityContext.class);
+ when(oauthBearerTokenCallback.token()).thenReturn(null); // Ensure the callback has no token initially
+ when(oauthBearerTokenCallback.token()).thenAnswer(invocation -> {
+ return OAuthBearerTokenJwt.create(validJwt);
+ });
+
+ when(securityContextMock.getBearerAuthToken()).thenReturn(validJwt);
+ callbackHandler.handle(new Callback[]{oauthBearerTokenCallback});
+ verify(oauthBearerTokenCallback).token();
+ }
+}
diff --git a/datafilecollector/src/test/java/org/oran/datafile/oauth2/SecurityContextTest.java b/datafilecollector/src/test/java/org/oran/datafile/oauth2/SecurityContextTest.java
new file mode 100644
index 0000000..adacd4c
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/oauth2/SecurityContextTest.java
@@ -0,0 +1,81 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.oauth2;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.nio.file.Path;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.MockitoAnnotations;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+class SecurityContextTest {
+
+ @BeforeEach
+ void setUp() {
+ MockitoAnnotations.initMocks(this);
+ }
+
+ @Test
+ void testConstructorWithAuthTokenFilename() {
+ SecurityContext securityContext = new SecurityContext("auth-token-file.txt");
+ assertNotNull(securityContext.getAuthTokenFilePath());
+ assertEquals(Path.of("auth-token-file.txt"), securityContext.getAuthTokenFilePath());
+ }
+
+ @Test
+ void testConstructorWithoutAuthTokenFilename() {
+ SecurityContext securityContext = new SecurityContext("");
+ assertNull(securityContext.getAuthTokenFilePath());
+ }
+
+ @Test
+ void testIsConfigured() {
+ SecurityContext securityContext = new SecurityContext("auth-token-file.txt");
+ assertTrue(securityContext.isConfigured());
+ }
+
+ @Test
+ void testIsNotConfigured() {
+ SecurityContext securityContext = new SecurityContext("");
+ assertFalse(securityContext.isConfigured());
+ }
+
+ @Test
+ void testGetBearerAuthToken() {
+ assertEquals("", SecurityContext.getInstance().getBearerAuthToken());
+ assertEquals("", (new SecurityContext("foo.txt")).getBearerAuthToken());
+ }
+
+ @Test
+ void testGetBearerAuthTokenWhenNotConfigured() {
+ SecurityContext securityContext = new SecurityContext("");
+ assertEquals("", securityContext.getBearerAuthToken());
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/tasks/CollectAndReportFilesTest.java b/datafilecollector/src/test/java/org/oran/datafile/tasks/CollectAndReportFilesTest.java
new file mode 100644
index 0000000..9910382
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/tasks/CollectAndReportFilesTest.java
@@ -0,0 +1,94 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.tasks;
+
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertSame;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.oran.datafile.configuration.AppConfig;
+import org.oran.datafile.model.Counters;
+import org.oran.datafile.model.FilePublishInformation;
+import org.oran.datafile.oauth2.SecurityContext;
+import org.springframework.test.context.ContextConfiguration;
+import reactor.core.publisher.DirectProcessor;
+import reactor.kafka.sender.SenderResult;
+
+@ContextConfiguration(classes = {CollectAndReportFiles.class})
+@ExtendWith(MockitoExtension.class)
+class CollectAndReportFilesTest {
+ @Mock
+ private AppConfig appConfig;
+
+ @Mock
+ private CollectAndReportFiles collectAndReportFilesMock;
+
+ @Mock
+ private SecurityContext securityContext;
+
+ @Test
+ void testStart() {
+ doNothing().when(collectAndReportFilesMock).start();
+ collectAndReportFilesMock.start();
+ verify(collectAndReportFilesMock).start();
+ }
+ @Test
+ void testCreateMainTask() {
+ DirectProcessor<FilePublishInformation> createResult = DirectProcessor.create();
+ when(collectAndReportFilesMock.createMainTask()).thenReturn(createResult);
+ assertSame(createResult, collectAndReportFilesMock.createMainTask());
+ verify(collectAndReportFilesMock).createMainTask();
+ }
+ @Test
+ void testSendDataToStream() {
+ DirectProcessor<SenderResult<Integer>> createResult = DirectProcessor.create();
+ when(
+ collectAndReportFilesMock.sendDataToStream(Mockito.<String>any(), Mockito.<String>any(), Mockito.<String>any()))
+ .thenReturn(createResult);
+ assertSame(createResult, collectAndReportFilesMock.sendDataToStream("Topic", "Source Name", "42"));
+ verify(collectAndReportFilesMock).sendDataToStream(Mockito.<String>any(), Mockito.<String>any(),
+ Mockito.<String>any());
+ }
+ @Test
+ void testCreateFileCollector() {
+ FileCollector fileCollector = new FileCollector(securityContext, appConfig, new Counters());
+
+ when(collectAndReportFilesMock.createFileCollector()).thenReturn(fileCollector);
+ assertSame(fileCollector, collectAndReportFilesMock.createFileCollector());
+ verify(collectAndReportFilesMock).createFileCollector();
+ }
+ @Test
+ void testParseReceivedFileReadyMessage() {
+ when(collectAndReportFilesMock.parseReceivedFileReadyMessage(Mockito.<KafkaTopicListener.DataFromTopic>any()))
+ .thenReturn(null);
+ assertNull(
+ collectAndReportFilesMock.parseReceivedFileReadyMessage(new KafkaTopicListener.DataFromTopic("Key", "42")));
+ verify(collectAndReportFilesMock).parseReceivedFileReadyMessage(Mockito.<KafkaTopicListener.DataFromTopic>any());
+ }
+}
+
diff --git a/datafilecollector/src/test/java/org/oran/datafile/tasks/FileCollectorTest.java b/datafilecollector/src/test/java/org/oran/datafile/tasks/FileCollectorTest.java
index 118e9c7..432e045 100644
--- a/datafilecollector/src/test/java/org/oran/datafile/tasks/FileCollectorTest.java
+++ b/datafilecollector/src/test/java/org/oran/datafile/tasks/FileCollectorTest.java
@@ -52,7 +52,7 @@
import org.oran.datafile.oauth2.SecurityContext;
import reactor.test.StepVerifier;
-public class FileCollectorTest {
+class FileCollectorTest {
final static String DATAFILE_TMPDIR = "/tmp/onap_datafile/";
private static final String PRODUCT_NAME = "NrRadio";
@@ -190,7 +190,7 @@
}
@Test
- public void whenFtpesFile_returnCorrectResponse() throws Exception {
+ void whenFtpesFile_returnCorrectResponse() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(ftpesClientMock).when(collectorUndetTest).createFtpesClient(any());
@@ -214,7 +214,7 @@
}
@Test
- public void whenSftpFile_returnCorrectResponse() throws Exception {
+ void whenSftpFile_returnCorrectResponse() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(sftpClientMock).when(collectorUndetTest).createSftpClient(any());
@@ -243,7 +243,7 @@
}
@Test
- public void whenHttpFile_returnCorrectResponse() throws Exception {
+ void whenHttpFile_returnCorrectResponse() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(dfcHttpClientMock).when(collectorUndetTest).createHttpClient(any());
@@ -275,7 +275,7 @@
}
@Test
- public void whenHttpsFile_returnCorrectResponse() throws Exception {
+ void whenHttpsFile_returnCorrectResponse() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(dfcHttpsClientMock).when(collectorUndetTest).createHttpsClient(any());
@@ -307,7 +307,7 @@
}
@Test
- public void whenFtpesFileAlwaysFail_retryAndFail() throws Exception {
+ void whenFtpesFileAlwaysFail_retryAndFail() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(ftpesClientMock).when(collectorUndetTest).createFtpesClient(any());
@@ -327,7 +327,7 @@
}
@Test
- public void whenFtpesFileAlwaysFail_failWithoutRetry() throws Exception {
+ void whenFtpesFileAlwaysFail_failWithoutRetry() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(ftpesClientMock).when(collectorUndetTest).createFtpesClient(any());
@@ -347,7 +347,7 @@
}
@Test
- public void whenFtpesFileFailOnce_retryAndReturnCorrectResponse() throws Exception {
+ void whenFtpesFileFailOnce_retryAndReturnCorrectResponse() throws Exception {
FileCollector collectorUndetTest = spy(new FileCollector(securityContext, appConfigMock, counters));
doReturn(ftpesClientMock).when(collectorUndetTest).createFtpesClient(any());
doThrow(new DatafileTaskException("Unable to collect file.")).doNothing().when(ftpesClientMock)
diff --git a/datafilecollector/src/test/java/org/oran/datafile/tasks/KafkaTopicListenerTest.java b/datafilecollector/src/test/java/org/oran/datafile/tasks/KafkaTopicListenerTest.java
new file mode 100644
index 0000000..00f4f43
--- /dev/null
+++ b/datafilecollector/src/test/java/org/oran/datafile/tasks/KafkaTopicListenerTest.java
@@ -0,0 +1,70 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (C) 2023 Nordix Foundation.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ * ============LICENSE_END=========================================================
+ */
+
+package org.oran.datafile.tasks;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.Collections;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.OffsetResetStrategy;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+import org.oran.datafile.configuration.AppConfig;
+import reactor.core.publisher.Flux;
+import reactor.kafka.receiver.KafkaReceiver;
+import reactor.kafka.receiver.ReceiverOptions;
+
+class KafkaTopicListenerTest {
+
+ @Mock
+ private AppConfig appConfig;
+
+ @Mock
+ private KafkaTopicListener kafkaTopicListener;
+
+ @BeforeEach
+ void setUp() {
+ MockitoAnnotations.initMocks(this);
+ when(appConfig.getInputTopic()).thenReturn("testTopic");
+ when(appConfig.getKafkaClientId()).thenReturn("testClientId");
+ when(appConfig.getKafkaBootStrapServers()).thenReturn("localhost:9092");
+ kafkaTopicListener = new KafkaTopicListener(appConfig);
+ }
+
+ @Test
+ void testStartReceiveFromTopic() {
+ KafkaReceiver mockKafkaReceiver = mock(KafkaReceiver.class);
+
+ when(mockKafkaReceiver.receive()).thenReturn(Flux.just(new KafkaTopicListener.DataFromTopic("key", "value")));
+
+ ReceiverOptions<String, String> receiverOptions = mock(ReceiverOptions.class);
+ when(receiverOptions.subscription(Collections.singleton("testTopic"))).thenReturn(receiverOptions);
+ when(receiverOptions.consumerProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, OffsetResetStrategy.EARLIEST.name()))
+ .thenReturn(receiverOptions);
+
+ assertEquals("testTopic", appConfig.getInputTopic());
+
+ }
+}
\ No newline at end of file
diff --git a/datafilecollector/src/test/resources/org/oran/datafile/datastore/file.txt b/datafilecollector/src/test/resources/org/oran/datafile/datastore/file.txt
new file mode 100644
index 0000000..c95df2d
--- /dev/null
+++ b/datafilecollector/src/test/resources/org/oran/datafile/datastore/file.txt
@@ -0,0 +1 @@
+Hi, How are you?
\ No newline at end of file