From 14c292c626cd8d18b5997a90cfbb865befb5f6d2 Mon Sep 17 00:00:00 2001
From: Lin Liu <141371752+linliu-code@users.noreply.github.com>
Date: Tue, 4 Feb 2025 16:23:01 -0800
Subject: [PATCH] [MINOR] Fix code issues reported by SonarQube (#12767)

Co-authored-by: Y Ethan Guo
---
 .../HoodieBulkInsertInternalWriterTestBase.java    |  4 ++--
 .../TestHoodieBulkInsertDataInternalWriter.java    | 14 +++-----------
 .../TestHoodieDataSourceInternalBatchWrite.java    |  6 +++---
 3 files changed, 8 insertions(+), 16 deletions(-)

diff --git a/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java b/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java
index ea7e6e65e7cbc..5de28ae60c5f2 100644
--- a/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java
+++ b/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/internal/HoodieBulkInsertInternalWriterTestBase.java
@@ -121,8 +121,8 @@ protected void assertWriteStatuses(List writeStatuses, int batches,
         assertEquals(writeStatus.getTotalRecords(), sizeMap.get(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]));
       }
       assertNull(writeStatus.getGlobalError());
-      assertEquals(writeStatus.getTotalErrorRecords(), 0);
-      assertEquals(writeStatus.getTotalErrorRecords(), 0);
+      assertEquals(0, writeStatus.getTotalErrorRecords());
+      assertEquals(0, writeStatus.getTotalErrorRecords());
       assertFalse(writeStatus.hasErrors());
       assertNotNull(writeStatus.getFileId());
       String fileId = writeStatus.getFileId();
diff --git a/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java b/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java
index 96b06937504f1..945a34d413ff0 100644
--- a/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java
+++ b/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieBulkInsertDataInternalWriter.java
@@ -47,7 +47,7 @@
 /**
  * Unit tests {@link HoodieBulkInsertDataInternalWriter}.
  */
-public class TestHoodieBulkInsertDataInternalWriter extends
+class TestHoodieBulkInsertDataInternalWriter extends
     HoodieBulkInsertInternalWriterTestBase {
 
   private static Stream<Arguments> configParams() {
@@ -60,17 +60,9 @@ private static Stream configParams() {
     return Stream.of(data).map(Arguments::of);
   }
 
-  private static Stream<Arguments> bulkInsertTypeParams() {
-    Object[][] data = new Object[][] {
-        {true},
-        {false}
-    };
-    return Stream.of(data).map(Arguments::of);
-  }
-
   @ParameterizedTest
   @MethodSource("configParams")
-  public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) throws Exception {
+  void testDataInternalWriter(boolean sorted, boolean populateMetaFields) throws Exception {
     // init config and table
     HoodieWriteConfig cfg = getWriteConfig(populateMetaFields);
     HoodieTable table = HoodieSparkTable.create(cfg, context, metaClient);
@@ -116,7 +108,7 @@ public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) t
    * to throw Global Error. Verify global error is set appropriately and only first batch of records are written to disk.
    */
   @Test
-  public void testGlobalFailure() throws Exception {
+  void testGlobalFailure() throws Exception {
     // init config and table
     HoodieWriteConfig cfg = getWriteConfig(true);
     HoodieTable table = HoodieSparkTable.create(cfg, context, metaClient);
diff --git a/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieDataSourceInternalBatchWrite.java b/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieDataSourceInternalBatchWrite.java
index b58decad8decc..9d58085347695 100644
--- a/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieDataSourceInternalBatchWrite.java
+++ b/hudi-spark-datasource/hudi-spark3.5.x/src/test/java/org/apache/hudi/spark3/internal/TestHoodieDataSourceInternalBatchWrite.java
@@ -57,7 +57,7 @@
 /**
  * Unit tests {@link HoodieDataSourceInternalBatchWrite}.
  */
-public class TestHoodieDataSourceInternalBatchWrite extends
+class TestHoodieDataSourceInternalBatchWrite extends
     HoodieBulkInsertInternalWriterTestBase {
 
   private static Stream<Arguments> bulkInsertTypeParams() {
@@ -129,7 +129,7 @@ private void testDataSourceWriterInternal(Map extraMetadata, Map
   }
 
   @Test
-  public void testDataSourceWriterExtraCommitMetadata() throws Exception {
+  void testDataSourceWriterExtraCommitMetadata() throws Exception {
     String commitExtraMetaPrefix = "commit_extra_meta_";
     Map<String, String> extraMeta = new HashMap<>();
     extraMeta.put(DataSourceWriteOptions.COMMIT_METADATA_KEYPREFIX().key(), commitExtraMetaPrefix);
@@ -146,7 +146,7 @@ public void testDataSourceWriterExtraCommitMetadata() throws Exception {
   }
 
   @Test
-  public void testDataSourceWriterEmptyExtraCommitMetadata() throws Exception {
+  void testDataSourceWriterEmptyExtraCommitMetadata() throws Exception {
     String commitExtraMetaPrefix = "commit_extra_meta_";
     Map<String, String> extraMeta = new HashMap<>();
     extraMeta.put(DataSourceWriteOptions.COMMIT_METADATA_KEYPREFIX().key(), commitExtraMetaPrefix);
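
For context on the first hunk: SonarQube flags JUnit assertions whose arguments are in the wrong order, because assertEquals takes the expected value first and the actual value second; with the arguments swapped, a failure message reports the values backwards. The sketch below is not part of the patch and uses a hypothetical FakeWriteStatus stand-in rather than Hudi's write-status class; it only illustrates the convention the hunk restores.

```java
// Minimal sketch of the assertEquals(expected, actual) convention, assuming JUnit 5 on the classpath.
import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

class AssertEqualsOrderExample {

  // Hypothetical stand-in for the write status object used in the patched tests.
  static class FakeWriteStatus {
    long getTotalErrorRecords() {
      return 0L;
    }
  }

  @Test
  void expectedValueComesFirst() {
    FakeWriteStatus writeStatus = new FakeWriteStatus();
    // Expected constant first, computed actual second: a failure then reads
    // "expected: <0> but was: <N>" instead of the reversed, misleading form.
    assertEquals(0, writeStatus.getTotalErrorRecords());
  }
}
```

The remaining hunks follow the related JUnit 5 guidance that test classes and @Test methods do not need the public modifier, and drop the unused bulkInsertTypeParams provider from TestHoodieBulkInsertDataInternalWriter.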