[MINOR] Fix code issues reported by SonarQube (#12767)
Co-authored-by: Y Ethan Guo <ethan@onehouse.ai>
linliu-code and yihua authored Feb 5, 2025
1 parent 995508d commit 14c292c
Showing 3 changed files with 8 additions and 16 deletions.
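The changes fall into three SonarQube-flagged patterns, each visible in the hunks below: assertEquals calls with the expected and actual arguments in the wrong order, JUnit 5 test classes and methods declared public where package-private visibility suffices, and an unused private parameter-source method. A short, hedged sketch of each pattern follows the relevant file's diff.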
@@ -121,8 +121,8 @@ protected void assertWriteStatuses(List<WriteStatus> writeStatuses, int batches,
         assertEquals(writeStatus.getTotalRecords(), sizeMap.get(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[counter % 3]));
       }
       assertNull(writeStatus.getGlobalError());
-      assertEquals(writeStatus.getTotalErrorRecords(), 0);
-      assertEquals(writeStatus.getTotalErrorRecords(), 0);
+      assertEquals(0, writeStatus.getTotalErrorRecords());
+      assertEquals(0, writeStatus.getTotalErrorRecords());
       assertFalse(writeStatus.hasErrors());
       assertNotNull(writeStatus.getFileId());
       String fileId = writeStatus.getFileId();
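For context on the hunk above: JUnit 5's assertEquals takes the expected value first, so a swapped call compiles and passes identically but produces a misleading failure message. A minimal sketch, using a plain long as a stand-in for writeStatus.getTotalErrorRecords() (the class and values here are illustrative, not from this patch):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

class AssertOrderSketch {

  @Test
  void errorCountIsZero() {
    long totalErrorRecords = 0L; // stand-in for writeStatus.getTotalErrorRecords()

    // With the arguments swapped, assertEquals(totalErrorRecords, 0) behaves
    // the same here, but on a failure JUnit would report the actual value as
    // "expected" and 0 as "was", reversing the roles in the message.
    assertEquals(0, totalErrorRecords);
  }
}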
@@ -47,7 +47,7 @@
 /**
  * Unit tests {@link HoodieBulkInsertDataInternalWriter}.
  */
-public class TestHoodieBulkInsertDataInternalWriter extends
+class TestHoodieBulkInsertDataInternalWriter extends
     HoodieBulkInsertInternalWriterTestBase {
 
   private static Stream<Arguments> configParams() {
@@ -60,17 +60,9 @@ private static Stream<Arguments> configParams() {
     return Stream.of(data).map(Arguments::of);
   }
 
-  private static Stream<Arguments> bulkInsertTypeParams() {
-    Object[][] data = new Object[][] {
-        {true},
-        {false}
-    };
-    return Stream.of(data).map(Arguments::of);
-  }
-
   @ParameterizedTest
   @MethodSource("configParams")
-  public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) throws Exception {
+  void testDataInternalWriter(boolean sorted, boolean populateMetaFields) throws Exception {
     // init config and table
     HoodieWriteConfig cfg = getWriteConfig(populateMetaFields);
     HoodieTable table = HoodieSparkTable.create(cfg, context, metaClient);
@@ -116,7 +108,7 @@ public void testDataInternalWriter(boolean sorted, boolean populateMetaFields) throws Exception {
    * to throw Global Error. Verify global error is set appropriately and only first batch of records are written to disk.
    */
   @Test
-  public void testGlobalFailure() throws Exception {
+  void testGlobalFailure() throws Exception {
     // init config and table
     HoodieWriteConfig cfg = getWriteConfig(true);
     HoodieTable table = HoodieSparkTable.create(cfg, context, metaClient);
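The public-to-package-private changes in this file follow the JUnit 5 convention, also enforced by SonarQube, that test classes and methods need no public modifier because the platform discovers them reflectively. A minimal sketch of the preferred shape (class and method names are hypothetical):

import static org.junit.jupiter.api.Assertions.assertEquals;

import org.junit.jupiter.api.Test;

// JUnit 5 finds test classes and methods via reflection, so package-private
// (the default) visibility is enough; the public modifier is redundant noise.
class VisibilitySketch {

  @Test
  void additionWorks() {
    assertEquals(4, 2 + 2);
  }
}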
@@ -57,7 +57,7 @@
 /**
  * Unit tests {@link HoodieDataSourceInternalBatchWrite}.
  */
-public class TestHoodieDataSourceInternalBatchWrite extends
+class TestHoodieDataSourceInternalBatchWrite extends
     HoodieBulkInsertInternalWriterTestBase {
 
   private static Stream<Arguments> bulkInsertTypeParams() {
@@ -129,7 +129,7 @@ private void testDataSourceWriterInternal(Map<String, String> extraMetadata, Map
   }
 
   @Test
-  public void testDataSourceWriterExtraCommitMetadata() throws Exception {
+  void testDataSourceWriterExtraCommitMetadata() throws Exception {
     String commitExtraMetaPrefix = "commit_extra_meta_";
     Map<String, String> extraMeta = new HashMap<>();
     extraMeta.put(DataSourceWriteOptions.COMMIT_METADATA_KEYPREFIX().key(), commitExtraMetaPrefix);
@@ -146,7 +146,7 @@ public void testDataSourceWriterExtraCommitMetadata() throws Exception {
   }
 
   @Test
-  public void testDataSourceWriterEmptyExtraCommitMetadata() throws Exception {
+  void testDataSourceWriterEmptyExtraCommitMetadata() throws Exception {
     String commitExtraMetaPrefix = "commit_extra_meta_";
     Map<String, String> extraMeta = new HashMap<>();
     extraMeta.put(DataSourceWriteOptions.COMMIT_METADATA_KEYPREFIX().key(), commitExtraMetaPrefix);
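Finally, the bulkInsertTypeParams provider deleted from TestHoodieBulkInsertDataInternalWriter earlier in this diff was removed as unused: the parameterized test in that class binds to configParams instead, while this class keeps its own copy of the provider. A minimal sketch of the pattern when it is live, mirroring the removed code (the class and test method names are hypothetical):

import static org.junit.jupiter.api.Assertions.assertNotNull;

import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

class MethodSourceSketch {

  // A provider is only live while some @MethodSource annotation names it;
  // once the last reference goes, SonarQube reports the private method as unused.
  private static Stream<Arguments> bulkInsertTypeParams() {
    Object[][] data = new Object[][] {
        {true},
        {false}
    };
    return Stream.of(data).map(Arguments::of);
  }

  @ParameterizedTest
  @MethodSource("bulkInsertTypeParams")
  void runsOncePerFlag(boolean populateMetaFields) {
    assertNotNull(Boolean.valueOf(populateMetaFields));
  }
}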
