Skip to content

Commit

Permalink
Remove redundant array initializers
Browse files Browse the repository at this point in the history
  • Loading branch information
elharo committed Jun 4, 2024
1 parent 07c1b33 commit cf90097
Show file tree
Hide file tree
Showing 14 changed files with 22 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
public class BooleanLexicoder
implements Lexicoder<Boolean>
{
public static final byte[] TRUE = new byte[] {1};
public static final byte[] FALSE = new byte[] {0};
public static final byte[] TRUE = {1};
public static final byte[] FALSE = {0};

@Override
public byte[] encode(Boolean v)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -349,7 +349,7 @@ public Block getSingleValueBlock(int position)
newBlocks[i] = getRawFieldBlocks()[i].copyRegion(startFieldBlockOffset, fieldBlockLength);
}
boolean[] newRowIsNull = isNull(position) ? new boolean[] {true} : null;
int[] newOffsets = new int[] {0, fieldBlockLength};
int[] newOffsets = {0, fieldBlockLength};

return createRowBlockInternal(0, 1, newRowIsNull, newOffsets, newBlocks);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,8 +133,8 @@ public HiveFileInfo next()
throws IOException
{
FileStatus fileStatus = hoodieBaseFileIterator.next().getFileStatus();
String[] name = new String[] {"localhost:" + DFS_DATANODE_DEFAULT_PORT};
String[] host = new String[] {"localhost"};
String[] name = {"localhost:" + DFS_DATANODE_DEFAULT_PORT};
String[] host = {"localhost"};
LocatedFileStatus hoodieFileStatus = new LocatedFileStatus(fileStatus,
new BlockLocation[] {new BlockLocation(name, host, 0L, fileStatus.getLen())});
return createHiveFileInfo(hoodieFileStatus, Optional.empty());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,8 +62,8 @@ public class ManifestPartitionLoader
extends PartitionLoader
{
// The following constants are referred from FileSystem.getFileBlockLocations in Hadoop
private static final String[] BLOCK_LOCATION_NAMES = new String[] {"localhost:50010"};
private static final String[] BLOCK_LOCATION_HOSTS = new String[] {"localhost"};
private static final String[] BLOCK_LOCATION_NAMES = {"localhost:50010"};
private static final String[] BLOCK_LOCATION_HOSTS = {"localhost"};

private final Table table;
private final Optional<Domain> pathDomain;
Expand Down Expand Up @@ -122,7 +122,7 @@ public ListenableFuture<?> loadPartition(HivePartitionMetadata partition, HiveSp
Path filePath = new Path(path, fileNames.get(i));
FileStatus fileStatus = new FileStatus(fileSizes.get(i), false, 1, getMaxSplitSize(session).toBytes(), 0, filePath);
try {
BlockLocation[] locations = new BlockLocation[] {new BlockLocation(BLOCK_LOCATION_NAMES, BLOCK_LOCATION_HOSTS, 0, fileSizes.get(i))};
BlockLocation[] locations = {new BlockLocation(BLOCK_LOCATION_NAMES, BLOCK_LOCATION_HOSTS, 0, fileSizes.get(i))};

// It is safe to set extraFileContext as empty because downstream code always checks if its present before proceeding.
fileListBuilder.add(HiveFileInfo.createHiveFileInfo(new LocatedFileStatus(fileStatus, locations), Optional.empty()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -289,7 +289,7 @@ public TestData createTestData(FileFormat format)
PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
ImmutableList.Builder<Page> pages = ImmutableList.builder();

int[] keys = new int[] {1, 2, 3, 4, 5};
int[] keys = {1, 2, 3, 4, 5};

long dataSize = 0;
while (dataSize < MIN_DATA_SIZE) {
Expand Down Expand Up @@ -363,7 +363,7 @@ public TestData createTestData(FileFormat format)
PageBuilder pageBuilder = new PageBuilder(ImmutableList.of(type));
ImmutableList.Builder<Page> pages = ImmutableList.builder();

int[] keys = new int[] {1, 2, 3, 4, 5};
int[] keys = {1, 2, 3, 4, 5};

long dataSize = 0;
while (dataSize < MIN_DATA_SIZE) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@

public class TestRangePartitionSerialization
{
private String[] testInputs = new String[] {
private String[] testInputs = {
"{\"lower\":1,\"upper\":null}",
"{\"lower\":12345678901234567890,\"upper\":1.234567890123457E-13}",
"{\"lower\":\"abc\",\"upper\":\"abf\"}",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ public final class HashCollisionsEstimator
{
private static final int NUMBER_OF_ESTIMATES = 500;
private static final int NUMBER_OF_VALUES = 10000;
private static final double[] COLLISION_ESTIMATES = new double[] {
private static final double[] COLLISION_ESTIMATES = {
0.0, 9.77, 20.024, 30.252, 40.554, 50.588, 61.114, 71.45, 81.688, 92.836, 101.336, 112.032, 123.372, 133.756, 145.64, 154.32, 164.952, 174.632, 187.62, 196.424,
208.184, 219.716, 230.614, 242.264, 252.744, 263.926, 274.3, 285.274, 296.816, 307.424, 318.646, 329.938, 341.668, 353.956, 363.388, 376.644, 386.668, 399.914, 410.468,
422.378, 436.078, 447.45, 456.398, 469.622, 480.428, 494.58, 506.178, 518.132, 532.714, 541.288, 558.324, 566.92, 580.938, 592.824, 604.56, 617.244, 630.956, 642.238,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
public class TestArrayBlock
extends AbstractTestBlock
{
private static final int[] ARRAY_SIZES = new int[] {16, 0, 13, 1, 2, 11, 4, 7};
private static final int[] ARRAY_SIZES = {16, 0, 13, 1, 2, 11, 4, 7};

@Test
public void testWithFixedWidthBlock()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@
public class TestGroupByHash
{
private static final int MAX_GROUP_ID = 500;
private static final int[] CONTAINS_CHANNELS = new int[] {0};
private static final int[] CONTAINS_CHANNELS = {0};
private static final Session TEST_SESSION = TestingSession.testSessionBuilder().build();
private static final JoinCompiler JOIN_COMPILER = new JoinCompiler(MetadataManager.createTestMetadataManager());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -170,7 +170,7 @@ public void testInsertIntoSpecialPartitionName()

// For special character in partition name, without correct handling, it would throw errors like 'Invalid partition spec: nationkey=A/B'
// In this test, verify those partition names can be successfully created
String[] specialCharacters = new String[] {"\"", "#", "%", "''", "*", "/", ":", "=", "?", "\\", "\\x7F", "{", "[", "]", "^"}; // escape single quote for sql
String[] specialCharacters = {"\"", "#", "%", "''", "*", "/", ":", "=", "?", "\\", "\\x7F", "{", "[", "]", "^"}; // escape single quote for sql
for (String specialCharacter : specialCharacters) {
getQueryRunner().execute(writeSession, String.format("INSERT INTO %s VALUES ('name', 'A%sB')", tmpTableName, specialCharacter));
assertQuery(String.format("SELECT nationkey FROM %s", tmpTableName), String.format("VALUES('A%sB')", specialCharacter));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ public class BenchmarkBytesValues
{
// NOT IN ("apple", "grape", "orange")
// "abc", "apple", "banana", "grape", "orange", "peach"
private final byte[][] testWords = new byte[][] {"abc".getBytes(), "apple".getBytes(), "banana".getBytes(), "grape".getBytes(), "orange".getBytes(), "peach".getBytes()};
private final byte[][] testWords = {"abc".getBytes(), "apple".getBytes(), "banana".getBytes(), "grape".getBytes(), "orange".getBytes(), "peach".getBytes()};

@Benchmark
public int lookupInExclusive(BenchmarkBytesValues.BenchmarkData data)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -228,7 +228,7 @@ public void testBufferReuse(long[] buffer, int items, boolean reused)
throws IOException
{
NodeId nodeId = new NodeId(1, 0);
long[] data = new long[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
long[] data = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16};
TestingHiveOrcAggregatedMemoryContext aggregatedMemoryContext = new TestingHiveOrcAggregatedMemoryContext();
LongDictionaryProvider dictionaryProvider = new LongDictionaryProvider(createLongDictionaryStreamSources(ImmutableMap.of(nodeId, data), aggregatedMemoryContext));
StreamId streamId = nodeId.toDictionaryDataStreamId();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
public class TextRcFileEncoding
implements RcFileEncoding
{
public static final byte[] DEFAULT_SEPARATORS = new byte[] {
public static final byte[] DEFAULT_SEPARATORS = {
1, // Start of Heading
2, // Start of text
3, // End of Text
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6172,7 +6172,7 @@ public void testDefaultSamplingPercent()
@Test
public void testKeyBasedSampling()
{
String[] queries = new String[] {
String[] queries = {
"select count(1) from orders join lineitem using(orderkey)",
"select count(1) from (select custkey, max(orderkey) from orders group by custkey)",
"select count_if(m >= 1) from (select max(orderkey) over(partition by custkey) m from orders)",
Expand All @@ -6182,7 +6182,7 @@ public void testKeyBasedSampling()
"select count(1) from (select distinct orderkey, custkey from orders)",
};

int[] unsampledResults = new int[] {60175, 1000, 15000, 5408941, 60175, 9256, 15000};
int[] unsampledResults = {60175, 1000, 15000, 5408941, 60175, 9256, 15000};
for (int i = 0; i < queries.length; i++) {
assertQuery(queries[i], "select " + unsampledResults[i]);
}
Expand All @@ -6192,7 +6192,7 @@ public void testKeyBasedSampling()
.setSystemProperty(KEY_BASED_SAMPLING_PERCENTAGE, "0.2")
.build();

int[] sampled20PercentResults = new int[] {37170, 616, 9189, 5408941, 37170, 5721, 9278};
int[] sampled20PercentResults = {37170, 616, 9189, 5408941, 37170, 5721, 9278};
for (int i = 0; i < queries.length; i++) {
assertQuery(sessionWithKeyBasedSampling, queries[i], "select " + sampled20PercentResults[i]);
}
Expand All @@ -6202,7 +6202,7 @@ public void testKeyBasedSampling()
.setSystemProperty(KEY_BASED_SAMPLING_PERCENTAGE, "0.1")
.build();

int[] sampled10PercentResults = new int[] {33649, 557, 8377, 4644937, 33649, 5098, 8397};
int[] sampled10PercentResults = {33649, 557, 8377, 4644937, 33649, 5098, 8397};
for (int i = 0; i < queries.length; i++) {
assertQuery(sessionWithKeyBasedSampling, queries[i], "select " + sampled10PercentResults[i]);
}
Expand Down

0 comments on commit cf90097

Please sign in to comment.