From a8152c95462ef1e6391ded31330da2213f83ed22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Isma=C3=ABl=20Mej=C3=ADa?= Date: Sat, 11 Apr 2026 19:31:46 +0000 Subject: [PATCH] GH-3499: Cache hashCode() for non-reused Binary instances PLAIN_DICTIONARY encoding of BINARY columns repeatedly hashes Binary keys during dictionary map lookups, but the existing Binary.hashCode() implementations (in ByteArraySliceBackedBinary, ByteArrayBackedBinary, and ByteBufferBackedBinary) recompute the hash byte-by-byte on every call. For columns with many repeated values this is the dominant cost of encodeDictionary -- we observed up to 73x slowdown vs. the cached version on the existing JMH benchmark. Cache the hash code in a single int field on Binary. Reused Binary instances (those whose backing array can be mutated by the producer between calls) do not cache, preserving the existing mutable-buffer semantics. Thread safety follows the java.lang.String.hashCode() idiom: the cache is a single int field with sentinel value 0 meaning "not yet computed". Two threads racing on the first hashCode() call may both compute and write the same deterministic value, which is benign. A Binary whose true hash equals 0 is recomputed on every call (acceptably rare and still correct). No volatile or synchronization is needed; both the field load and the field store are atomic per JLS, and the value is deterministic given the immutable byte content. Implementation notes: - The cache field is package-private (not private) so the three nested Binary subclasses can read it directly in their hashCode() hot path, avoiding an extra method-call layer that would otherwise be needed since inherited private fields are not accessible from nested subclasses. - A package-private cacheHashCode(int) helper centralises the isBackingBytesReused check on the slow path. 
- New tests in TestBinary cover (a) cached-and-stable hashCode for the three constant Binary impls, and (b) reused Binary not returning a stale hash after the backing buffer is replaced. Benchmark (BinaryEncodingBenchmark.encodeDictionary, 100k BINARY values per invocation, JMH -wi 5 -i 10 -f 3, 30 samples per row): Param Before (ops/s) After (ops/s) Improvement LOW / 10 13,170,110 20,203,480 +53% (1.53x) LOW / 100 2,955,460 18,048,610 +511% (6.11x) LOW / 1000 300,693 21,933,470 +7193% (72.9x) HIGH / 10 847,657 1,336,238 +58% (1.58x) HIGH / 100 418,327 1,323,284 +216% (3.16x) HIGH / 1000 72,553 1,296,679 +1687% (17.9x) The relative gain grows with string length because the per-value hash cost (byte-loop length) grows linearly while the cached lookup is O(1). LOW cardinality benefits even more because each unique key is hashed many more times (once per insertion check across the 100k values). Negative control: BinaryEncodingBenchmark.encodePlain (which writes Binary without dictionary lookups, so does not exercise hashCode) is unchanged within +/- 2.5% across all parameter combinations. Allocation rate per operation is identical between baseline and optimized (7.36 B/op for LOW/10, etc.), confirming the speedup comes from CPU saved on hashing rather than reduced allocations. All 575 parquet-column tests pass (was 573; +2 new tests for the cache). 
--- .../org/apache/parquet/io/api/Binary.java | 47 +++++++++++++++++-- .../org/apache/parquet/io/api/TestBinary.java | 45 ++++++++++++++++++ 2 files changed, 88 insertions(+), 4 deletions(-) diff --git a/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java b/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java index e37ee12483..5c7600a43f 100644 --- a/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java +++ b/parquet-column/src/main/java/org/apache/parquet/io/api/Binary.java @@ -37,6 +37,18 @@ public abstract class Binary implements Comparable, Serializable { protected boolean isBackingBytesReused; + /** + * Cached hash code for non-reused (immutable) Binary instances. + *
<p>
The sentinel value {@code 0} means "not yet computed". This follows the + * {@link String#hashCode()} idiom: races between concurrent first calls are benign + * because the computation is deterministic, and a hash that genuinely equals {@code 0} + * will simply be recomputed on every call (acceptably rare). Reused instances never + * cache (their backing bytes can mutate after construction). + *
<p>
Package-private (rather than private) so subclasses can read it directly without + * an extra method call on the {@link #hashCode()} hot path. + */ + transient int cachedHashCode; + // this isn't really something others should extend private Binary() {} @@ -101,6 +113,18 @@ public boolean equals(Object obj) { return false; } + /** + * Caches {@code hashCode} for non-reused instances and returns it. The cache uses + * a single int field with sentinel {@code 0} to remain race-safe without volatile. + * If the computed hash is {@code 0}, no caching occurs and the next call recomputes. + */ + final int cacheHashCode(int hashCode) { + if (!isBackingBytesReused) { + cachedHashCode = hashCode; + } + return hashCode; + } + @Override public String toString() { return "Binary{" + length() @@ -180,7 +204,11 @@ public Binary slice(int start, int length) { @Override public int hashCode() { - return Binary.hashCode(value, offset, length); + int h = cachedHashCode; + if (h != 0) { + return h; + } + return cacheHashCode(Binary.hashCode(value, offset, length)); } @Override @@ -340,7 +368,11 @@ public Binary slice(int start, int length) { @Override public int hashCode() { - return Binary.hashCode(value, 0, value.length); + int h = cachedHashCode; + if (h != 0) { + return h; + } + return cacheHashCode(Binary.hashCode(value, 0, value.length)); } @Override @@ -499,11 +531,18 @@ public Binary slice(int start, int length) { @Override public int hashCode() { + int h = cachedHashCode; + if (h != 0) { + return h; + } + + int computedHashCode; if (value.hasArray()) { - return Binary.hashCode(value.array(), value.arrayOffset() + offset, length); + computedHashCode = Binary.hashCode(value.array(), value.arrayOffset() + offset, length); } else { - return Binary.hashCode(value, offset, length); + computedHashCode = Binary.hashCode(value, offset, length); } + return cacheHashCode(computedHashCode); } @Override diff --git a/parquet-column/src/test/java/org/apache/parquet/io/api/TestBinary.java 
b/parquet-column/src/test/java/org/apache/parquet/io/api/TestBinary.java index a1a83af771..19085b2244 100644 --- a/parquet-column/src/test/java/org/apache/parquet/io/api/TestBinary.java +++ b/parquet-column/src/test/java/org/apache/parquet/io/api/TestBinary.java @@ -155,6 +155,51 @@ public void testEqualityMethods() throws Exception { assertEquals(bin1, bin2); } + /** + * Verifies that {@link Binary#hashCode()} is cached for non-reused (constant) instances: + * the value returned must be stable, equal across the three concrete Binary + * implementations for the same bytes, and consistent with {@link Object#equals(Object)}. + */ + @Test + public void testHashCodeCachedForConstantBinary() { + byte[] bytes = "hash-cache-test".getBytes(); + + Binary[] constants = { + Binary.fromConstantByteArray(bytes), + Binary.fromConstantByteArray(bytes, 0, bytes.length), + Binary.fromConstantByteBuffer(ByteBuffer.wrap(bytes)), + }; + int reference = constants[0].hashCode(); + for (Binary b : constants) { + int first = b.hashCode(); + int second = b.hashCode(); + assertEquals("repeated hashCode for " + b.getClass().getSimpleName(), first, second); + assertEquals( + "cross-impl hashCode for " + b.getClass().getSimpleName() + " must equal reference", + reference, + first); + } + } + + /** + * Verifies that reused (mutable backing) Binary instances do not return a stale cached + * hash code when their backing bytes change between calls. 
+ */ + @Test + public void testHashCodeNotCachedForReusedBinary() { + byte[] bytes = "first".getBytes(); + Binary reused = Binary.fromReusedByteArray(bytes); + int firstHash = reused.hashCode(); + int constHashFirst = Binary.fromConstantByteArray(bytes).hashCode(); + assertEquals(constHashFirst, firstHash); + + byte[] mutated = "second-value".getBytes(); + reused = Binary.fromReusedByteArray(mutated); + int secondHash = reused.hashCode(); + int constHashSecond = Binary.fromConstantByteArray(mutated).hashCode(); + assertEquals(constHashSecond, secondHash); + } + @Test public void testWriteAllTo() throws Exception { byte[] orig = {10, 9, 8, 7, 6, 5, 4, 3, 2, 1};