diff --git a/lang/java/avro/pom.xml b/lang/java/avro/pom.xml
index ee7b525c94b..1b3b95245b3 100644
--- a/lang/java/avro/pom.xml
+++ b/lang/java/avro/pom.xml
@@ -250,5 +250,10 @@
<artifactId>hamcrest-library</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-core</artifactId>
+ <scope>test</scope>
+ </dependency>
diff --git a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
index 05e5006acbf..a1fb6c4377b 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/file/DataFileWriter.java
@@ -33,6 +33,7 @@
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
+import java.util.function.Function;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
@@ -72,6 +73,8 @@ public class DataFileWriter<D> implements Closeable, Flushable {
private byte[] sync; // 16 random bytes
private int syncInterval = DataFileConstants.DEFAULT_SYNC_INTERVAL;
+ private Function<OutputStream, BinaryEncoder> initEncoder = out -> new EncoderFactory().directBinaryEncoder(out,
+ null);
private boolean isOpen;
private Codec codec;
@@ -129,6 +132,17 @@ public DataFileWriter<D> setSyncInterval(int syncInterval) {
return this;
}
+ /**
+ * Allows setting a different encoder than the default DirectBinaryEncoder.
+ *
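+ * For example, to write a file whose array and map blocks are prefixed with
+ * their byte size (an illustrative sketch using the factory method added in
+ * this patch):
+ *
+ * <pre>{@code
+ * DataFileWriter<GenericRecord> writer = new DataFileWriter<>(new GenericDatumWriter<>());
+ * writer.setEncoder(out -> EncoderFactory.get().blockingDirectBinaryEncoder(out, null));
+ * }</pre>
+ *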
+ * @param initEncoderFunc Function to create a binary encoder
+ * @return this DataFileWriter
+ */
+ public DataFileWriter<D> setEncoder(Function<OutputStream, BinaryEncoder> initEncoderFunc) {
+ this.initEncoder = initEncoderFunc;
+ return this;
+ }
+
/** Open a new file for data matching a schema with a random sync. */
public DataFileWriter<D> create(Schema schema, File file) throws IOException {
SyncableFileOutputStream sfos = new SyncableFileOutputStream(file);
@@ -241,7 +255,7 @@ private void init(OutputStream outs) throws IOException {
this.vout = efactory.directBinaryEncoder(out, null);
dout.setSchema(schema);
buffer = new NonCopyingByteArrayOutputStream(Math.min((int) (syncInterval * 1.25), Integer.MAX_VALUE / 2 - 1));
- this.bufOut = efactory.directBinaryEncoder(buffer, null);
+ this.bufOut = this.initEncoder.apply(buffer);
if (this.codec == null) {
this.codec = CodecFactory.nullCodec().createInstance();
}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/BlockingDirectBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/BlockingDirectBinaryEncoder.java
new file mode 100644
index 00000000000..2ef2375e640
--- /dev/null
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/BlockingDirectBinaryEncoder.java
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+
+/**
+ * An {@link Encoder} for Avro's binary encoding that does not buffer output.
+ *
+ * In contrast to {@link BufferedBinaryEncoder}, this encoder does not buffer
+ * its writes. It is lighter-weight and useful when the buffering of
+ * BufferedBinaryEncoder is not desired, for example because buffering happens
+ * at a different level or because the Encoder is very short-lived.
+ *
+ * The BlockingDirectBinaryEncoder additionally encodes the size in bytes of
+ * the Map and Array blocks it writes, which allows a reader to postpone
+ * decoding a block or to skip over it entirely.
+ *
+ * To construct, use
+ * {@link EncoderFactory#blockingDirectBinaryEncoder(OutputStream, BinaryEncoder)}.
+ *
+ * {@link BlockingDirectBinaryEncoder} instances returned by this method are
+ * not thread-safe.
+ *
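+ * For example, a minimal sketch writing a one-item array (the byte comments
+ * are illustrative): on writeArrayEnd() the encoder emits the item count
+ * negated, then the block's size in bytes, then the buffered items:
+ *
+ * <pre>{@code
+ * BinaryEncoder e = EncoderFactory.get().blockingDirectBinaryEncoder(out, null);
+ * e.writeArrayStart();
+ * e.setItemCount(1);
+ * e.startItem();
+ * e.writeInt(1);
+ * e.writeArrayEnd(); // emits -1 (count), 1 (byte size), the item, then 0 (end of array)
+ * e.flush();
+ * }</pre>
+ *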
+ * @see BinaryEncoder
+ * @see EncoderFactory
+ * @see Encoder
+ * @see Decoder
+ */
+public class BlockingDirectBinaryEncoder extends DirectBinaryEncoder {
+ private final ArrayList<BufferOutputStream> buffers;
+
+ private final ArrayDeque<OutputStream> stashedBuffers;
+
+ private int depth = 0;
+
+ private final ArrayDeque<Long> blockItemCounts;
+
+ /**
+ * Create a writer that sends its output to the underlying stream
+ * <code>out</code>.
+ *
+ * @param out The OutputStream to write to
+ */
+ public BlockingDirectBinaryEncoder(OutputStream out) {
+ super(out);
+ this.buffers = new ArrayList<>();
+ this.stashedBuffers = new ArrayDeque<>();
+ this.blockItemCounts = new ArrayDeque<>();
+ }
+
+ private void startBlock() {
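+ // Redirect subsequent writes into a per-depth scratch buffer; the previous
+ // destination is stashed so that endBlock() can restore it and prepend the
+ // block's item count and byte size to the buffered data.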
+ stashedBuffers.push(out);
+ if (this.buffers.size() <= depth) {
+ this.buffers.add(new BufferOutputStream());
+ }
+ BufferOutputStream buf = buffers.get(depth);
+ buf.reset();
+ this.depth += 1;
+ this.out = buf;
+ }
+
+ private void endBlock() {
+ if (depth == 0) {
+ throw new RuntimeException("Called endBlock while not buffering a block");
+ }
+ this.depth -= 1;
+ out = stashedBuffers.pop();
+ BufferOutputStream buffer = this.buffers.get(depth);
+ long blockItemCount = blockItemCounts.pop();
+ if (blockItemCount > 0) {
+ try {
+ // Make it negative, so the reader knows that the number of bytes is coming
+ writeLong(-blockItemCount);
+ writeLong(buffer.size());
+ writeFixed(buffer.toBufferWithoutCopy());
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+ @Override
+ public void setItemCount(long itemCount) throws IOException {
+ blockItemCounts.push(itemCount);
+ }
+
+ @Override
+ public void writeArrayStart() throws IOException {
+ startBlock();
+ }
+
+ @Override
+ public void writeArrayEnd() throws IOException {
+ endBlock();
+ // Writes another zero to indicate that this is the last block
+ super.writeArrayEnd();
+ }
+
+ @Override
+ public void writeMapStart() throws IOException {
+ startBlock();
+ }
+
+ @Override
+ public void writeMapEnd() throws IOException {
+ endBlock();
+ // Writes another zero to indicate that this is the last block
+ super.writeMapEnd();
+ }
+
+ private static class BufferOutputStream extends ByteArrayOutputStream {
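+ // Exposes ByteArrayOutputStream's internal buffer as a ByteBuffer so that
+ // endBlock() can hand it to writeFixed() without an intermediate copy.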
+ BufferOutputStream() {
+ }
+
+ ByteBuffer toBufferWithoutCopy() {
+ return ByteBuffer.wrap(buf, 0, count);
+ }
+
+ }
+}
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
index 62b2a482627..df7c118b648 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/DirectBinaryEncoder.java
@@ -27,20 +27,20 @@
* This encoder does not buffer writes, and as a result is slower than
* {@link BufferedBinaryEncoder}. However, it is lighter-weight and useful when
* the buffering in BufferedBinaryEncoder is not desired and/or the Encoder is
- * very short lived.
+ * very short-lived.
*
* To construct, use
* {@link EncoderFactory#directBinaryEncoder(OutputStream, BinaryEncoder)}
*
* DirectBinaryEncoder is not thread-safe
- *
+ *
* @see BinaryEncoder
* @see EncoderFactory
* @see Encoder
* @see Decoder
*/
public class DirectBinaryEncoder extends BinaryEncoder {
- private OutputStream out;
+ protected OutputStream out;
// the buffer is used for writing floats, doubles, and large longs.
private final byte[] buf = new byte[12];
@@ -48,7 +48,7 @@ public class DirectBinaryEncoder extends BinaryEncoder {
* Create a writer that sends its output to the underlying stream
* <code>out</code>.
**/
- DirectBinaryEncoder(OutputStream out) {
+ protected DirectBinaryEncoder(OutputStream out) {
configure(out);
}
@@ -69,8 +69,8 @@ public void writeBoolean(boolean b) throws IOException {
}
/*
- * buffering is slower for ints that encode to just 1 or two bytes, and and
- * faster for large ones. (Sun JRE 1.6u22, x64 -server)
+ * buffering is slower for ints that encode to just one or two bytes, and
+ * faster for large ones. (Sun JRE 1.6u22, x64 -server)
*/
@Override
public void writeInt(int n) throws IOException {
diff --git a/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java b/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
index 0188a29637d..bc412d58da4 100644
--- a/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
+++ b/lang/java/avro/src/main/java/org/apache/avro/io/EncoderFactory.java
@@ -216,6 +216,49 @@ public BinaryEncoder directBinaryEncoder(OutputStream out, BinaryEncoder reuse)
}
}
+ /**
+ * Creates or reinitializes a {@link BlockingDirectBinaryEncoder} with the
+ * OutputStream provided as the destination for written data. If reuse is
+ * provided, an attempt will be made to reconfigure reuse rather than
+ * construct a new instance, but this is not guaranteed; a new instance may be
+ * returned.
+ *
+ * The {@link BinaryEncoder} implementation returned does not buffer its output;
+ * calling {@link Encoder#flush()} will simply cause the wrapped OutputStream to
+ * be flushed.
+ *
+ * The {@link BlockingDirectBinaryEncoder} will write the block sizes for the
+ * arrays and maps so that they can be skipped efficiently.
+ *
+ * Performance of unbuffered writes can be significantly slower than buffered
+ * writes. {@link #binaryEncoder(OutputStream, BinaryEncoder)} returns
+ * BinaryEncoder instances that are tuned for performance but may buffer output.
+ * The unbuffered, 'direct' encoder may be desired when buffering semantics are
+ * problematic, or if the lifetime of the encoder is so short that the buffer
+ * would not be useful.
+ *
+ * {@link BinaryEncoder} instances returned by this method are not thread-safe.
+ *
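+ * A minimal sketch (assuming an existing {@code OutputStream out}):
+ *
+ * <pre>{@code
+ * BinaryEncoder enc = EncoderFactory.get().blockingDirectBinaryEncoder(out, null);
+ * enc.writeArrayStart();
+ * enc.setItemCount(2);
+ * enc.startItem();
+ * enc.writeLong(1L);
+ * enc.startItem();
+ * enc.writeLong(2L);
+ * enc.writeArrayEnd(); // the block is prefixed with -2 and its size in bytes
+ * enc.flush();
+ * }</pre>
+ *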
+ * @param out The OutputStream to initialize to. Cannot be null.
+ * @param reuse The BinaryEncoder to attempt to reuse given the factory
+ * configuration. A BinaryEncoder implementation may not be
+ * compatible with reuse, causing a new instance to be returned. If
+ * null, a new instance is returned.
+ * @return A BinaryEncoder that uses out as its data output. If
+ * reuse is null, this will be a new instance. If reuse is
+ * not null, then the returned instance may be a new instance or
+ * reuse reconfigured to use out.
+ * @see DirectBinaryEncoder
+ * @see Encoder
+ */
+ public BinaryEncoder blockingDirectBinaryEncoder(OutputStream out, BinaryEncoder reuse) {
+ if (null == reuse || !reuse.getClass().equals(BlockingDirectBinaryEncoder.class)) {
+ return new BlockingDirectBinaryEncoder(out);
+ } else {
+ return ((DirectBinaryEncoder) reuse).configure(out);
+ }
+ }
+
/**
* Creates or reinitializes a {@link BinaryEncoder} with the OutputStream
* provided as the destination for written data. If reuse is provided, an
diff --git a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
index ef20201a37b..c11b8377d18 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/TestDataFile.java
@@ -25,9 +25,11 @@
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
+import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
+import java.util.function.Function;
import java.util.stream.Stream;
import org.apache.avro.file.CodecFactory;
@@ -40,7 +42,9 @@
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumReader;
+import org.apache.avro.io.EncoderFactory;
import org.apache.avro.util.RandomData;
import org.junit.jupiter.api.Test;
@@ -93,22 +97,32 @@ private File makeFile(CodecFactory codec) {
@ParameterizedTest
@MethodSource("codecs")
public void runTestsInOrder(CodecFactory codec) throws Exception {
- LOG.info("Running with codec: " + codec);
- testGenericWrite(codec);
- testGenericRead(codec);
- testSplits(codec);
- testSyncDiscovery(codec);
- testGenericAppend(codec);
- testReadWithHeader(codec);
- testFSync(codec, false);
- testFSync(codec, true);
+ // Run for both encoders; @MethodSource does not easily supply the extra
+ // parameter, so the encoders are looped over within the test
+ List<Function<OutputStream, BinaryEncoder>> encoders = new ArrayList<>();
+ encoders.add(b -> new EncoderFactory().directBinaryEncoder(b, null));
+ encoders.add(b -> new EncoderFactory().blockingDirectBinaryEncoder(b, null));
+
+ for (Function<OutputStream, BinaryEncoder> encoder : encoders) {
+ LOG.info("Running with codec: {}", codec);
+ testGenericWrite(codec, encoder);
+ testGenericRead(codec);
+ testSplits(codec);
+ testSyncDiscovery(codec);
+ testGenericAppend(codec, encoder);
+ testReadWithHeader(codec);
+ testFSync(codec, encoder, false);
+ testFSync(codec, encoder, true);
+ }
}
- private void testGenericWrite(CodecFactory codec) throws IOException {
+ private void testGenericWrite(CodecFactory codec, Function<OutputStream, BinaryEncoder> encoderFunc)
+ throws IOException {
DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>()).setSyncInterval(100);
if (codec != null) {
writer.setCodec(codec);
}
+ writer.setEncoder(encoderFunc);
writer.create(SCHEMA, makeFile(codec));
try {
int count = 0;
@@ -210,10 +224,12 @@ private void testSyncDiscovery(CodecFactory codec) throws IOException {
}
}
- private void testGenericAppend(CodecFactory codec) throws IOException {
+ private void testGenericAppend(CodecFactory codec, Function<OutputStream, BinaryEncoder> encoderFunc)
+ throws IOException {
File file = makeFile(codec);
long start = file.length();
try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>()).appendTo(file)) {
+ writer.setEncoder(encoderFunc);
for (Object datum : new RandomData(SCHEMA, COUNT, SEED + 1)) {
writer.append(datum);
}
@@ -254,11 +270,8 @@ private void testReadWithHeader(CodecFactory codec) throws IOException {
assertEquals(validPos, sin.tell(), "Should not move from sync point on reopen");
assertNotNull(readerFalse.next(), "Should be able to reopen at sync point");
}
-
}
-
}
-
}
@Test
@@ -306,8 +319,10 @@ public void testFlushCount() throws IOException {
assertTrue(out.flushCount < currentCount && out.flushCount >= flushCounter);
}
- private void testFSync(CodecFactory codec, boolean useFile) throws IOException {
+ private void testFSync(CodecFactory codec, Function<OutputStream, BinaryEncoder> encoderFunc, boolean useFile)
+ throws IOException {
try (DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>())) {
+ writer.setEncoder(encoderFunc);
writer.setFlushOnEveryBlock(false);
TestingByteArrayOutputStream out = new TestingByteArrayOutputStream();
if (useFile) {
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
index f452c8b29b7..aa4bd60a5c2 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBinaryEncoderFidelity.java
@@ -179,6 +179,48 @@ public void testDirectBinaryEncoder() throws IOException {
Assert.assertArrayEquals(complexdata, result2);
}
+ @Test
+ public void testBlockingDirectBinaryEncoder() throws IOException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ BinaryEncoder e = factory.blockingDirectBinaryEncoder(baos, null);
+ generateData(e, true);
+
+ byte[] result = baos.toByteArray();
+ Assert.assertEquals(legacydata.length, result.length);
+ Assert.assertArrayEquals(legacydata, result);
+ baos.reset();
+
+ generateComplexData(e);
+ byte[] result2 = baos.toByteArray();
+ // blocking writes the byte size of each block, so the output is two bytes larger
+ Assert.assertEquals(complexdata.length + 2, result2.length);
+ // the first byte is the array start; blocking writes the item count negated, and zigzag(-1) == zigzag(1) >>> 1
+ Assert.assertEquals(complexdata[0] >>> 1, result2[0]);
+ baos.reset();
+
+ e.writeArrayStart();
+ e.setItemCount(1);
+ e.startItem();
+ e.writeInt(1);
+ e.writeArrayEnd();
+
+ // 1: one item in this block (the count is written negated: zigzag(-1) == 1)
+ // 2: the block's size in bytes (zigzag(1) == 2)
+ // 2: the int value 1 (zigzag(1) == 2)
+ // 0: zero items in the next block, i.e. end of the array
+ Assert.assertArrayEquals(new byte[] { 1, 2, 2, 0 }, baos.toByteArray());
+ baos.reset();
+
+ e.writeArrayStart();
+ e.setItemCount(0);
+ e.writeArrayEnd();
+
+ // An empty array is just the end-of-array marker
+ // 0: zero items in the block
+ Assert.assertArrayEquals(new byte[] { 0 }, baos.toByteArray());
+ baos.reset();
+ }
+
@Test
public void testBlockingBinaryEncoder() throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingDirectBinaryEncoder.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingDirectBinaryEncoder.java
new file mode 100644
index 00000000000..e28e5b99590
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestBlockingDirectBinaryEncoder.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.io;
+
+import org.apache.avro.Schema;
+import org.apache.avro.SchemaNormalization;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.specific.TestRecordWithMapsAndArrays;
+import org.hamcrest.Matchers;
+import org.junit.jupiter.api.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+import java.util.Map;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.*;
+
+public class TestBlockingDirectBinaryEncoder {
+
+ private void writeToArray(BinaryEncoder encoder, int[] numbers) throws IOException {
+ encoder.writeArrayStart();
+ encoder.setItemCount(numbers.length);
+ for (int number : numbers) {
+ encoder.startItem();
+ encoder.writeString(Integer.toString(number));
+ }
+ encoder.writeArrayEnd();
+ }
+
+ private void writeToMap(BinaryEncoder encoder, long[] numbers) throws IOException {
+ encoder.writeMapStart();
+ encoder.setItemCount(numbers.length);
+ for (long number : numbers) {
+ encoder.startItem();
+ encoder.writeString(Long.toString(number));
+ encoder.writeLong(number);
+ }
+ encoder.writeMapEnd();
+ }
+
+ @Test
+ void blockingDirectBinaryEncoder() throws IOException, NoSuchAlgorithmException {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ BinaryEncoder encoder = EncoderFactory.get().blockingDirectBinaryEncoder(baos, null);
+
+ // This is needed because there is no out-of-the-box BinaryMessageWriter that
+ // uses the BlockingDirectBinaryEncoder, so the single-object encoding header
+ // (magic bytes plus schema fingerprint) is written by hand
+ encoder.writeFixed(new byte[] { (byte) 0xC3, (byte) 0x01 });
+ encoder.writeFixed(SchemaNormalization.parsingFingerprint("CRC-64-AVRO", TestRecordWithMapsAndArrays.SCHEMA$));
+
+ // Array
+ this.writeToArray(encoder, new int[] { 1, 2, 3, 4, 5 });
+
+ // Map
+ writeToMap(encoder, new long[] { 1L, 2L, 3L, 4L, 5L });
+
+ // Nested Array
+
+ encoder.writeArrayStart();
+ encoder.setItemCount(2);
+ this.writeToArray(encoder, new int[] { 1, 2 });
+ this.writeToArray(encoder, new int[] { 3, 4, 5 });
+ encoder.writeArrayEnd();
+
+ // Nested Map
+
+ encoder.writeMapStart();
+ encoder.setItemCount(2);
+ encoder.writeString("first");
+ this.writeToMap(encoder, new long[] { 1L, 2L });
+ encoder.writeString("second");
+ this.writeToMap(encoder, new long[] { 3L, 4L, 5L });
+ encoder.writeMapEnd();
+
+ // Read
+
+ encoder.flush();
+
+ BinaryMessageDecoder<TestRecordWithMapsAndArrays> decoder = TestRecordWithMapsAndArrays.getDecoder();
+ TestRecordWithMapsAndArrays r = decoder.decode(baos.toByteArray());
+
+ assertThat(r.getArr(), is(Arrays.asList("1", "2", "3", "4", "5")));
+ Map<String, Long> map = r.getMap();
+ assertThat(map.size(), is(5));
+ for (long i = 1; i <= 5; i++) {
+ assertThat(map.get(Long.toString(i)), is(i));
+ }
+
+ assertThat(r.getNestedArr(), is(Arrays.asList(Arrays.asList("1", "2"), Arrays.asList("3", "4", "5"))));
+
+ Map<String, Map<String, Long>> nestedMap = r.getNestedMap();
+ assertThat(nestedMap.size(), is(2));
+
+ assertThat(nestedMap.get("first").size(), is(2));
+ assertThat(nestedMap.get("first").get("1"), is(1L));
+ assertThat(nestedMap.get("first").get("2"), is(2L));
+
+ assertThat(nestedMap.get("second").size(), is(3));
+ assertThat(nestedMap.get("second").get("3"), is(3L));
+ assertThat(nestedMap.get("second").get("4"), is(4L));
+ assertThat(nestedMap.get("second").get("5"), is(5L));
+ }
+
+ @Test
+ void testSkippingUsingBlocks() throws IOException, NoSuchAlgorithmException {
+ // Create an empty schema for read, so we skip over all the fields
+ Schema emptySchema = new Schema.Parser().parse(
+ "{\"type\":\"record\",\"name\":\"TestRecordWithMapsAndArrays\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[]}");
+
+ GenericDatumReader<?> in = new GenericDatumReader<>(TestRecordWithMapsAndArrays.SCHEMA$, emptySchema);
+ Decoder mockDecoder = mock(BinaryDecoder.class);
+
+ for (long i = 0; i < 1; i++) {
+ in.read(null, mockDecoder);
+ }
+
+ verify(mockDecoder, times(2)).skipMap();
+ verify(mockDecoder, times(2)).skipArray();
+ verify(mockDecoder, times(0)).readString();
+ verify(mockDecoder, times(0)).readLong();
+ }
+}
diff --git a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
index 7e893ab1394..6ad85e6b251 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/io/TestEncoders.java
@@ -32,6 +32,7 @@
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
+import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
@@ -53,7 +54,7 @@ public class TestEncoders {
private static final int ENCODER_BUFFER_SIZE = 32;
private static final int EXAMPLE_DATA_SIZE = 17;
- private static EncoderFactory factory = EncoderFactory.get();
+ private static final EncoderFactory FACTORY = EncoderFactory.get();
@Rule
public TemporaryFolder DIR = new TemporaryFolder();
@@ -61,65 +62,92 @@ public class TestEncoders {
@Test
public void testBinaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
- BinaryEncoder enc = factory.binaryEncoder(out, null);
- Assert.assertSame(enc, factory.binaryEncoder(out, enc));
+ BinaryEncoder enc = FACTORY.binaryEncoder(out, null);
+ Assert.assertSame(enc, FACTORY.binaryEncoder(out, enc));
}
@Test(expected = NullPointerException.class)
public void testBadBinaryEncoderInit() {
- factory.binaryEncoder(null, null);
+ FACTORY.binaryEncoder(null, null);
}
@Test
public void testBlockingBinaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
BinaryEncoder reuse = null;
- reuse = factory.blockingBinaryEncoder(out, reuse);
- Assert.assertSame(reuse, factory.blockingBinaryEncoder(out, reuse));
+ reuse = FACTORY.blockingBinaryEncoder(out, reuse);
+ Assert.assertSame(reuse, FACTORY.blockingBinaryEncoder(out, reuse));
// comparison
}
@Test(expected = NullPointerException.class)
public void testBadBlockintBinaryEncoderInit() {
- factory.binaryEncoder(null, null);
+ FACTORY.binaryEncoder(null, null);
}

@Test
public void testDirectBinaryEncoderInit() throws IOException {
OutputStream out = new ByteArrayOutputStream();
- BinaryEncoder enc = factory.directBinaryEncoder(out, null);
- Assert.assertSame(enc, factory.directBinaryEncoder(out, enc));
+ BinaryEncoder enc = FACTORY.directBinaryEncoder(out, null);
+ Assert.assertSame(enc, FACTORY.directBinaryEncoder(out, enc));
}

@Test(expected = NullPointerException.class)
public void testBadDirectBinaryEncoderInit() {
- factory.directBinaryEncoder(null, null);
+ FACTORY.directBinaryEncoder(null, null);
}

+ @Test
+ public void testBlockingDirectBinaryEncoderInit() throws IOException {
+ OutputStream out = new ByteArrayOutputStream();
+ BinaryEncoder enc = FACTORY.blockingDirectBinaryEncoder(out, null);
+ Assert.assertSame(enc, FACTORY.blockingDirectBinaryEncoder(out, enc));
+ }
+
@Test
public void testJsonEncoderInit() throws IOException {
Schema s = new Schema.Parser().parse("\"int\"");
OutputStream out = new ByteArrayOutputStream();
- factory.jsonEncoder(s, out);
- JsonEncoder enc = factory.jsonEncoder(s, new JsonFactory().createGenerator(out, JsonEncoding.UTF8));
+ FACTORY.jsonEncoder(s, out);
+ JsonEncoder enc = FACTORY.jsonEncoder(s, new JsonFactory().createGenerator(out, JsonEncoding.UTF8));
enc.configure(out);
}
@Test(expected = NullPointerException.class)
public void testBadJsonEncoderInitOS() throws IOException {
- factory.jsonEncoder(Schema.create(Type.INT), (OutputStream) null);
+ FACTORY.jsonEncoder(Schema.create(Type.INT), (OutputStream) null);
}
@Test(expected = NullPointerException.class)
public void testBadJsonEncoderInit() throws IOException {
- factory.jsonEncoder(Schema.create(Type.INT), (JsonGenerator) null);
+ FACTORY.jsonEncoder(Schema.create(Type.INT), (JsonGenerator) null);
}
@Test
public void testJsonEncoderNewlineDelimited() throws IOException {
OutputStream out = new ByteArrayOutputStream();
Schema ints = Schema.create(Type.INT);
- Encoder e = factory.jsonEncoder(ints, out);
+ Encoder e = FACTORY.jsonEncoder(ints, out);
String separator = System.getProperty("line.separator");
GenericDatumWriter writer = new GenericDatumWriter<>(ints);
writer.write(1, e);
@@ -158,8 +186,8 @@ public void testJsonEncoderWhenIncludeNamespaceOptionIsTrue() throws IOException
public void testValidatingEncoderInit() throws IOException {
Schema s = new Schema.Parser().parse("\"int\"");
OutputStream out = new ByteArrayOutputStream();
- Encoder e = factory.directBinaryEncoder(out, null);
- factory.validatingEncoder(s, e).configure(e);
+ Encoder e = FACTORY.directBinaryEncoder(out, null);
+ FACTORY.validatingEncoder(s, e).configure(e);
}
@Test
@@ -311,7 +339,7 @@ private String fromAvroToJson(byte[] avroBytes, Schema schema, boolean includeNa
DatumWriter writer = new GenericDatumWriter<>(schema);
ByteArrayOutputStream output = new ByteArrayOutputStream();
- JsonEncoder encoder = factory.jsonEncoder(schema, output);
+ JsonEncoder encoder = FACTORY.jsonEncoder(schema, output);
encoder.setIncludeNamespace(includeNamespace);
Decoder decoder = DecoderFactory.get().binaryDecoder(avroBytes, null);
Object datum = reader.read(null, decoder);
diff --git a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java
index c2e1ebd384c..1763a73144c 100644
--- a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java
+++ b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithLogicalTypes.java
@@ -1,7 +1,19 @@
-/**
- * Autogenerated by Avro
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
*
- * DO NOT EDIT DIRECTLY
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
*/
package org.apache.avro.specific;
diff --git a/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithMapsAndArrays.java b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithMapsAndArrays.java
new file mode 100644
index 00000000000..81572bc22c5
--- /dev/null
+++ b/lang/java/avro/src/test/java/org/apache/avro/specific/TestRecordWithMapsAndArrays.java
@@ -0,0 +1,875 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro.specific;
+
+import org.apache.avro.generic.GenericArray;
+import org.apache.avro.specific.SpecificData;
+import org.apache.avro.util.Utf8;
+import org.apache.avro.message.BinaryMessageEncoder;
+import org.apache.avro.message.BinaryMessageDecoder;
+import org.apache.avro.message.SchemaStore;
+
+@AvroGenerated
+public class TestRecordWithMapsAndArrays extends SpecificRecordBase implements SpecificRecord {
+ private static final long serialVersionUID = -3823801533006425147L;
+
+ public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse(
+ "{\"type\":\"record\",\"name\":\"TestRecordWithMapsAndArrays\",\"namespace\":\"org.apache.avro.specific\",\"fields\":[{\"name\":\"arr\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"default\":[]}},{\"name\":\"map\",\"type\":{\"type\":\"map\",\"values\":\"long\",\"avro.java.string\":\"String\",\"default\":{}}},{\"name\":\"nested_arr\",\"type\":{\"type\":\"array\",\"items\":{\"type\":\"array\",\"items\":{\"type\":\"string\",\"avro.java.string\":\"String\"},\"default\":[]},\"default\":[]}},{\"name\":\"nested_map\",\"type\":{\"type\":\"map\",\"values\":{\"type\":\"map\",\"values\":\"long\",\"avro.java.string\":\"String\",\"default\":{}},\"avro.java.string\":\"String\",\"default\":{}}}]}");
+
+ public static org.apache.avro.Schema getClassSchema() {
+ return SCHEMA$;
+ }
+
+ private static final SpecificData MODEL$ = new SpecificData();
+
+ private static final BinaryMessageEncoder<TestRecordWithMapsAndArrays> ENCODER = new BinaryMessageEncoder<>(MODEL$,
+ SCHEMA$);
+
+ private static final BinaryMessageDecoder<TestRecordWithMapsAndArrays> DECODER = new BinaryMessageDecoder<>(MODEL$,
+ SCHEMA$);
+
+ /**
+ * Return the BinaryMessageEncoder instance used by this class.
+ *
+ * @return the message encoder used by this class
+ */
+ public static BinaryMessageEncoder<TestRecordWithMapsAndArrays> getEncoder() {
+ return ENCODER;
+ }
+
+ /**
+ * Return the BinaryMessageDecoder instance used by this class.
+ *
+ * @return the message decoder used by this class
+ */
+ public static BinaryMessageDecoder<TestRecordWithMapsAndArrays> getDecoder() {
+ return DECODER;
+ }
+
+ /**
+ * Create a new BinaryMessageDecoder instance for this class that uses the
+ * specified {@link SchemaStore}.
+ *
+ * @param resolver a {@link SchemaStore} used to find schemas by fingerprint
+ * @return a BinaryMessageDecoder instance for this class backed by the given
+ * SchemaStore
+ */
+ public static BinaryMessageDecoder<TestRecordWithMapsAndArrays> createDecoder(SchemaStore resolver) {
+ return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver);
+ }
+
+ /**
+ * Serializes this TestRecordWithMapsAndArrays to a ByteBuffer.
+ *
+ * @return a buffer holding the serialized data for this instance
+ * @throws java.io.IOException if this instance could not be serialized
+ */
+ public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException {
+ return ENCODER.encode(this);
+ }
+
+ /**
+ * Deserializes a TestRecordWithMapsAndArrays from a ByteBuffer.
+ *
+ * @param b a byte buffer holding serialized data for an instance of this class
+ * @return a TestRecordWithMapsAndArrays instance decoded from the given buffer
+ * @throws java.io.IOException if the given bytes could not be deserialized into
+ * an instance of this class
+ */
+ public static TestRecordWithMapsAndArrays fromByteBuffer(java.nio.ByteBuffer b) throws java.io.IOException {
+ return DECODER.decode(b);
+ }
+
+ private java.util.List<String> arr;
+ private java.util.Map<String, Long> map;
+ private java.util.List<java.util.List<String>> nested_arr;
+ private java.util.Map<String, java.util.Map<String, Long>> nested_map;
+
+ /**
+ * Default constructor. Note that this does not initialize fields to their
+ * default values from the schema. If that is desired then one should use
+ * <code>newBuilder()</code>.
+ */
+ public TestRecordWithMapsAndArrays() {
+ }
+
+ /**
+ * All-args constructor.
+ *
+ * @param arr The new value for arr
+ * @param map The new value for map
+ * @param nested_arr The new value for nested_arr
+ * @param nested_map The new value for nested_map
+ */
+ public TestRecordWithMapsAndArrays(java.util.List<String> arr, java.util.Map<String, Long> map,
+ java.util.List<java.util.List<String>> nested_arr,
+ java.util.Map<String, java.util.Map<String, Long>> nested_map) {
+ this.arr = arr;
+ this.map = map;
+ this.nested_arr = nested_arr;
+ this.nested_map = nested_map;
+ }
+
+ @Override
+ public SpecificData getSpecificData() {
+ return MODEL$;
+ }
+
+ @Override
+ public org.apache.avro.Schema getSchema() {
+ return SCHEMA$;
+ }
+
+ // Used by DatumWriter. Applications should not call.
+ @Override
+ public Object get(int field$) {
+ switch (field$) {
+ case 0:
+ return arr;
+ case 1:
+ return map;
+ case 2:
+ return nested_arr;
+ case 3:
+ return nested_map;
+ default:
+ throw new IndexOutOfBoundsException("Invalid index: " + field$);
+ }
+ }
+
+ // Used by DatumReader. Applications should not call.
+ @Override
+ @SuppressWarnings(value = "unchecked")
+ public void put(int field$, Object value$) {
+ switch (field$) {
+ case 0:
+ arr = (java.util.List<String>) value$;
+ break;
+ case 1:
+ map = (java.util.Map<String, Long>) value$;
+ break;
+ case 2:
+ nested_arr = (java.util.List<java.util.List<String>>) value$;
+ break;
+ case 3:
+ nested_map = (java.util.Map<String, java.util.Map<String, Long>>) value$;
+ break;
+ default:
+ throw new IndexOutOfBoundsException("Invalid index: " + field$);
+ }
+ }
+
+ /**
+ * Gets the value of the 'arr' field.
+ *
+ * @return The value of the 'arr' field.
+ */
+ public java.util.List<String> getArr() {
+ return arr;
+ }
+
+ /**
+ * Sets the value of the 'arr' field.
+ *
+ * @param value the value to set.
+ */
+ public void setArr(java.util.List<String> value) {
+ this.arr = value;
+ }
+
+ /**
+ * Gets the value of the 'map' field.
+ *
+ * @return The value of the 'map' field.
+ */
+ public java.util.Map<String, Long> getMap() {
+ return map;
+ }
+
+ /**
+ * Sets the value of the 'map' field.
+ *
+ * @param value the value to set.
+ */
+ public void setMap(java.util.Map<String, Long> value) {
+ this.map = value;
+ }
+
+ /**
+ * Gets the value of the 'nested_arr' field.
+ *
+ * @return The value of the 'nested_arr' field.
+ */
+ public java.util.List<java.util.List<String>> getNestedArr() {
+ return nested_arr;
+ }
+
+ /**
+ * Sets the value of the 'nested_arr' field.
+ *
+ * @param value the value to set.
+ */
+ public void setNestedArr(java.util.List<java.util.List<String>> value) {
+ this.nested_arr = value;
+ }
+
+ /**
+ * Gets the value of the 'nested_map' field.
+ *
+ * @return The value of the 'nested_map' field.
+ */
+ public java.util.Map<String, java.util.Map<String, Long>> getNestedMap() {
+ return nested_map;
+ }
+
+ /**
+ * Sets the value of the 'nested_map' field.
+ *
+ * @param value the value to set.
+ */
+ public void setNestedMap(java.util.Map<String, java.util.Map<String, Long>> value) {
+ this.nested_map = value;
+ }
+
+ /**
+ * Creates a new TestRecordWithMapsAndArrays RecordBuilder.
+ *
+ * @return A new TestRecordWithMapsAndArrays RecordBuilder
+ */
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ /**
+ * Creates a new TestRecordWithMapsAndArrays RecordBuilder by copying an
+ * existing Builder.
+ *
+ * @param other The existing builder to copy.
+ * @return A new TestRecordWithMapsAndArrays RecordBuilder
+ */
+ public static Builder newBuilder(Builder other) {
+ if (other == null) {
+ return new Builder();
+ } else {
+ return new Builder(other);
+ }
+ }
+
+ /**
+ * Creates a new TestRecordWithMapsAndArrays RecordBuilder by copying an
+ * existing TestRecordWithMapsAndArrays instance.
+ *
+ * @param other The existing instance to copy.
+ * @return A new TestRecordWithMapsAndArrays RecordBuilder
+ */
+ public static Builder newBuilder(TestRecordWithMapsAndArrays other) {
+ if (other == null) {
+ return new Builder();
+ } else {
+ return new Builder(other);
+ }
+ }
+
+ /**
+ * RecordBuilder for TestRecordWithMapsAndArrays instances.
+ */
+ @AvroGenerated
+ public static class Builder extends SpecificRecordBuilderBase<TestRecordWithMapsAndArrays>
+ implements org.apache.avro.data.RecordBuilder<TestRecordWithMapsAndArrays> {
+
+ private java.util.List<String> arr;
+ private java.util.Map<String, Long> map;
+ private java.util.List<java.util.List<String>> nested_arr;
+ private java.util.Map<String, java.util.Map<String, Long>> nested_map;
+
+ /** Creates a new Builder */
+ private Builder() {
+ super(SCHEMA$, MODEL$);
+ }
+
+ /**
+ * Creates a Builder by copying an existing Builder.
+ *
+ * @param other The existing Builder to copy.
+ */
+ private Builder(Builder other) {
+ super(other);
+ if (isValidValue(fields()[0], other.arr)) {
+ this.arr = data().deepCopy(fields()[0].schema(), other.arr);
+ fieldSetFlags()[0] = other.fieldSetFlags()[0];
+ }
+ if (isValidValue(fields()[1], other.map)) {
+ this.map = data().deepCopy(fields()[1].schema(), other.map);
+ fieldSetFlags()[1] = other.fieldSetFlags()[1];
+ }
+ if (isValidValue(fields()[2], other.nested_arr)) {
+ this.nested_arr = data().deepCopy(fields()[2].schema(), other.nested_arr);
+ fieldSetFlags()[2] = other.fieldSetFlags()[2];
+ }
+ if (isValidValue(fields()[3], other.nested_map)) {
+ this.nested_map = data().deepCopy(fields()[3].schema(), other.nested_map);
+ fieldSetFlags()[3] = other.fieldSetFlags()[3];
+ }
+ }
+
+ /**
+ * Creates a Builder by copying an existing TestRecordWithMapsAndArrays instance
+ *
+ * @param other The existing instance to copy.
+ */
+ private Builder(TestRecordWithMapsAndArrays other) {
+ super(SCHEMA$, MODEL$);
+ if (isValidValue(fields()[0], other.arr)) {
+ this.arr = data().deepCopy(fields()[0].schema(), other.arr);
+ fieldSetFlags()[0] = true;
+ }
+ if (isValidValue(fields()[1], other.map)) {
+ this.map = data().deepCopy(fields()[1].schema(), other.map);
+ fieldSetFlags()[1] = true;
+ }
+ if (isValidValue(fields()[2], other.nested_arr)) {
+ this.nested_arr = data().deepCopy(fields()[2].schema(), other.nested_arr);
+ fieldSetFlags()[2] = true;
+ }
+ if (isValidValue(fields()[3], other.nested_map)) {
+ this.nested_map = data().deepCopy(fields()[3].schema(), other.nested_map);
+ fieldSetFlags()[3] = true;
+ }
+ }
+
+ /**
+ * Gets the value of the 'arr' field.
+ *
+ * @return The value.
+ */
+ public java.util.List<String> getArr() {
+ return arr;
+ }
+
+ /**
+ * Sets the value of the 'arr' field.
+ *
+ * @param value The value of 'arr'.
+ * @return This builder.
+ */
+ public Builder setArr(java.util.List<String> value) {
+ validate(fields()[0], value);
+ this.arr = value;
+ fieldSetFlags()[0] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'arr' field has been set.
+ *
+ * @return True if the 'arr' field has been set, false otherwise.
+ */
+ public boolean hasArr() {
+ return fieldSetFlags()[0];
+ }
+
+ /**
+ * Clears the value of the 'arr' field.
+ *
+ * @return This builder.
+ */
+ public Builder clearArr() {
+ arr = null;
+ fieldSetFlags()[0] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'map' field.
+ *
+ * @return The value.
+ */
+ public java.util.Map<String, Long> getMap() {
+ return map;
+ }
+
+ /**
+ * Sets the value of the 'map' field.
+ *
+ * @param value The value of 'map'.
+ * @return This builder.
+ */
+ public Builder setMap(java.util.Map<String, Long> value) {
+ validate(fields()[1], value);
+ this.map = value;
+ fieldSetFlags()[1] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'map' field has been set.
+ *
+ * @return True if the 'map' field has been set, false otherwise.
+ */
+ public boolean hasMap() {
+ return fieldSetFlags()[1];
+ }
+
+ /**
+ * Clears the value of the 'map' field.
+ *
+ * @return This builder.
+ */
+ public Builder clearMap() {
+ map = null;
+ fieldSetFlags()[1] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'nested_arr' field.
+ *
+ * @return The value.
+ */
+ public java.util.List<java.util.List<String>> getNestedArr() {
+ return nested_arr;
+ }
+
+ /**
+ * Sets the value of the 'nested_arr' field.
+ *
+ * @param value The value of 'nested_arr'.
+ * @return This builder.
+ */
+ public Builder setNestedArr(java.util.List<java.util.List<String>> value) {
+ validate(fields()[2], value);
+ this.nested_arr = value;
+ fieldSetFlags()[2] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'nested_arr' field has been set.
+ *
+ * @return True if the 'nested_arr' field has been set, false otherwise.
+ */
+ public boolean hasNestedArr() {
+ return fieldSetFlags()[2];
+ }
+
+ /**
+ * Clears the value of the 'nested_arr' field.
+ *
+ * @return This builder.
+ */
+ public Builder clearNestedArr() {
+ nested_arr = null;
+ fieldSetFlags()[2] = false;
+ return this;
+ }
+
+ /**
+ * Gets the value of the 'nested_map' field.
+ *
+ * @return The value.
+ */
+ public java.util.Map<String, java.util.Map<String, Long>> getNestedMap() {
+ return nested_map;
+ }
+
+ /**
+ * Sets the value of the 'nested_map' field.
+ *
+ * @param value The value of 'nested_map'.
+ * @return This builder.
+ */
+ public Builder setNestedMap(java.util.Map<String, java.util.Map<String, Long>> value) {
+ validate(fields()[3], value);
+ this.nested_map = value;
+ fieldSetFlags()[3] = true;
+ return this;
+ }
+
+ /**
+ * Checks whether the 'nested_map' field has been set.
+ *
+ * @return True if the 'nested_map' field has been set, false otherwise.
+ */
+ public boolean hasNestedMap() {
+ return fieldSetFlags()[3];
+ }
+
+ /**
+ * Clears the value of the 'nested_map' field.
+ *
+ * @return This builder.
+ */
+ public Builder clearNestedMap() {
+ nested_map = null;
+ fieldSetFlags()[3] = false;
+ return this;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public TestRecordWithMapsAndArrays build() {
+ try {
+ TestRecordWithMapsAndArrays record = new TestRecordWithMapsAndArrays();
+ record.arr = fieldSetFlags()[0] ? this.arr : (java.util.List<String>) defaultValue(fields()[0]);
+ record.map = fieldSetFlags()[1] ? this.map : (java.util.Map<String, Long>) defaultValue(fields()[1]);
+ record.nested_arr = fieldSetFlags()[2] ? this.nested_arr
+ : (java.util.List<java.util.List<String>>) defaultValue(fields()[2]);
+ record.nested_map = fieldSetFlags()[3] ? this.nested_map
+ : (java.util.Map<String, java.util.Map<String, Long>>) defaultValue(fields()[3]);
+ return record;
+ } catch (org.apache.avro.AvroMissingFieldException e) {
+ throw e;
+ } catch (Exception e) {
+ throw new org.apache.avro.AvroRuntimeException(e);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumWriter<TestRecordWithMapsAndArrays> WRITER$ = (org.apache.avro.io.DatumWriter<TestRecordWithMapsAndArrays>) MODEL$
+ .createDatumWriter(SCHEMA$);
+
+ @Override
+ public void writeExternal(java.io.ObjectOutput out) throws java.io.IOException {
+ WRITER$.write(this, SpecificData.getEncoder(out));
+ }
+
+ @SuppressWarnings("unchecked")
+ private static final org.apache.avro.io.DatumReader<TestRecordWithMapsAndArrays> READER$ = (org.apache.avro.io.DatumReader<TestRecordWithMapsAndArrays>) MODEL$
+ .createDatumReader(SCHEMA$);
+
+ @Override
+ public void readExternal(java.io.ObjectInput in) throws java.io.IOException {
+ READER$.read(this, SpecificData.getDecoder(in));
+ }
+
+ @Override
+ protected boolean hasCustomCoders() {
+ return true;
+ }
+
+ @Override
+ public void customEncode(org.apache.avro.io.Encoder out) throws java.io.IOException {
+ long size0 = this.arr.size();
+ out.writeArrayStart();
+ out.setItemCount(size0);
+ long actualSize0 = 0;
+ for (String e0 : this.arr) {
+ actualSize0++;
+ out.startItem();
+ out.writeString(e0);
+ }
+ out.writeArrayEnd();
+ if (actualSize0 != size0)
+ throw new java.util.ConcurrentModificationException(
+ "Array-size written was " + size0 + ", but element count was " + actualSize0 + ".");
+
+ long size1 = this.map.size();
+ out.writeMapStart();
+ out.setItemCount(size1);
+ long actualSize1 = 0;
+ for (java.util.Map.Entry<String, Long> e1 : this.map.entrySet()) {
+ actualSize1++;
+ out.startItem();
+ out.writeString(e1.getKey());
+ Long v1 = e1.getValue();
+ out.writeLong(v1);
+ }
+ out.writeMapEnd();
+ if (actualSize1 != size1)
+ throw new java.util.ConcurrentModificationException(
+ "Map-size written was " + size1 + ", but element count was " + actualSize1 + ".");
+
+ long size2 = this.nested_arr.size();
+ out.writeArrayStart();
+ out.setItemCount(size2);
+ long actualSize2 = 0;
+ for (java.util.List<String> e2 : this.nested_arr) {
+ actualSize2++;
+ out.startItem();
+ long size3 = e2.size();
+ out.writeArrayStart();
+ out.setItemCount(size3);
+ long actualSize3 = 0;
+ for (String e3 : e2) {
+ actualSize3++;
+ out.startItem();
+ out.writeString(e3);
+ }
+ out.writeArrayEnd();
+ if (actualSize3 != size3)
+ throw new java.util.ConcurrentModificationException(
+ "Array-size written was " + size3 + ", but element count was " + actualSize3 + ".");
+ }
+ out.writeArrayEnd();
+ if (actualSize2 != size2)
+ throw new java.util.ConcurrentModificationException(
+ "Array-size written was " + size2 + ", but element count was " + actualSize2 + ".");
+
+ long size4 = this.nested_map.size();
+ out.writeMapStart();
+ out.setItemCount(size4);
+ long actualSize4 = 0;
+ for (java.util.Map.Entry<String, java.util.Map<String, Long>> e4 : this.nested_map.entrySet()) {
+ actualSize4++;
+ out.startItem();
+ out.writeString(e4.getKey());
+ java.util.Map<String, Long> v4 = e4.getValue();
+ long size5 = v4.size();
+ out.writeMapStart();
+ out.setItemCount(size5);
+ long actualSize5 = 0;
+ for (java.util.Map.Entry<String, Long> e5 : v4.entrySet()) {
+ actualSize5++;
+ out.startItem();
+ out.writeString(e5.getKey());
+ Long v5 = e5.getValue();
+ out.writeLong(v5);
+ }
+ out.writeMapEnd();
+ if (actualSize5 != size5)
+ throw new java.util.ConcurrentModificationException(
+ "Map-size written was " + size5 + ", but element count was " + actualSize5 + ".");
+ }
+ out.writeMapEnd();
+ if (actualSize4 != size4)
+ throw new java.util.ConcurrentModificationException(
+ "Map-size written was " + size4 + ", but element count was " + actualSize4 + ".");
+
+ }
+
+ @Override
+ public void customDecode(org.apache.avro.io.ResolvingDecoder in) throws java.io.IOException {
+ org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff();
+ if (fieldOrder == null) {
+ long size0 = in.readArrayStart();
+ java.util.List<String> a0 = this.arr;
+ if (a0 == null) {
+ a0 = new SpecificData.Array<String>((int) size0, SCHEMA$.getField("arr").schema());
+ this.arr = a0;
+ } else
+ a0.clear();
+ SpecificData.Array<String> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<String>) a0 : null);
+ for (; 0 < size0; size0 = in.arrayNext()) {
+ for (; size0 != 0; size0--) {
+ String e0 = (ga0 != null ? ga0.peek() : null);
+ e0 = in.readString();
+ a0.add(e0);
+ }
+ }
+
+ long size1 = in.readMapStart();
+ java.util.Map<String, Long> m1 = this.map; // Need fresh name due to limitation of macro system
+ if (m1 == null) {
+ m1 = new java.util.HashMap<String, Long>((int) size1);
+ this.map = m1;
+ } else
+ m1.clear();
+ for (; 0 < size1; size1 = in.mapNext()) {
+ for (; size1 != 0; size1--) {
+ String k1 = null;
+ k1 = in.readString();
+ Long v1 = null;
+ v1 = in.readLong();
+ m1.put(k1, v1);
+ }
+ }
+
+ long size2 = in.readArrayStart();
+ java.util.List<java.util.List<String>> a2 = this.nested_arr;
+ if (a2 == null) {
+ a2 = new SpecificData.Array<java.util.List<String>>((int) size2, SCHEMA$.getField("nested_arr").schema());
+ this.nested_arr = a2;
+ } else
+ a2.clear();
+ SpecificData.Array<java.util.List<String>> ga2 = (a2 instanceof SpecificData.Array
+ ? (SpecificData.Array<java.util.List<String>>) a2
+ : null);
+ for (; 0 < size2; size2 = in.arrayNext()) {
+ for (; size2 != 0; size2--) {
+ java.util.List<String> e2 = (ga2 != null ? ga2.peek() : null);
+ long size3 = in.readArrayStart();
+ java.util.List<String> a3 = e2;
+ if (a3 == null) {
+ a3 = new SpecificData.Array<String>((int) size3, SCHEMA$.getField("nested_arr").schema().getElementType());
+ e2 = a3;
+ } else
+ a3.clear();
+ SpecificData.Array<String> ga3 = (a3 instanceof SpecificData.Array ? (SpecificData.Array<String>) a3 : null);
+ for (; 0 < size3; size3 = in.arrayNext()) {
+ for (; size3 != 0; size3--) {
+ String e3 = (ga3 != null ? ga3.peek() : null);
+ e3 = in.readString();
+ a3.add(e3);
+ }
+ }
+ a2.add(e2);
+ }
+ }
+
+ long size4 = in.readMapStart();
+ java.util.Map<String, java.util.Map<String, Long>> m4 = this.nested_map; // Need fresh name due to limitation of
+ // macro system
+ if (m4 == null) {
+ m4 = new java.util.HashMap<String, java.util.Map<String, Long>>((int) size4);
+ this.nested_map = m4;
+ } else
+ m4.clear();
+ for (; 0 < size4; size4 = in.mapNext()) {
+ for (; size4 != 0; size4--) {
+ String k4 = null;
+ k4 = in.readString();
+ java.util.Map<String, Long> v4 = null;
+ long size5 = in.readMapStart();
+ java.util.Map<String, Long> m5 = v4; // Need fresh name due to limitation of macro system
+ if (m5 == null) {
+ m5 = new java.util.HashMap<String, Long>((int) size5);
+ v4 = m5;
+ } else
+ m5.clear();
+ for (; 0 < size5; size5 = in.mapNext()) {
+ for (; size5 != 0; size5--) {
+ String k5 = null;
+ k5 = in.readString();
+ Long v5 = null;
+ v5 = in.readLong();
+ m5.put(k5, v5);
+ }
+ }
+ m4.put(k4, v4);
+ }
+ }
+
+ } else {
+ for (int i = 0; i < 4; i++) {
+ switch (fieldOrder[i].pos()) {
+ case 0:
+ long size0 = in.readArrayStart();
+ java.util.List<String> a0 = this.arr;
+ if (a0 == null) {
+ a0 = new SpecificData.Array<String>((int) size0, SCHEMA$.getField("arr").schema());
+ this.arr = a0;
+ } else
+ a0.clear();
+ SpecificData.Array<String> ga0 = (a0 instanceof SpecificData.Array ? (SpecificData.Array<String>) a0 : null);
+ for (; 0 < size0; size0 = in.arrayNext()) {
+ for (; size0 != 0; size0--) {
+ String e0 = (ga0 != null ? ga0.peek() : null);
+ e0 = in.readString();
+ a0.add(e0);
+ }
+ }
+ break;
+
+ case 1:
+ long size1 = in.readMapStart();
+ java.util.Map<String, Long> m1 = this.map; // Need fresh name due to limitation of macro system
+ if (m1 == null) {
+ m1 = new java.util.HashMap<String, Long>((int) size1);
+ this.map = m1;
+ } else
+ m1.clear();
+ for (; 0 < size1; size1 = in.mapNext()) {
+ for (; size1 != 0; size1--) {
+ String k1 = null;
+ k1 = in.readString();
+ Long v1 = null;
+ v1 = in.readLong();
+ m1.put(k1, v1);
+ }
+ }
+ break;
+
+ case 2:
+ long size2 = in.readArrayStart();
+ java.util.List<java.util.List<String>> a2 = this.nested_arr;
+ if (a2 == null) {
+ a2 = new SpecificData.Array<java.util.List<String>>((int) size2, SCHEMA$.getField("nested_arr").schema());
+ this.nested_arr = a2;
+ } else
+ a2.clear();
+ SpecificData.Array<java.util.List<String>> ga2 = (a2 instanceof SpecificData.Array
+ ? (SpecificData.Array<java.util.List<String>>) a2
+ : null);
+ for (; 0 < size2; size2 = in.arrayNext()) {
+ for (; size2 != 0; size2--) {
+ java.util.List<String> e2 = (ga2 != null ? ga2.peek() : null);
+ long size3 = in.readArrayStart();
+ java.util.List<String> a3 = e2;
+ if (a3 == null) {
+ a3 = new SpecificData.Array<String>((int) size3,
+ SCHEMA$.getField("nested_arr").schema().getElementType());
+ e2 = a3;
+ } else
+ a3.clear();
+ SpecificData.Array<String> ga3 = (a3 instanceof SpecificData.Array ? (SpecificData.Array<String>) a3
+ : null);
+ for (; 0 < size3; size3 = in.arrayNext()) {
+ for (; size3 != 0; size3--) {
+ String e3 = (ga3 != null ? ga3.peek() : null);
+ e3 = in.readString();
+ a3.add(e3);
+ }
+ }
+ a2.add(e2);
+ }
+ }
+ break;
+
+ case 3:
+ long size4 = in.readMapStart();
+ java.util.Map<String, java.util.Map<String, Long>> m4 = this.nested_map; // Need fresh name due to limitation
+ // of macro system
+ if (m4 == null) {
+ m4 = new java.util.HashMap<String, java.util.Map<String, Long>>((int) size4);
+ this.nested_map = m4;
+ } else
+ m4.clear();
+ for (; 0 < size4; size4 = in.mapNext()) {
+ for (; size4 != 0; size4--) {
+ String k4 = null;
+ k4 = in.readString();
+ java.util.Map<String, Long> v4 = null;
+ long size5 = in.readMapStart();
+ java.util.Map<String, Long> m5 = v4; // Need fresh name due to limitation of macro system
+ if (m5 == null) {
+ m5 = new java.util.HashMap<String, Long>((int) size5);
+ v4 = m5;
+ } else
+ m5.clear();
+ for (; 0 < size5; size5 = in.mapNext()) {
+ for (; size5 != 0; size5--) {
+ String k5 = null;
+ k5 = in.readString();
+ Long v5 = null;
+ v5 = in.readLong();
+ m5.put(k5, v5);
+ }
+ }
+ m4.put(k4, v4);
+ }
+ }
+ break;
+
+ default:
+ throw new java.io.IOException("Corrupt ResolvingDecoder.");
+ }
+ }
+ }
+ }
+}
diff --git a/lang/java/avro/src/test/resources/TestRecordWithMapsAndArrays.avsc b/lang/java/avro/src/test/resources/TestRecordWithMapsAndArrays.avsc
new file mode 100644
index 00000000000..d19c0d8dfb9
--- /dev/null
+++ b/lang/java/avro/src/test/resources/TestRecordWithMapsAndArrays.avsc
@@ -0,0 +1,47 @@
+{
+ "type": "record",
+ "name": "TestRecordWithMapsAndArrays",
+ "namespace": "org.apache.avro.specific",
+ "fields": [
+ {
+ "name": "arr",
+ "type": {
+ "type": "array",
+ "items": "string",
+ "default": []
+ }
+ },
+ {
+ "name": "map",
+ "type": {
+ "type": "map",
+ "values": "long",
+ "default": {}
+ }
+ },
+ {
+ "name": "nested_arr",
+ "type": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": "string",
+ "default": []
+ },
+ "default": []
+ }
+ },
+ {
+ "name": "nested_map",
+ "type": {
+ "type": "map",
+ "values": {
+ "type": "map",
+ "values": "long",
+ "default": {}
+ },
+ "default": {}
+ }
+ }
+ ]
+}