Skip to content

Commit

Permalink
AVRO-3876: test & fix encoder
Browse files Browse the repository at this point in the history
  • Loading branch information
clesaec committed Sep 29, 2023
1 parent dd37e64 commit 277334e
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,7 @@ public void writeLong(long n) throws IOException {
@Override
public void writeFloat(float f) throws IOException {
  parser.advance(Symbol.FLOAT);
  // Widen to double before writing so the generator emits the float's full
  // decimal precision (writeNumber(float) can shorten the representation,
  // losing digits on a JSON round trip — AVRO-3876).
  // Use an explicit cast rather than `f + 0d`: the addition would normalize
  // -0.0f to +0.0 (IEEE 754: -0.0 + 0.0 == +0.0) and drop the sign of
  // negative zero, while a widening cast preserves it exactly.
  out.writeNumber((double) f);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,10 @@
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.SystemLimitException;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.util.ByteBufferInputStream;
import org.apache.avro.util.ByteBufferOutputStream;
import org.apache.avro.util.RandomData;
Expand All @@ -43,6 +45,7 @@
import java.util.Arrays;

import static org.apache.avro.TestSystemLimitException.*;
import static org.junit.jupiter.api.Assertions.assertEquals;

public class TestBinaryDecoder {
// prime number buffer size so that looping tests hit the buffer edge
Expand Down Expand Up @@ -682,4 +685,26 @@ void eof(boolean useDirect) throws IOException {
Assertions.assertThrows(EOFException.class, () -> d.readInt());
}

@Test
void testFloatPrecision() throws Exception {
  // A float whose decimal expansion exercises the full 24-bit mantissa.
  final float original = 33.33000183105469f;
  final Schema schema = new Schema.Parser()
      .parse("{\"type\":\"record\",\"name\":\"X\",\"fields\":" + "[{\"type\":\"float\",\"name\":\"n\"}]}");

  // Write a single-field record through the direct binary encoder.
  GenericData.Record written = new GenericData.Record(schema);
  written.put(0, original);
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  Encoder encoder = EncoderFactory.get().directBinaryEncoder(buffer, null);
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  writer.write(written, encoder);
  encoder.flush();

  // Read the bytes back and verify no precision was lost on the round trip.
  DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
  Decoder decoder = DecoderFactory.get().directBinaryDecoder(new ByteArrayInputStream(buffer.toByteArray()), null);
  GenericRecord decoded = reader.read(null, decoder);
  assertEquals(original + 0d, ((float) decoded.get("n")) + 0d);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,15 @@
import org.apache.avro.AvroTypeException;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

public class TestJsonDecoder {
Expand Down Expand Up @@ -66,6 +69,29 @@ private void checkNumeric(String type, Object value) throws Exception {
}
}

@Test
void testFloatPrecision() throws Exception {
  // A float whose decimal expansion exercises the full 24-bit mantissa.
  final float original = 33.33000183105469f;
  final Schema schema = new Schema.Parser()
      .parse("{\"type\":\"record\",\"name\":\"X\",\"fields\":" + "[{\"type\":\"float\",\"name\":\"n\"}]}");

  // Encode one record to JSON text.
  GenericData.Record written = new GenericData.Record(schema);
  written.put(0, original);
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, buffer);
  DatumWriter<GenericRecord> writer = new GenericDatumWriter<>(schema);
  writer.write(written, encoder);
  encoder.flush();

  // The emitted JSON must carry the whole float precision.
  assertEquals("{\"n\":33.33000183105469}", buffer.toString());

  // Decoding that JSON must recover the exact original value.
  DatumReader<GenericRecord> reader = new GenericDatumReader<>(schema);
  Decoder decoder = DecoderFactory.get().jsonDecoder(schema, buffer.toString());
  GenericRecord decoded = reader.read(null, decoder);
  assertEquals(original + 0d, ((float) decoded.get("n")) + 0d);
}

// Ensure that even if the order of fields in JSON is different from the order
// in schema,
// it works.
Expand Down

0 comments on commit 277334e

Please sign in to comment.