Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AVRO-3863: [Java] Delete temporary test data after tests finish #2506

Merged
merged 3 commits into from
Sep 25, 2023
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,12 @@
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

@SuppressWarnings("restriction")
public class TestDataFileReader {
@TempDir
public Path DATA_DIR;
Copy link
Contributor

@paliwalashish paliwalashish Sep 24, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we revisit the variable name, please? The format used here is the one for constants, giving the impression that it is static final. The same applies to similarly named fields in the other classes.


// regression test for bug AVRO-2286
@Test
Expand Down Expand Up @@ -90,7 +93,7 @@ void throttledInputStream() throws IOException {
Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
.parse("{\"type\": \"record\", \"name\": \"TestSchema\", \"fields\": "
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
File f = Files.createTempFile("testThrottledInputStream", ".avro").toFile();
File f = DATA_DIR.resolve("testThrottledInputStream.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
Expand Down Expand Up @@ -149,7 +152,7 @@ void inputStreamEOF() throws IOException {
Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
.parse("{\"type\": \"record\", \"name\": \"TestSchema\", \"fields\": "
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
File f = Files.createTempFile("testInputStreamEOF", ".avro").toFile();
File f = DATA_DIR.resolve("testInputStreamEOF.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
Expand Down Expand Up @@ -200,7 +203,7 @@ void ignoreSchemaValidationOnRead() throws IOException {
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");

// Create a file with the legacy schema.
File f = Files.createTempFile("testIgnoreSchemaValidationOnRead", ".avro").toFile();
File f = DATA_DIR.resolve("testIgnoreSchemaValidationOnRead.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
Expand All @@ -214,7 +217,7 @@ void ignoreSchemaValidationOnRead() throws IOException {

@Test
void invalidMagicLength() throws IOException {
File f = Files.createTempFile("testInvalidMagicLength", ".avro").toFile();
File f = DATA_DIR.resolve("testInvalidMagicLength.avro").toFile();
try (FileWriter w = new FileWriter(f)) {
w.write("-");
}
Expand All @@ -226,7 +229,7 @@ void invalidMagicLength() throws IOException {

@Test
void invalidMagicBytes() throws IOException {
File f = Files.createTempFile("testInvalidMagicBytes", ".avro").toFile();
File f = DATA_DIR.resolve("testInvalidMagicBytes.avro").toFile();
try (FileWriter w = new FileWriter(f)) {
w.write("invalid");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ public class TestEncoders {
private static EncoderFactory factory = EncoderFactory.get();

@TempDir
public File DIR;
public Path DIR;

@Test
void binaryEncoderInit() throws IOException {
Expand Down Expand Up @@ -261,7 +261,7 @@ void arrayBackedByteBuffer() throws IOException {

@Test
void mappedByteBuffer() throws IOException {
Path file = Paths.get(DIR.getPath() + "testMappedByteBuffer.avro");
Path file = DIR.resolve("testMappedByteBuffer.avro");
Files.write(file, someBytes(EXAMPLE_DATA_SIZE));
MappedByteBuffer buffer = FileChannel.open(file, StandardOpenOption.READ).map(FileChannel.MapMode.READ_ONLY, 0,
EXAMPLE_DATA_SIZE);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
Expand All @@ -37,14 +38,16 @@
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class TestCreateRandomFileTool {
private static final String COUNT = System.getProperty("test.count", "200");
private static final File DIR = new File("/tmp");
private static final File OUT_FILE = new File(DIR, "random.avro");

@TempDir
private Path DIR;
private static final File SCHEMA_FILE = new File("../../../share/test/schemas/weather.avsc");

private final Schema.Parser schemaParser = new Schema.Parser();
Expand Down Expand Up @@ -83,12 +86,13 @@ private int run(List<String> args) throws Exception {

private void check(String... extraArgs) throws Exception {
ArrayList<String> args = new ArrayList<>();
args.addAll(Arrays.asList(OUT_FILE.toString(), "--count", COUNT, "--schema-file", SCHEMA_FILE.toString(), "--seed",
File outFile = DIR.resolve("random.avro").toFile();
args.addAll(Arrays.asList(outFile.toString(), "--count", COUNT, "--schema-file", SCHEMA_FILE.toString(), "--seed",
Long.toString(SEED)));
args.addAll(Arrays.asList(extraArgs));
run(args);

DataFileReader<Object> reader = new DataFileReader<>(OUT_FILE, new GenericDatumReader<>());
DataFileReader<Object> reader = new DataFileReader<>(outFile, new GenericDatumReader<>());

Iterator<Object> found = reader.iterator();
for (Object expected : new RandomData(schemaParser.parse(SCHEMA_FILE), Integer.parseInt(COUNT), SEED))
Expand All @@ -99,8 +103,9 @@ private void check(String... extraArgs) throws Exception {

private void checkMissingCount(String... extraArgs) throws Exception {
ArrayList<String> args = new ArrayList<>();
File outFile = DIR.resolve("random.avro").toFile();
args.addAll(
Arrays.asList(OUT_FILE.toString(), "--schema-file", SCHEMA_FILE.toString(), "--seed", Long.toString(SEED)));
Arrays.asList(outFile.toString(), "--schema-file", SCHEMA_FILE.toString(), "--seed", Long.toString(SEED)));
args.addAll(Arrays.asList(extraArgs));
run(args);
assertTrue(err.toString().contains("Need count (--count)"));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Iterator;

Expand All @@ -31,14 +32,15 @@
import org.apache.avro.util.RandomData;
import org.apache.trevni.avro.AvroColumnReader;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class TestToTrevniTool {
private static final long SEED = System.currentTimeMillis();

private static final int COUNT = Integer.parseInt(System.getProperty("test.count", "200"));
private static final File DIR = new File("/tmp");
private static final File AVRO_FILE = new File(DIR, "random.avro");
private static final File TREVNI_FILE = new File(DIR, "random.trv");

@TempDir
private Path DIR;
private static final File SCHEMA_FILE = new File("../../../share/test/schemas/weather.avsc");

private String run(String... args) throws Exception {
Expand All @@ -53,14 +55,16 @@ void test() throws Exception {
Schema schema = new Schema.Parser().parse(SCHEMA_FILE);

DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>());
writer.create(schema, Util.createFromFS(AVRO_FILE.toString()));
File avroFile = DIR.resolve("random.avro").toFile();
writer.create(schema, avroFile);
for (Object datum : new RandomData(schema, COUNT, SEED))
writer.append(datum);
writer.close();

run(AVRO_FILE.toString(), TREVNI_FILE.toString());
File trevniFile = DIR.resolve("random.trv").toFile();
run(avroFile.toString(), trevniFile.toString());

AvroColumnReader<Object> reader = new AvroColumnReader<>(new AvroColumnReader.Params(TREVNI_FILE));
AvroColumnReader<Object> reader = new AvroColumnReader<>(new AvroColumnReader.Params(trevniFile));
Iterator<Object> found = reader.iterator();
for (Object expected : new RandomData(schema, COUNT, SEED))
assertEquals(expected, found.next());
Expand Down
Loading