Skip to content

Commit

Permalink
AVRO-3863: [Java] Delete temporary test data after tests finish (#2506)
Browse files Browse the repository at this point in the history
* AVRO-3863: [Java] Delete temporary test data after tests finish

* Use @TempDir

* Align with the Java naming convention
  • Loading branch information
sarutak authored Sep 25, 2023
1 parent 453a1f5 commit 9ed7379
Show file tree
Hide file tree
Showing 4 changed files with 30 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,12 @@
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericDatumWriter;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

@SuppressWarnings("restriction")
public class TestDataFileReader {
@TempDir
public Path dataDir;

// regression test for bug AVRO-2286
@Test
Expand Down Expand Up @@ -90,7 +93,7 @@ void throttledInputStream() throws IOException {
Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
.parse("{\"type\": \"record\", \"name\": \"TestSchema\", \"fields\": "
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
File f = Files.createTempFile("testThrottledInputStream", ".avro").toFile();
File f = dataDir.resolve("testThrottledInputStream.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
Expand Down Expand Up @@ -149,7 +152,7 @@ void inputStreamEOF() throws IOException {
Schema legacySchema = new Schema.Parser(Schema.NameValidator.NO_VALIDATION).setValidateDefaults(false)
.parse("{\"type\": \"record\", \"name\": \"TestSchema\", \"fields\": "
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");
File f = Files.createTempFile("testInputStreamEOF", ".avro").toFile();
File f = dataDir.resolve("testInputStreamEOF.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
Expand Down Expand Up @@ -200,7 +203,7 @@ void ignoreSchemaValidationOnRead() throws IOException {
+ "[ {\"name\": \"id\", \"type\": [\"long\", \"null\"], \"default\": null}]}");

// Create a file with the legacy schema.
File f = Files.createTempFile("testIgnoreSchemaValidationOnRead", ".avro").toFile();
File f = dataDir.resolve("testIgnoreSchemaValidationOnRead.avro").toFile();
try (DataFileWriter<?> w = new DataFileWriter<>(new GenericDatumWriter<>())) {
w.create(legacySchema, f);
w.flush();
Expand All @@ -214,7 +217,7 @@ void ignoreSchemaValidationOnRead() throws IOException {

@Test
void invalidMagicLength() throws IOException {
File f = Files.createTempFile("testInvalidMagicLength", ".avro").toFile();
File f = dataDir.resolve("testInvalidMagicLength.avro").toFile();
try (FileWriter w = new FileWriter(f)) {
w.write("-");
}
Expand All @@ -226,7 +229,7 @@ void invalidMagicLength() throws IOException {

@Test
void invalidMagicBytes() throws IOException {
File f = Files.createTempFile("testInvalidMagicBytes", ".avro").toFile();
File f = dataDir.resolve("testInvalidMagicBytes.avro").toFile();
try (FileWriter w = new FileWriter(f)) {
w.write("invalid");
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ public class TestEncoders {
private static EncoderFactory factory = EncoderFactory.get();

@TempDir
public File DIR;
public Path dataDir;

@Test
void binaryEncoderInit() throws IOException {
Expand Down Expand Up @@ -261,7 +261,7 @@ void arrayBackedByteBuffer() throws IOException {

@Test
void mappedByteBuffer() throws IOException {
Path file = Paths.get(DIR.getPath() + "testMappedByteBuffer.avro");
Path file = dataDir.resolve("testMappedByteBuffer.avro");
Files.write(file, someBytes(EXAMPLE_DATA_SIZE));
MappedByteBuffer buffer = FileChannel.open(file, StandardOpenOption.READ).map(FileChannel.MapMode.READ_ONLY, 0,
EXAMPLE_DATA_SIZE);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
Expand All @@ -37,14 +38,16 @@
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

public class TestCreateRandomFileTool {
private static final String COUNT = System.getProperty("test.count", "200");
private static final File DIR = new File("/tmp");
private static final File OUT_FILE = new File(DIR, "random.avro");

@TempDir
private Path dataDir;
private static final File SCHEMA_FILE = new File("../../../share/test/schemas/weather.avsc");

private final Schema.Parser schemaParser = new Schema.Parser();
Expand Down Expand Up @@ -83,12 +86,13 @@ private int run(List<String> args) throws Exception {

private void check(String... extraArgs) throws Exception {
ArrayList<String> args = new ArrayList<>();
args.addAll(Arrays.asList(OUT_FILE.toString(), "--count", COUNT, "--schema-file", SCHEMA_FILE.toString(), "--seed",
File outFile = dataDir.resolve("random.avro").toFile();
args.addAll(Arrays.asList(outFile.toString(), "--count", COUNT, "--schema-file", SCHEMA_FILE.toString(), "--seed",
Long.toString(SEED)));
args.addAll(Arrays.asList(extraArgs));
run(args);

DataFileReader<Object> reader = new DataFileReader<>(OUT_FILE, new GenericDatumReader<>());
DataFileReader<Object> reader = new DataFileReader<>(outFile, new GenericDatumReader<>());

Iterator<Object> found = reader.iterator();
for (Object expected : new RandomData(schemaParser.parse(SCHEMA_FILE), Integer.parseInt(COUNT), SEED))
Expand All @@ -99,8 +103,9 @@ private void check(String... extraArgs) throws Exception {

private void checkMissingCount(String... extraArgs) throws Exception {
ArrayList<String> args = new ArrayList<>();
File outFile = dataDir.resolve("random.avro").toFile();
args.addAll(
Arrays.asList(OUT_FILE.toString(), "--schema-file", SCHEMA_FILE.toString(), "--seed", Long.toString(SEED)));
Arrays.asList(outFile.toString(), "--schema-file", SCHEMA_FILE.toString(), "--seed", Long.toString(SEED)));
args.addAll(Arrays.asList(extraArgs));
run(args);
assertTrue(err.toString().contains("Need count (--count)"));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.Iterator;

Expand All @@ -31,14 +32,15 @@
import org.apache.avro.util.RandomData;
import org.apache.trevni.avro.AvroColumnReader;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

public class TestToTrevniTool {
private static final long SEED = System.currentTimeMillis();

private static final int COUNT = Integer.parseInt(System.getProperty("test.count", "200"));
private static final File DIR = new File("/tmp");
private static final File AVRO_FILE = new File(DIR, "random.avro");
private static final File TREVNI_FILE = new File(DIR, "random.trv");

@TempDir
private Path dataDir;
private static final File SCHEMA_FILE = new File("../../../share/test/schemas/weather.avsc");

private String run(String... args) throws Exception {
Expand All @@ -53,14 +55,16 @@ void test() throws Exception {
Schema schema = new Schema.Parser().parse(SCHEMA_FILE);

DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>());
writer.create(schema, Util.createFromFS(AVRO_FILE.toString()));
File avroFile = dataDir.resolve("random.avro").toFile();
writer.create(schema, avroFile);
for (Object datum : new RandomData(schema, COUNT, SEED))
writer.append(datum);
writer.close();

run(AVRO_FILE.toString(), TREVNI_FILE.toString());
File trevniFile = dataDir.resolve("random.trv").toFile();
run(avroFile.toString(), trevniFile.toString());

AvroColumnReader<Object> reader = new AvroColumnReader<>(new AvroColumnReader.Params(TREVNI_FILE));
AvroColumnReader<Object> reader = new AvroColumnReader<>(new AvroColumnReader.Params(trevniFile));
Iterator<Object> found = reader.iterator();
for (Object expected : new RandomData(schema, COUNT, SEED))
assertEquals(expected, found.next());
Expand Down

0 comments on commit 9ed7379

Please sign in to comment.