Skip to content

Commit

Permalink
Fix unit tests
Browse files Browse the repository at this point in the history
  • Loading branch information
jphalip committed Feb 23, 2024
1 parent 8075685 commit 9f4569f
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,12 @@ public class TestUtils {

public static String HIVE_INGESTION_TIME_PARTITIONED_PROPS = "'bq.time.partition.type'='DAY'";

/**
 * Builds a fresh Hadoop {@link Configuration} pre-populated with a mock MapReduce
 * workflow ID, so tests that derive work directories from the workflow ID get a
 * deterministic value.
 */
public static Configuration getMockHadoopConf() {
  Configuration hadoopConf = new Configuration();
  hadoopConf.set(MRJobConfig.WORKFLOW_ID, "mock.mr.workflow.id.1234");
  return hadoopConf;
}

/** Return Hive config values passed from system properties */
public static Map<String, String> getHiveConfSystemOverrides() {
Map<String, String> overrides = new HashMap<>();
Expand All @@ -173,28 +179,26 @@ public static Map<String, String> getHiveConfSystemOverrides() {
}

/**
 * Resolves GCP credentials through the connector's Guice wiring.
 *
 * <p>Starts from the shared mock Hadoop configuration (see {@code getMockHadoopConf}),
 * layers on any Hive config overrides passed via system properties, then asks the
 * injected {@link BigQueryCredentialsSupplier} for credentials.
 *
 * @return the resolved {@code com.google.auth.Credentials}
 */
private static com.google.auth.Credentials getCredentials() {
  Configuration conf = getMockHadoopConf();
  // Apply system-property overrides on top of the mock base configuration.
  Map<String, String> hiveConfSystemOverrides = getHiveConfSystemOverrides();
  for (Map.Entry<String, String> override : hiveConfSystemOverrides.entrySet()) {
    conf.set(override.getKey(), override.getValue());
  }
  Injector injector =
      Guice.createInjector(new BigQueryClientModule(), new HiveBigQueryConnectorModule(conf));
  BigQueryCredentialsSupplier credentialsSupplier =
      injector.getInstance(BigQueryCredentialsSupplier.class);
  return credentialsSupplier.getCredentials();
}

/**
 * Builds a {@link BigQueryClient} through the connector's Guice wiring.
 *
 * <p>Mirrors {@code getCredentials()}: mock Hadoop base configuration plus
 * system-property Hive overrides, injected via {@code BigQueryClientModule} and
 * {@code HiveBigQueryConnectorModule}.
 *
 * @return a fully wired {@code BigQueryClient}
 */
public static BigQueryClient getBigqueryClient() {
  Configuration conf = getMockHadoopConf();
  // Apply system-property overrides on top of the mock base configuration.
  Map<String, String> hiveConfSystemOverrides = getHiveConfSystemOverrides();
  for (Map.Entry<String, String> override : hiveConfSystemOverrides.entrySet()) {
    conf.set(override.getKey(), override.getValue());
  }
  Injector injector =
      Guice.createInjector(new BigQueryClientModule(), new HiveBigQueryConnectorModule(conf));
  return injector.getInstance(BigQueryClient.class);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import com.google.cloud.bigquery.TableId;
import com.google.cloud.hive.bigquery.connector.JobDetails;
import com.google.cloud.hive.bigquery.connector.TestUtils;
import com.google.cloud.hive.bigquery.connector.output.WriterRegistry;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
Expand All @@ -46,32 +47,29 @@ public void testTableIdPrefix() {

/**
 * Verifies that the query work directory is derived from the mock MR workflow ID:
 * defaults under {@code hadoop.tmp.dir}, and relocates when
 * {@code bq.work.dir.parent.path} is set.
 */
@Test
public void testGetWorkDir() {
  Configuration conf = TestUtils.getMockHadoopConf();
  conf.set("hadoop.tmp.dir", "/tmp");
  // Default parent: hadoop.tmp.dir
  Path path = JobUtils.getQueryWorkDir(conf);
  assertEquals("/tmp/bq-hive-mock.mr.workflow.id.1234", path.toString());
  // Explicit parent overrides hadoop.tmp.dir
  conf.set("bq.work.dir.parent.path", "/my/workdir");
  path = JobUtils.getQueryWorkDir(conf);
  assertEquals("/my/workdir/bq-hive-mock.mr.workflow.id.1234", path.toString());
}

/**
 * Verifies that the job-details file path is built as
 * {@code <workDir>/<db.table>/job-details.json}, where the work directory is
 * derived from the mock MR workflow ID.
 */
@Test
public void testGetJobDetailsFilePath() {
  Configuration conf = TestUtils.getMockHadoopConf();
  conf.set("hadoop.tmp.dir", "/tmp");
  String hmsDbTable = "default.mytable";
  Path jobDetailsFilePath = JobUtils.getJobDetailsFilePath(conf, hmsDbTable);
  assertEquals(
      "/tmp/bq-hive-mock.mr.workflow.id.1234/default.mytable/job-details.json",
      jobDetailsFilePath.toString());
}

@Test
public void testGetTaskWriterOutputFile() {
Configuration conf = new Configuration();
conf.set("hive.query.id", "query123");
Configuration conf = TestUtils.getMockHadoopConf();
conf.set("hadoop.tmp.dir", "/hadoop-tmp/");
JobDetails jobDetails = new JobDetails();
jobDetails.setTableProperties(new Properties());
Expand All @@ -82,7 +80,7 @@ public void testGetTaskWriterOutputFile() {
String writerId = WriterRegistry.getWriterId();
Path path = JobUtils.getTaskWriterOutputFile(conf, jobDetails, taskAttemptID, writerId, "jpeg");
String pattern =
"^/hadoop-tmp/bq-hive-hive-query-id-query123/default.mytable/myproject_mydataset_mytable_abcd1234_w\\d+\\.jpeg";
"^/hadoop-tmp/bq-hive-mock.mr.workflow.id.1234/default.mytable/myproject_mydataset_mytable_abcd1234_w\\d+\\.jpeg";
assertThat(path.toString(), matchesPattern(pattern));
}
}

0 comments on commit 9f4569f

Please sign in to comment.