
Commit

One more fix for pig query id

jphalip committed Mar 6, 2024
1 parent 015d2e7 commit fe0cfde
Showing 2 changed files with 6 additions and 18 deletions.
hive-bigquery-connector-common/src/main/java/com/google/cloud/hive/bigquery/connector/utils/hive/HiveUtils.java

@@ -22,7 +22,6 @@
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.TaskAttemptID;
- import org.apache.hadoop.mapreduce.MRJobConfig;

/**
 * Helper class that looks up details about a task ID and Tez Vertex ID. This is useful to create
@@ -50,21 +49,13 @@ public static String getQueryId(Configuration conf) {
    String hiveQueryId = conf.get(ConfVars.HIVEQUERYID.varname);
    [Codecov / codecov/patch: Added line #L49 was not covered by tests]
    if (hiveQueryId != null) {
      // In this case, the user is running a plain Hive query directly from Hive itself.
      // Return the Hive query's id instead of the mapreduce workflow id, as the latter is not yet
      // set when `BigQueryStorageHandlerBase.configureOutputJobProperties()` is called by the
      // Hive driver.
      return "hive-query-id-" + hiveQueryId;
      [Codecov / codecov/patch: Added line #L52 was not covered by tests]
    }
    if (conf.get("pig.script.id") != null) {
-     // In this case, the user is running a Hive query from Pig.
-     // Return the mapreduce workflow id instead of the Pig script id because multiple mapreduce
-     // jobs could potentially be run as part of the same Pig script.
-     String workflowId = conf.get(MRJobConfig.WORKFLOW_ID, "");
-     if (workflowId.equals("")) {
-       throw new RuntimeException(
-           "No mapreduce workflow id found in Hadoop conf for this Pig job");
-     }
-     return workflowId;
+     // The user is running a Hive query from Pig. Use the job's timestamp as a pig script might
+     // run multiple jobs.
+     return String.format(
+         "pig-%s-%s", conf.get("pig.script.id"), conf.get("pig.job.submitted.timestamp"));
      [Codecov / codecov/patch: Added lines #L57-L58 were not covered by tests]
    }
    throw new RuntimeException("No query id found in Hadoop conf");
    [Codecov / codecov/patch: Added line #L60 was not covered by tests]
  }
(second changed file: unit tests for HiveUtils.getQueryId)

@@ -35,10 +35,7 @@ public void testHiveQueryId() {
  public void testPigQueryId() {
    Configuration conf = new Configuration();
    conf.set("pig.script.id", "abcd");
-   Throwable exception = assertThrows(RuntimeException.class, () -> HiveUtils.getQueryId(conf));
-   assertEquals(
-       "No mapreduce workflow id found in Hadoop conf for this Pig job", exception.getMessage());
-   conf.set("mapreduce.workflow.id", "xyz");
-   assertEquals("xyz", HiveUtils.getQueryId(conf));
+   conf.set("pig.job.submitted.timestamp", "123456789");
+   assertEquals("pig-abcd-123456789", HiveUtils.getQueryId(conf));
  }
}
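
For context, here is a minimal usage sketch of the updated getQueryId() logic. It assumes the connector classes and Hadoop's Configuration are on the classpath, and that ConfVars.HIVEQUERYID.varname resolves to the "hive.query.id" property; the QueryIdDemo class and the sample property values are illustrative only and are not part of this commit.

import com.google.cloud.hive.bigquery.connector.utils.hive.HiveUtils;
import org.apache.hadoop.conf.Configuration;

public class QueryIdDemo {
  public static void main(String[] args) {
    // Pig case: the id combines the Pig script id with the job submission timestamp,
    // so separate jobs launched by the same script can still be told apart.
    Configuration pigConf = new Configuration();
    pigConf.set("pig.script.id", "abcd");
    pigConf.set("pig.job.submitted.timestamp", "123456789");
    System.out.println(HiveUtils.getQueryId(pigConf)); // prints "pig-abcd-123456789"

    // Plain Hive case: a Hive query id, when present, takes precedence.
    Configuration hiveConf = new Configuration();
    hiveConf.set("hive.query.id", "some-hive-query-id"); // hypothetical value
    System.out.println(HiveUtils.getQueryId(hiveConf)); // prints "hive-query-id-some-hive-query-id"

    // With neither property set, getQueryId throws a RuntimeException
    // ("No query id found in Hadoop conf").
  }
}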

0 comments on commit fe0cfde
