From ef63a5a825936fdae9cf8d12fd2e96421752a26a Mon Sep 17 00:00:00 2001
From: Dain Sundstrom
Date: Sat, 11 Nov 2023 18:13:48 -0800
Subject: [PATCH] Add TracingHiveMetastore

---
 .../deltalake/TestDeltaLakeMetadata.java      |   2 +
 plugin/trino-hive/pom.xml                     |  16 +-
 .../DecoratedHiveMetastoreModule.java         |   4 +-
 .../metastore/HiveMetastoreDecorator.java     |   3 +-
 .../tracing/MetastoreAttributes.java          |  35 +
 .../hive/metastore/tracing/Tracing.java       |  62 ++
 .../tracing/TracingHiveMetastore.java         | 734 ++++++++++++++++++
 .../TracingHiveMetastoreDecorator.java        |  45 ++
 8 files changed, 893 insertions(+), 8 deletions(-)
 create mode 100644 plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/MetastoreAttributes.java
 create mode 100644 plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/Tracing.java
 create mode 100644 plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastore.java
 create mode 100644 plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastoreDecorator.java

diff --git a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java
index 1ab69d381f78..8b006a851dc3 100644
--- a/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java
+++ b/plugin/trino-delta-lake/src/test/java/io/trino/plugin/deltalake/TestDeltaLakeMetadata.java
@@ -22,6 +22,7 @@
 import com.google.inject.Scopes;
 import io.airlift.bootstrap.Bootstrap;
 import io.airlift.json.JsonModule;
+import io.opentelemetry.api.trace.Tracer;
 import io.trino.filesystem.TrinoFileSystemFactory;
 import io.trino.filesystem.hdfs.HdfsFileSystemFactory;
 import io.trino.hdfs.HdfsEnvironment;
@@ -194,6 +195,7 @@ public void setUp()
                     binder.bind(TypeManager.class).toInstance(context.getTypeManager());
                     binder.bind(NodeManager.class).toInstance(context.getNodeManager());
                     binder.bind(PageIndexerFactory.class).toInstance(context.getPageIndexerFactory());
+                    binder.bind(Tracer.class).toInstance(context.getTracer());
                 },
                 // connector modules
                 new DeltaLakeMetastoreModule(),
diff --git a/plugin/trino-hive/pom.xml b/plugin/trino-hive/pom.xml
index 144933e8198b..f844ae68cb26 100644
--- a/plugin/trino-hive/pom.xml
+++ b/plugin/trino-hive/pom.xml
@@ -126,11 +126,21 @@
             <artifactId>opentelemetry-api</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>io.opentelemetry</groupId>
+            <artifactId>opentelemetry-context</artifactId>
+        </dependency>
+
         <dependency>
             <groupId>io.opentelemetry.instrumentation</groupId>
             <artifactId>opentelemetry-aws-sdk-1.11</artifactId>
         </dependency>
 
+        <dependency>
+            <groupId>io.opentelemetry.semconv</groupId>
+            <artifactId>opentelemetry-semconv</artifactId>
+        </dependency>
+
         <dependency>
             <groupId>io.trino</groupId>
             <artifactId>trino-cache</artifactId>
@@ -283,12 +293,6 @@
             <scope>runtime</scope>
         </dependency>
 
-        <dependency>
-            <groupId>io.opentelemetry</groupId>
-            <artifactId>opentelemetry-context</artifactId>
-            <scope>runtime</scope>
-        </dependency>
-
         <dependency>
             <groupId>jakarta.xml.bind</groupId>
             <artifactId>jakarta.xml.bind-api</artifactId>
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/DecoratedHiveMetastoreModule.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/DecoratedHiveMetastoreModule.java
index 262086727b39..9427bfeebd19 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/DecoratedHiveMetastoreModule.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/DecoratedHiveMetastoreModule.java
@@ -25,6 +25,7 @@
 import io.trino.plugin.hive.metastore.cache.SharedHiveMetastoreCache.CachingHiveMetastoreFactory;
 import io.trino.plugin.hive.metastore.procedure.FlushMetadataCacheProcedure;
 import io.trino.plugin.hive.metastore.recording.RecordingHiveMetastoreDecoratorModule;
+import io.trino.plugin.hive.metastore.tracing.TracingHiveMetastoreDecorator;
 import io.trino.spi.procedure.Procedure;
 import io.trino.spi.security.ConnectorIdentity;
@@ -52,7 +53,8 @@ public DecoratedHiveMetastoreModule(boolean installFlushMetadataCacheProcedure)
     @Override
     protected void setup(Binder binder)
     {
-        newSetBinder(binder, HiveMetastoreDecorator.class);
+        newSetBinder(binder, HiveMetastoreDecorator.class)
+                .addBinding().to(TracingHiveMetastoreDecorator.class).in(Scopes.SINGLETON);
 
         install(new RecordingHiveMetastoreDecoratorModule());
         configBinder(binder).bindConfig(CachingHiveMetastoreConfig.class);
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastoreDecorator.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastoreDecorator.java
index 1ed3df5d3325..689bc695dbb0 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastoreDecorator.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/HiveMetastoreDecorator.java
@@ -17,7 +17,8 @@
 public interface HiveMetastoreDecorator
 {
     int PRIORITY_PARTITION_PROJECTION = 50;
-    int PRIORITY_RECORDING = 100;
+    int PRIORITY_TRACING = 100;
+    int PRIORITY_RECORDING = 200;
 
     int getPriority();
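The priority constants above only matter because the bound HiveMetastoreDecorator instances are applied in a priority-driven order. The following is an illustrative sketch, not code from this patch, assuming lower priority values are applied first and therefore sit closer to the undecorated metastore; DecoratorChainingSketch and applyDecorators are hypothetical names.

package io.trino.plugin.hive.metastore;

import java.util.Comparator;
import java.util.Set;

// Hypothetical helper, for illustration only.
final class DecoratorChainingSketch
{
    private DecoratorChainingSketch() {}

    static HiveMetastore applyDecorators(HiveMetastore metastore, Set<HiveMetastoreDecorator> decorators)
    {
        // Assumption: lower priorities are applied first, so with PRIORITY_TRACING = 100
        // and PRIORITY_RECORDING = 200 the tracing wrapper ends up closer to the raw
        // metastore and the recording wrapper wraps it.
        HiveMetastore decorated = metastore;
        for (HiveMetastoreDecorator decorator : decorators.stream()
                .sorted(Comparator.comparingInt(HiveMetastoreDecorator::getPriority))
                .toList()) {
            decorated = decorator.decorate(decorated);
        }
        return decorated;
    }
}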
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/MetastoreAttributes.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/MetastoreAttributes.java
new file mode 100644
index 000000000000..237d0662e1d1
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/MetastoreAttributes.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.tracing;
+
+import io.opentelemetry.api.common.AttributeKey;
+
+import static io.opentelemetry.api.common.AttributeKey.longKey;
+import static io.opentelemetry.api.common.AttributeKey.stringKey;
+
+final class MetastoreAttributes
+{
+    private MetastoreAttributes() {}
+
+    public static final AttributeKey<String> SCHEMA = stringKey("trino.schema");
+    public static final AttributeKey<Long> SCHEMA_RESPONSE_COUNT = longKey("trino.hive.response.schema_count");
+    public static final AttributeKey<String> TABLE = stringKey("trino.table");
+    public static final AttributeKey<Long> TABLE_RESPONSE_COUNT = longKey("trino.hive.response.table_count");
+    public static final AttributeKey<String> PARTITION = stringKey("trino.partition");
+    public static final AttributeKey<String> FUNCTION = stringKey("trino.function");
+    public static final AttributeKey<Long> FUNCTION_RESPONSE_COUNT = longKey("trino.hive.response.function_count");
+    public static final AttributeKey<String> ACID_TRANSACTION = stringKey("trino.hive.acid_transaction");
+    public static final AttributeKey<Long> PARTITION_REQUEST_COUNT = longKey("trino.hive.request.partition_count");
+    public static final AttributeKey<Long> PARTITION_RESPONSE_COUNT = longKey("trino.hive.response.partition_count");
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/Tracing.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/Tracing.java
new file mode 100644
index 000000000000..5c1148e9fd01
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/Tracing.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.tracing;
+
+import io.opentelemetry.api.common.Attributes;
+import io.opentelemetry.api.trace.Span;
+
+import static io.opentelemetry.api.trace.StatusCode.ERROR;
+import static io.opentelemetry.semconv.SemanticAttributes.EXCEPTION_ESCAPED;
+
+final class Tracing
+{
+    private Tracing() {}
+
+    public static <E extends Exception> void withTracing(Span span, CheckedRunnable<E> runnable)
+            throws E
+    {
+        withTracing(span, () -> {
+            runnable.run();
+            return null;
+        });
+    }
+
+    public static <T, E extends Exception> T withTracing(Span span, CheckedSupplier<T, E> supplier)
+            throws E
+    {
+        try (var ignored = span.makeCurrent()) {
+            return supplier.get();
+        }
+        catch (Throwable t) {
+            span.setStatus(ERROR, t.getMessage());
+            span.recordException(t, Attributes.of(EXCEPTION_ESCAPED, true));
+            throw t;
+        }
+        finally {
+            span.end();
+        }
+    }
+
+    public interface CheckedRunnable<E extends Exception>
+    {
+        void run()
+                throws E;
+    }
+
+    public interface CheckedSupplier<T, E extends Exception>
+    {
+        T get()
+                throws E;
+    }
+}
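The two withTracing overloads above are the entire tracing surface the wrapper relies on: make the span current, record an escaping exception as a span error, and always end the span. A minimal usage sketch follows (illustration only, not part of this patch); it assumes a no-op OpenTelemetry tracer is enough for demonstration, places the hypothetical WithTracingSketch class in the same package because Tracing is package-private, and uses exampleCall as a stand-in for any delegate call.

package io.trino.plugin.hive.metastore.tracing;

import io.opentelemetry.api.OpenTelemetry;
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.Tracer;

// Hypothetical example class, for illustration only.
final class WithTracingSketch
{
    private WithTracingSketch() {}

    static String exampleCall()
    {
        return "result";
    }

    public static void main(String[] args)
            throws Exception
    {
        Tracer tracer = OpenTelemetry.noop().getTracer("example");
        Span span = tracer.spanBuilder("HiveMetastore.exampleCall").startSpan();
        // withTracing makes the span current for the call, records and re-throws any
        // failure as a span error, and always ends the span.
        String result = Tracing.withTracing(span, WithTracingSketch::exampleCall);
        System.out.println(result);
    }
}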
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastore.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastore.java
new file mode 100644
index 000000000000..dea02fb2ffd1
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastore.java
@@ -0,0 +1,734 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.tracing;
+
+import io.opentelemetry.api.trace.Span;
+import io.opentelemetry.api.trace.Tracer;
+import io.trino.hive.thrift.metastore.DataOperationType;
+import io.trino.plugin.hive.HiveColumnStatisticType;
+import io.trino.plugin.hive.HivePartition;
+import io.trino.plugin.hive.HiveType;
+import io.trino.plugin.hive.PartitionStatistics;
+import io.trino.plugin.hive.acid.AcidOperation;
+import io.trino.plugin.hive.acid.AcidTransaction;
+import io.trino.plugin.hive.metastore.AcidTransactionOwner;
+import io.trino.plugin.hive.metastore.Database;
+import io.trino.plugin.hive.metastore.HiveMetastore;
+import io.trino.plugin.hive.metastore.HivePrincipal;
+import io.trino.plugin.hive.metastore.HivePrivilegeInfo;
+import io.trino.plugin.hive.metastore.Partition;
+import io.trino.plugin.hive.metastore.PartitionWithStatistics;
+import io.trino.plugin.hive.metastore.PrincipalPrivileges;
+import io.trino.plugin.hive.metastore.Table;
+import io.trino.spi.connector.SchemaTableName;
+import io.trino.spi.function.LanguageFunction;
+import io.trino.spi.predicate.TupleDomain;
+import io.trino.spi.security.RoleGrant;
+import io.trino.spi.type.Type;
+
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.OptionalLong;
+import java.util.Set;
+import java.util.function.Function;
+
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.ACID_TRANSACTION;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.FUNCTION;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.FUNCTION_RESPONSE_COUNT;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.PARTITION;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.PARTITION_REQUEST_COUNT;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.PARTITION_RESPONSE_COUNT;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.SCHEMA;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.SCHEMA_RESPONSE_COUNT;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.TABLE;
+import static io.trino.plugin.hive.metastore.tracing.MetastoreAttributes.TABLE_RESPONSE_COUNT;
+import static io.trino.plugin.hive.metastore.tracing.Tracing.withTracing;
+import static java.util.Objects.requireNonNull;
+
+public class TracingHiveMetastore
+        implements HiveMetastore
+{
+    private final Tracer tracer;
+    private final HiveMetastore delegate;
+
+    public TracingHiveMetastore(Tracer tracer, HiveMetastore delegate)
+    {
+        this.tracer = requireNonNull(tracer, "tracer is null");
+        this.delegate = requireNonNull(delegate, "delegate is null");
+    }
+
+    @Override
+    public Optional<Database> getDatabase(String databaseName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getDatabase")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        return withTracing(span, () -> delegate.getDatabase(databaseName));
+    }
+
+    @Override
+    public List<String> getAllDatabases()
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getAllDatabases")
+                .startSpan();
+        return withTracing(span, () -> {
+            List<String> databases = delegate.getAllDatabases();
+            span.setAttribute(SCHEMA_RESPONSE_COUNT, databases.size());
+            return databases;
+        });
+    }
+
+    @Override
+    public Optional<Table> getTable(String databaseName, String tableName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getTable")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        return withTracing(span, () -> delegate.getTable(databaseName, tableName));
+    }
+
+    @Override
+    public Set<HiveColumnStatisticType> getSupportedColumnStatistics(Type type)
+    {
+        // Tracing is not necessary
+        return delegate.getSupportedColumnStatistics(type);
+    }
+
+    @Override
+    public PartitionStatistics getTableStatistics(Table table)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getTableStatistics")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .startSpan();
+        return withTracing(span, () -> delegate.getTableStatistics(table));
+    }
+
+    @Override
+    public Map<String, PartitionStatistics> getPartitionStatistics(Table table, List<Partition> partitions)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getPartitionStatistics")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .setAttribute(PARTITION_REQUEST_COUNT, (long) partitions.size())
+                .startSpan();
+        return withTracing(span, () -> {
+            Map<String, PartitionStatistics> partitionStatistics = delegate.getPartitionStatistics(table, partitions);
+            span.setAttribute(PARTITION_RESPONSE_COUNT, partitionStatistics.size());
+            return partitionStatistics;
+        });
+    }
+
+    @Override
+    public void updateTableStatistics(String databaseName, String tableName, AcidTransaction transaction, Function<PartitionStatistics, PartitionStatistics> update)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.updateTableStatistics")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        if (transaction.isAcidTransactionRunning()) {
+            span.setAttribute(ACID_TRANSACTION, String.valueOf(transaction.getAcidTransactionId()));
+        }
+
+        withTracing(span, () -> delegate.updateTableStatistics(databaseName, tableName, transaction, update));
+    }
+
+    @Override
+    public void updatePartitionStatistics(Table table, String partitionName, Function<PartitionStatistics, PartitionStatistics> update)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.updatePartitionStatistics")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .setAttribute(PARTITION, partitionName)
+                .startSpan();
+        withTracing(span, () -> delegate.updatePartitionStatistics(table, partitionName, update));
+    }
+
+    @Override
+    public void updatePartitionStatistics(Table table, Map<String, Function<PartitionStatistics, PartitionStatistics>> updates)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.updatePartitionStatistics")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .startSpan();
+        withTracing(span, () -> delegate.updatePartitionStatistics(table, updates));
+    }
+
+    @Override
+    public List<String> getAllTables(String databaseName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getAllTables")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        return withTracing(span, () -> {
+            List<String> tables = delegate.getAllTables(databaseName);
+            span.setAttribute(TABLE_RESPONSE_COUNT, tables.size());
+            return tables;
+        });
+    }
+
+    @Override
+    public Optional<List<SchemaTableName>> getAllTables()
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getAllTables")
+                .startSpan();
+        return withTracing(span, () -> {
+            Optional<List<SchemaTableName>> tables = delegate.getAllTables();
+            tables.ifPresent(list -> span.setAttribute(TABLE_RESPONSE_COUNT, list.size()));
+            return tables;
+        });
+    }
+
+    @Override
+    public List<String> getTablesWithParameter(String databaseName, String parameterKey, String parameterValue)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getTablesWithParameter")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, parameterKey)
+                .startSpan();
+        return withTracing(span, () -> {
+            List<String> tables = delegate.getTablesWithParameter(databaseName, parameterKey, parameterValue);
+            span.setAttribute(TABLE_RESPONSE_COUNT, tables.size());
+            return tables;
+        });
+    }
+
+    @Override
+    public List<String> getAllViews(String databaseName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getAllViews")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        return withTracing(span, () -> {
+            List<String> views = delegate.getAllViews(databaseName);
+            span.setAttribute(TABLE_RESPONSE_COUNT, views.size());
+            return views;
+        });
+    }
+
+    @Override
+    public Optional<List<SchemaTableName>> getAllViews()
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getAllViews")
+                .startSpan();
+        return withTracing(span, () -> {
+            Optional<List<SchemaTableName>> views = delegate.getAllViews();
+            views.ifPresent(list -> span.setAttribute(TABLE_RESPONSE_COUNT, list.size()));
+            return views;
+        });
+    }
+
+    @Override
+    public void createDatabase(Database database)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.createDatabase")
+                .setAttribute(SCHEMA, database.getDatabaseName())
+                .startSpan();
+        withTracing(span, () -> delegate.createDatabase(database));
+    }
+
+    @Override
+    public void dropDatabase(String databaseName, boolean deleteData)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.dropDatabase")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        withTracing(span, () -> delegate.dropDatabase(databaseName, deleteData));
+    }
+
+    @Override
+    public void renameDatabase(String databaseName, String newDatabaseName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.renameDatabase")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        withTracing(span, () -> delegate.renameDatabase(databaseName, newDatabaseName));
+    }
+
+    @Override
+    public void setDatabaseOwner(String databaseName, HivePrincipal principal)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.setDatabaseOwner")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        withTracing(span, () -> delegate.setDatabaseOwner(databaseName, principal));
+    }
+
+    @Override
+    public void createTable(Table table, PrincipalPrivileges principalPrivileges)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.createTable")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .startSpan();
+        withTracing(span, () -> delegate.createTable(table, principalPrivileges));
+    }
+
+    @Override
+    public void dropTable(String databaseName, String tableName, boolean deleteData)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.dropTable")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.dropTable(databaseName, tableName, deleteData));
+    }
+
+    @Override
+    public void replaceTable(String databaseName, String tableName, Table newTable, PrincipalPrivileges principalPrivileges)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.replaceTable")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.replaceTable(databaseName, tableName, newTable, principalPrivileges));
+    }
+
+    @Override
+    public void renameTable(String databaseName, String tableName, String newDatabaseName, String newTableName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.renameTable")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.renameTable(databaseName, tableName, newDatabaseName, newTableName));
+    }
+
+    @Override
+    public void commentTable(String databaseName, String tableName, Optional<String> comment)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.commentTable")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.commentTable(databaseName, tableName, comment));
+    }
+
+    @Override
+    public void setTableOwner(String databaseName, String tableName, HivePrincipal principal)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.setTableOwner")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.setTableOwner(databaseName, tableName, principal));
+    }
+
+    @Override
+    public void commentColumn(String databaseName, String tableName, String columnName, Optional<String> comment)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.commentColumn")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.commentColumn(databaseName, tableName, columnName, comment));
+    }
+
+    @Override
+    public void addColumn(String databaseName, String tableName, String columnName, HiveType columnType, String columnComment)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.addColumn")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.addColumn(databaseName, tableName, columnName, columnType, columnComment));
+    }
+
+    @Override
+    public void renameColumn(String databaseName, String tableName, String oldColumnName, String newColumnName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.renameColumn")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.renameColumn(databaseName, tableName, oldColumnName, newColumnName));
+    }
+
+    @Override
+    public void dropColumn(String databaseName, String tableName, String columnName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.dropColumn")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.dropColumn(databaseName, tableName, columnName));
+    }
+
+    @Override
+    public Optional<Partition> getPartition(Table table, List<String> partitionValues)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getPartition")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .startSpan();
+        return withTracing(span, () -> delegate.getPartition(table, partitionValues));
+    }
+
+    @Override
+    public Optional<List<String>> getPartitionNamesByFilter(String databaseName, String tableName, List<String> columnNames, TupleDomain<String> partitionKeysFilter)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getPartitionNamesByFilter")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        return withTracing(span, () -> {
+            Optional<List<String>> partitionNames = delegate.getPartitionNamesByFilter(databaseName, tableName, columnNames, partitionKeysFilter);
+            partitionNames.ifPresent(partitions -> span.setAttribute(PARTITION_RESPONSE_COUNT, partitions.size()));
+            return partitionNames;
+        });
+    }
+
+    @Override
+    public Map<String, Optional<Partition>> getPartitionsByNames(Table table, List<String> partitionNames)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getPartitionsByNames")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .setAttribute(PARTITION_REQUEST_COUNT, (long) partitionNames.size())
+                .startSpan();
+        return withTracing(span, () -> {
+            Map<String, Optional<Partition>> partitions = delegate.getPartitionsByNames(table, partitionNames);
+            span.setAttribute(PARTITION_RESPONSE_COUNT, partitions.size());
+            return partitions;
+        });
+    }
+
+    @Override
+    public void addPartitions(String databaseName, String tableName, List<PartitionWithStatistics> partitions)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.addPartitions")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .setAttribute(PARTITION_REQUEST_COUNT, (long) partitions.size())
+                .startSpan();
+        withTracing(span, () -> delegate.addPartitions(databaseName, tableName, partitions));
+    }
+
+    @Override
+    public void dropPartition(String databaseName, String tableName, List<String> parts, boolean deleteData)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.dropPartition")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.dropPartition(databaseName, tableName, parts, deleteData));
+    }
+
+    @Override
+    public void alterPartition(String databaseName, String tableName, PartitionWithStatistics partition)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.alterPartition")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .setAttribute(PARTITION, partition.getPartitionName())
+                .startSpan();
+        withTracing(span, () -> delegate.alterPartition(databaseName, tableName, partition));
+    }
+
+    @Override
+    public void createRole(String role, String grantor)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.createRole")
+                .startSpan();
+        withTracing(span, () -> delegate.createRole(role, grantor));
+    }
+
+    @Override
+    public void dropRole(String role)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.dropRole")
+                .startSpan();
+        withTracing(span, () -> delegate.dropRole(role));
+    }
+
+    @Override
+    public Set<String> listRoles()
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.listRoles")
+                .startSpan();
+        return withTracing(span, delegate::listRoles);
+    }
+
+    @Override
+    public void grantRoles(Set<String> roles, Set<HivePrincipal> grantees, boolean adminOption, HivePrincipal grantor)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.grantRoles")
+                .startSpan();
+        withTracing(span, () -> delegate.grantRoles(roles, grantees, adminOption, grantor));
+    }
+
+    @Override
+    public void revokeRoles(Set<String> roles, Set<HivePrincipal> grantees, boolean adminOption, HivePrincipal grantor)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.revokeRoles")
+                .startSpan();
+        withTracing(span, () -> delegate.revokeRoles(roles, grantees, adminOption, grantor));
+    }
+
+    @Override
+    public Set<RoleGrant> listRoleGrants(HivePrincipal principal)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.listRoleGrants")
+                .startSpan();
+        return withTracing(span, () -> delegate.listRoleGrants(principal));
+    }
+
+    @Override
+    public void grantTablePrivileges(
+            String databaseName,
+            String tableName,
+            String tableOwner,
+            HivePrincipal grantee,
+            HivePrincipal grantor,
+            Set<HivePrivilegeInfo.HivePrivilege> privileges,
+            boolean grantOption)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.grantTablePrivileges")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.grantTablePrivileges(databaseName, tableName, tableOwner, grantee, grantor, privileges, grantOption));
+    }
+
+    @Override
+    public void revokeTablePrivileges(
+            String databaseName,
+            String tableName,
+            String tableOwner,
+            HivePrincipal grantee,
+            HivePrincipal grantor,
+            Set<HivePrivilegeInfo.HivePrivilege> privileges,
+            boolean grantOption)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.revokeTablePrivileges")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        withTracing(span, () -> delegate.revokeTablePrivileges(databaseName, tableName, tableOwner, grantee, grantor, privileges, grantOption));
+    }
+
+    @Override
+    public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String tableName, Optional<String> tableOwner, Optional<HivePrincipal> principal)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.listTablePrivileges")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(TABLE, tableName)
+                .startSpan();
+        return withTracing(span, () -> delegate.listTablePrivileges(databaseName, tableName, tableOwner, principal));
+    }
+
+    @Override
+    public void checkSupportsTransactions()
+    {
+        // Tracing is not necessary
+        delegate.checkSupportsTransactions();
+    }
+
+    @Override
+    public long openTransaction(AcidTransactionOwner transactionOwner)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.openTransaction")
+                .startSpan();
+        return withTracing(span, () -> {
+            long transactionId = delegate.openTransaction(transactionOwner);
+            span.setAttribute(ACID_TRANSACTION, String.valueOf(transactionId));
+            return transactionId;
+        });
+    }
+
+    @Override
+    public void commitTransaction(long transactionId)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.commitTransaction")
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.commitTransaction(transactionId));
+    }
+
+    @Override
+    public void abortTransaction(long transactionId)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.abortTransaction")
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.abortTransaction(transactionId));
+    }
+
+    @Override
+    public void sendTransactionHeartbeat(long transactionId)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.sendTransactionHeartbeat")
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.sendTransactionHeartbeat(transactionId));
+    }
+
+    @Override
+    public void acquireSharedReadLock(
+            AcidTransactionOwner transactionOwner,
+            String queryId,
+            long transactionId,
+            List<SchemaTableName> fullTables,
+            List<HivePartition> partitions)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.acquireSharedReadLock")
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.acquireSharedReadLock(transactionOwner, queryId, transactionId, fullTables, partitions));
+    }
+
+    @Override
+    public String getValidWriteIds(List<SchemaTableName> tables, long currentTransactionId)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getValidWriteIds")
+                .setAttribute(ACID_TRANSACTION, String.valueOf(currentTransactionId))
+                .startSpan();
+        return withTracing(span, () -> delegate.getValidWriteIds(tables, currentTransactionId));
+    }
+
+    @Override
+    public Optional<String> getConfigValue(String name)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getConfigValue")
+                .startSpan();
+        return withTracing(span, () -> delegate.getConfigValue(name));
+    }
+
+    @Override
+    public long allocateWriteId(String dbName, String tableName, long transactionId)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.allocateWriteId")
+                .setAttribute(SCHEMA, dbName)
+                .setAttribute(TABLE, tableName)
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        return withTracing(span, () -> delegate.allocateWriteId(dbName, tableName, transactionId));
+    }
+
+    @Override
+    public void acquireTableWriteLock(AcidTransactionOwner transactionOwner, String queryId, long transactionId, String dbName, String tableName, DataOperationType operation,
+            boolean isDynamicPartitionWrite)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.acquireTableWriteLock")
+                .setAttribute(SCHEMA, dbName)
+                .setAttribute(TABLE, tableName)
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.acquireTableWriteLock(transactionOwner, queryId, transactionId, dbName, tableName, operation, isDynamicPartitionWrite));
+    }
+
+    @Override
+    public void updateTableWriteId(String dbName, String tableName, long transactionId, long writeId, OptionalLong rowCountChange)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.updateTableWriteId")
+                .setAttribute(SCHEMA, dbName)
+                .setAttribute(TABLE, tableName)
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.updateTableWriteId(dbName, tableName, transactionId, writeId, rowCountChange));
+    }
+
+    @Override
+    public void addDynamicPartitions(String dbName, String tableName, List<String> partitionNames, long transactionId, long writeId, AcidOperation operation)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.addDynamicPartitions")
+                .setAttribute(SCHEMA, dbName)
+                .setAttribute(TABLE, tableName)
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .setAttribute(PARTITION_REQUEST_COUNT, (long) partitionNames.size())
+                .startSpan();
+        withTracing(span, () -> delegate.addDynamicPartitions(dbName, tableName, partitionNames, transactionId, writeId, operation));
+    }
+
+    @Override
+    public void alterTransactionalTable(Table table, long transactionId, long writeId, PrincipalPrivileges principalPrivileges)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.alterTransactionalTable")
+                .setAttribute(SCHEMA, table.getDatabaseName())
+                .setAttribute(TABLE, table.getTableName())
+                .setAttribute(ACID_TRANSACTION, String.valueOf(transactionId))
+                .startSpan();
+        withTracing(span, () -> delegate.alterTransactionalTable(table, transactionId, writeId, principalPrivileges));
+    }
+
+    @Override
+    public boolean functionExists(String databaseName, String functionName, String signatureToken)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.functionExists")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(FUNCTION, functionName)
+                .startSpan();
+        return withTracing(span, () -> delegate.functionExists(databaseName, functionName, signatureToken));
+    }
+
+    @Override
+    public Collection<LanguageFunction> getFunctions(String databaseName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getFunctions")
+                .setAttribute(SCHEMA, databaseName)
+                .startSpan();
+        return withTracing(span, () -> {
+            Collection<LanguageFunction> functions = delegate.getFunctions(databaseName);
+            span.setAttribute(FUNCTION_RESPONSE_COUNT, functions.size());
+            return functions;
+        });
+    }
+
+    @Override
+    public Collection<LanguageFunction> getFunctions(String databaseName, String functionName)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.getFunctions")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(FUNCTION, functionName)
+                .startSpan();
+        return withTracing(span, () -> {
+            Collection<LanguageFunction> functions = delegate.getFunctions(databaseName, functionName);
+            span.setAttribute(FUNCTION_RESPONSE_COUNT, functions.size());
+            return functions;
+        });
+    }
+
+    @Override
+    public void createFunction(String databaseName, String functionName, LanguageFunction function)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.createFunction")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(FUNCTION, functionName)
+                .startSpan();
+        withTracing(span, () -> delegate.createFunction(databaseName, functionName, function));
+    }
+
+    @Override
+    public void replaceFunction(String databaseName, String functionName, LanguageFunction function)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.replaceFunction")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(FUNCTION, functionName)
+                .startSpan();
+        withTracing(span, () -> delegate.replaceFunction(databaseName, functionName, function));
+    }
+
+    @Override
+    public void dropFunction(String databaseName, String functionName, String signatureToken)
+    {
+        Span span = tracer.spanBuilder("HiveMetastore.dropFunction")
+                .setAttribute(SCHEMA, databaseName)
+                .setAttribute(FUNCTION, functionName)
+                .startSpan();
+        withTracing(span, () -> delegate.dropFunction(databaseName, functionName, signatureToken));
+    }
+}
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastoreDecorator.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastoreDecorator.java
new file mode 100644
index 000000000000..54aa31bdfe2f
--- /dev/null
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/metastore/tracing/TracingHiveMetastoreDecorator.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.trino.plugin.hive.metastore.tracing;
+
+import com.google.inject.Inject;
+import io.opentelemetry.api.trace.Tracer;
+import io.trino.plugin.hive.metastore.HiveMetastore;
+import io.trino.plugin.hive.metastore.HiveMetastoreDecorator;
+
+import static java.util.Objects.requireNonNull;
+
+public class TracingHiveMetastoreDecorator
+        implements HiveMetastoreDecorator
+{
+    private final Tracer tracer;
+
+    @Inject
+    public TracingHiveMetastoreDecorator(Tracer tracer)
+    {
+        this.tracer = requireNonNull(tracer, "tracer is null");
+    }
+
+    @Override
+    public int getPriority()
+    {
+        return PRIORITY_TRACING;
+    }
+
+    @Override
+    public HiveMetastore decorate(HiveMetastore hiveMetastore)
+    {
+        return new TracingHiveMetastore(tracer, hiveMetastore);
+    }
+}
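For an end-to-end picture of what the decorator produces, here is a hypothetical, illustration-only sketch (not part of this patch). It assumes Mockito is available to stand in for a real HiveMetastore and that opentelemetry-sdk-testing provides InMemorySpanExporter; the schema and table names are made up.

package io.trino.plugin.hive.metastore.tracing;

import io.opentelemetry.api.trace.Tracer;
import io.opentelemetry.sdk.testing.exporter.InMemorySpanExporter;
import io.opentelemetry.sdk.trace.SdkTracerProvider;
import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor;
import io.trino.plugin.hive.metastore.HiveMetastore;

import java.util.Optional;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Hypothetical example class, for illustration only.
final class TracingDecoratorSketch
{
    private TracingDecoratorSketch() {}

    public static void main(String[] args)
    {
        // Collect finished spans in memory so the effect of the decorator is visible.
        InMemorySpanExporter exporter = InMemorySpanExporter.create();
        Tracer tracer = SdkTracerProvider.builder()
                .addSpanProcessor(SimpleSpanProcessor.create(exporter))
                .build()
                .get("example");

        HiveMetastore delegate = mock(HiveMetastore.class);
        when(delegate.getTable("web", "orders")).thenReturn(Optional.empty());

        HiveMetastore traced = new TracingHiveMetastoreDecorator(tracer).decorate(delegate);
        traced.getTable("web", "orders");

        // Expect a single span named "HiveMetastore.getTable" carrying the
        // trino.schema and trino.table attributes.
        System.out.println(exporter.getFinishedSpanItems());
    }
}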