emit search related metrics from SearchDAO (linkedin#412)

* emit search metrics from DAO

* fix

* fix

* catch

* address comment

* fix

---------

Co-authored-by: Jesse Jia <[email protected]>
zhixuanjia and Jesse Jia authored Sep 4, 2024
1 parent 3c10b71 commit e088d57
Showing 3 changed files with 82 additions and 0 deletions.
Changed file 1 of 3: BaseMetadataEventProducer.java
@@ -6,6 +6,7 @@
import com.linkedin.data.template.UnionTemplate;
import com.linkedin.metadata.dao.utils.ModelUtils;
import com.linkedin.metadata.events.IngestionMode;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

@@ -63,4 +64,12 @@ public abstract <ASPECT extends RecordTemplate> void produceMetadataAuditEvent(@
*/
public abstract <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEvent(@Nonnull URN urn,
@Nullable ASPECT oldValue, @Nonnull ASPECT newValue, @Nullable AuditStamp auditStamp, @Nullable IngestionMode ingestionMode);

/**
* Produce Metadata Graph search metrics inside SearchDAO.
* TODO: (jejia) Clean this up after we fully migrate to Hosted Search.
*/
public abstract void produceMetadataGraphSearchMetric(@Nonnull String input, @Nonnull String request,
@Nonnull String index, @Nonnull List<String> topHits, @Nonnull String api);

}
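
The production implementation of this new hook is not part of the diff. As a rough sketch only, an override could forward the five arguments to whatever metric or event sink the deployment uses; the class below is hypothetical, logs via SLF4J instead of publishing a real event, and is written as a standalone class because the other abstract members of BaseMetadataEventProducer are outside this commit.

import java.util.List;
import javax.annotation.Nonnull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch; not the real producer subclass used by the DAO in production.
public class LoggingSearchMetricProducer {

  private static final Logger LOG = LoggerFactory.getLogger(LoggingSearchMetricProducer.class);

  // Mirrors the new abstract signature: raw user input, serialized ES request, target index,
  // top hit URNs (or autocomplete suggestions), and the calling API ("search", "filter", "autocomplete").
  public void produceMetadataGraphSearchMetric(@Nonnull String input, @Nonnull String request,
      @Nonnull String index, @Nonnull List<String> topHits, @Nonnull String api) {
    LOG.info("search metric: api={}, index={}, input={}, topHits={}, request={}",
        api, index, input, topHits, request);
  }
}

A producer implementing this hook is handed to the DAO through the setMetadataEventProducer setter added to ESSearchDAO further down; if no producer is set, the DAO skips metric emission.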
Changed file 2 of 3: the no-op dummy implementation of BaseMetadataEventProducer
@@ -6,6 +6,7 @@
import com.linkedin.metadata.dummy.DummyAspect;
import com.linkedin.metadata.dummy.DummySnapshot;
import com.linkedin.metadata.events.IngestionMode;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

@@ -37,4 +38,10 @@ public <ASPECT extends RecordTemplate> void produceAspectSpecificMetadataAuditEv
@Nullable ASPECT oldValue, @Nonnull ASPECT newValue, @Nullable AuditStamp auditStamp, @Nullable IngestionMode ingestionMode) {
// Do nothing
}

@Override
public void produceMetadataGraphSearchMetric(@Nonnull String input, @Nonnull String request, @Nonnull String index,
@Nonnull List<String> topHits, @Nonnull String api) {
// Do nothing
}
}
Changed file 3 of 3: ESSearchDAO.java
@@ -10,6 +10,7 @@
import com.linkedin.metadata.dao.BaseSearchDAO;
import com.linkedin.metadata.dao.SearchResult;
import com.linkedin.metadata.dao.exception.ESQueryException;
import com.linkedin.metadata.dao.producer.BaseMetadataEventProducer;
import com.linkedin.metadata.dao.tracking.BaseTrackingManager;
import com.linkedin.metadata.dao.tracking.DummyTrackingManager;
import com.linkedin.metadata.dao.tracking.TrackingUtils;
@@ -29,6 +30,7 @@
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -41,6 +43,7 @@
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
@@ -80,6 +83,7 @@ public class ESSearchDAO<DOCUMENT extends RecordTemplate> extends BaseSearchDAO<
private BaseESAutoCompleteQuery _autoCompleteQueryForLowCardFields;
private BaseESAutoCompleteQuery _autoCompleteQueryForHighCardFields;
private BaseTrackingManager _baseTrackingManager;
private BaseMetadataEventProducer _baseMetadataEventProducer;
private int _maxTermBucketSize = DEFAULT_TERM_BUCKETS_SIZE_100;
private int _lowerBoundHits = Integer.MAX_VALUE;

@@ -131,6 +135,13 @@ public void setTrackTotalHits(int lowermost) {
_lowerBoundHits = lowermost;
}

/**
* Set BaseMetadataEventProducer.
*/
public void setMetadataEventProducer(BaseMetadataEventProducer baseMetadataEventProducer) {
_baseMetadataEventProducer = baseMetadataEventProducer;
}

@Nonnull
protected BaseESAutoCompleteQuery getAutocompleteQueryGenerator(@Nonnull String field) {
if (_config.getLowCardinalityFields() != null && _config.getLowCardinalityFields().contains(field)) {
@@ -222,6 +233,7 @@ public SearchResult<DOCUMENT> searchV2(@Nonnull String input, @Nullable Filter p
@Nonnull
public SearchResult<DOCUMENT> search(@Nonnull String input, @Nullable Filter postFilters,
@Nullable SortCriterion sortCriterion, @Nullable String preference, int from, int size, boolean multiFilters) {

// Step 0: TODO: Add type casting if needed and add request params validation against the model
final byte[] id = getRandomTrackingId();
_baseTrackingManager.trackRequest(id, SEARCH_QUERY_START);
@@ -230,6 +242,18 @@
// Step 2: execute the query and extract results, validated against document model as well
final SearchResult<DOCUMENT> searchResult = executeAndExtract(req, from, size, id, SEARCH_QUERY_FAIL);
_baseTrackingManager.trackRequest(id, SEARCH_QUERY_END);

if (_baseMetadataEventProducer != null) {
try {
List<String> uids = searchResult.getSearchResultMetadata().getUrns().stream().map(Urn::toString).collect(Collectors.toList());
_baseMetadataEventProducer.produceMetadataGraphSearchMetric(input, req.source().toString(),
_config.getIndexName(), uids, "search");
} catch (Exception e) {
log.error("Failed to emit search metrics for search api with index {}; exception {}",
_config.getIndexName(), ExceptionUtils.getStackTrace(e));
}
}

return searchResult;
}

@@ -242,9 +266,39 @@ public SearchResult<DOCUMENT> filter(@Nullable Filter filters, @Nullable SortCri
final SearchRequest searchRequest = getFilteredSearchQuery(filters, sortCriterion, from, size);
final SearchResult<DOCUMENT> searchResult = executeAndExtract(searchRequest, from, size, id, FILTER_QUERY_FAIL);
_baseTrackingManager.trackRequest(id, FILTER_QUERY_END);

if (_baseMetadataEventProducer != null) {

try {
List<String> uids = searchResult.getSearchResultMetadata().getUrns().stream().map(Urn::toString).collect(Collectors.toList());
_baseMetadataEventProducer.produceMetadataGraphSearchMetric(flattenFilter(filters), searchRequest.source().toString(),
_config.getIndexName(), uids, "filter");
} catch (Exception e) {
log.error("Failed to emit search metrics for filter api with index {}; exception {}",
_config.getIndexName(), ExceptionUtils.getStackTrace(e));
}

}

return searchResult;
}

private String flattenFilter(Filter filter) {
if (filter == null) {
return "null";
}

List<String> fields = new ArrayList<>();

for (Criterion criterion : filter.getCriteria()) {
fields.add(String.join(",", criterion.getField(), criterion.getCondition().name(), criterion.getValue()));
}

Collections.sort(fields);

return String.join(";", fields);
}

/**
* Returns a {@link SearchRequest} given filters to be applied to search query and sort criterion to be applied to
* search results.
@@ -482,6 +536,18 @@ public AutoCompleteResult autoComplete(@Nonnull String query, @Nullable String f
SearchResponse searchResponse = _client.search(req, RequestOptions.DEFAULT);
final AutoCompleteResult autoCompleteResult = extractAutoCompleteResult(searchResponse, query, field, limit);
_baseTrackingManager.trackRequest(id, AUTOCOMPLETE_QUERY_END);

if (_baseMetadataEventProducer != null) {
try {
List<String> uids = new ArrayList<>(autoCompleteResult.getSuggestions());
_baseMetadataEventProducer.produceMetadataGraphSearchMetric(query, req.source().toString(),
_config.getIndexName(), uids, "autocomplete");
} catch (Exception e) {
log.error("Failed to emit search metrics for autocomplete api with index {}; exception {}",
_config.getIndexName(), ExceptionUtils.getStackTrace(e));
}
}

return autoCompleteResult;
} catch (Exception e) {
log.error("Auto complete query failed:" + e.getMessage());
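
For the filter API, the emitted input string is not the raw Filter but the value built by flattenFilter above. The snippet below is an illustration only: flattenFilter is private to ESSearchDAO, and the CriterionArray constructor and fluent setters are assumed to follow the usual generated-model pattern rather than taken from this commit.

// Assumed builder calls on the generated Filter/Criterion models (not shown in this diff);
// requires java.util.Arrays.
Filter filter = new Filter().setCriteria(new CriterionArray(Arrays.asList(
    new Criterion().setField("status").setCondition(Condition.EQUAL).setValue("ACTIVE"),
    new Criterion().setField("platform").setCondition(Condition.EQUAL).setValue("hdfs"))));

// Each criterion becomes "field,CONDITION,value"; the entries are sorted and joined with ';',
// so the metric input is "platform,EQUAL,hdfs;status,EQUAL,ACTIVE" regardless of criteria order,
// and a null filter is reported as the literal string "null".

Sorting the per-criterion strings keeps the metric key stable, so the same logical filter always aggregates under a single input value.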
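
Finally, a minimal wiring sketch, assuming the DAO instance is constructed elsewhere exactly as before and only the new setter call is added; the helper class name is hypothetical, and the ESSearchDAO package in the import is an assumption since it does not appear in this diff.

import com.linkedin.metadata.dao.producer.BaseMetadataEventProducer;
import com.linkedin.metadata.dao.search.ESSearchDAO;  // package assumed; not shown in this diff
import javax.annotation.Nonnull;

// Hypothetical helper: the commit only adds the setter; DAO construction is untouched.
public final class SearchMetricWiring {
  private SearchMetricWiring() {
  }

  public static void enableSearchMetrics(@Nonnull ESSearchDAO<?> dao,
      @Nonnull BaseMetadataEventProducer producer) {
    // With a producer set, search(), filter() and autoComplete() each emit one metric per call;
    // with none (or the no-op dummy), behavior is unchanged.
    dao.setMetadataEventProducer(producer);
  }
}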