From 3c6651fd86cf1602b20482d2126460b9191efde2 Mon Sep 17 00:00:00 2001 From: Folyd Date: Sun, 5 Apr 2020 12:28:24 +0800 Subject: [PATCH] Fix some clang-tidy and PVS-Studio warnings --- robots.cc | 12 +++++------- robots.h | 10 +++++----- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/robots.cc b/robots.cc index bdbccea..81be9d1 100644 --- a/robots.cc +++ b/robots.cc @@ -54,7 +54,7 @@ namespace googlebot { // Match. class RobotsMatchStrategy { public: - virtual ~RobotsMatchStrategy() {} + virtual ~RobotsMatchStrategy() = default; virtual int MatchAllow(absl::string_view path, absl::string_view pattern) = 0; @@ -120,8 +120,6 @@ static const char* kHexDigits = "0123456789ABCDEF"; // authority, and fragment. Result always starts with "/". // Returns "/" if the url doesn't have a path or is not valid. std::string GetPathParamsQuery(const std::string& url) { - std::string path; - // Initial two slashes are ignored. size_t search_start = 0; if (url.size() >= 2 && url[0] == '/' && url[1] == '/') search_start = 2; @@ -291,7 +289,7 @@ class RobotsTxtParser { static void StripWhitespaceSlowly(char ** s); void ParseAndEmitLine(int current_line, char* line); - bool NeedEscapeValueForKey(const Key& key); + static bool NeedEscapeValueForKey(const Key& key); absl::string_view robots_body_; RobotsParseHandler* const handler_; @@ -351,7 +349,7 @@ bool RobotsTxtParser::GetKeyAndValueFrom(char ** key, char ** value, *sep = '\0'; // And stops at the separator. StripWhitespaceSlowly(key); // Get rid of any trailing whitespace. - if (strlen(*key) > 0) { + if (*key[0] != '\0') { *value = 1 + sep; // Value starts after the separator. StripWhitespaceSlowly(value); // Get rid of any leading whitespace. return true; @@ -438,7 +436,7 @@ void RobotsTxtParser::Parse() { // characters matched by a pattern is returned as its match priority. 
class LongestMatchRobotsMatchStrategy : public RobotsMatchStrategy { public: - LongestMatchRobotsMatchStrategy() { } + LongestMatchRobotsMatchStrategy() = default; // Disallow copying and assignment. LongestMatchRobotsMatchStrategy(const LongestMatchRobotsMatchStrategy&) = @@ -527,7 +525,7 @@ bool RobotsMatcher::disallow_ignore_global() const { return false; } -const int RobotsMatcher::matching_line() const { +int RobotsMatcher::matching_line() const { if (ever_seen_specific_agent_) { return Match::HigherPriorityMatch(disallow_.specific, allow_.specific) .line(); diff --git a/robots.h b/robots.h index adccef5..a789fac 100644 --- a/robots.h +++ b/robots.h @@ -44,8 +44,8 @@ namespace googlebot { // ParseRobotsTxt() in the sequence they have been found in the file. class RobotsParseHandler { public: - RobotsParseHandler() {} - virtual ~RobotsParseHandler() {} + RobotsParseHandler() = default; + virtual ~RobotsParseHandler() = default; // Disallow copying and assignment. RobotsParseHandler(const RobotsParseHandler&) = delete; @@ -111,7 +111,7 @@ class RobotsMatcher : protected RobotsParseHandler { // [a-zA-Z_-]. static bool IsValidUserAgentToObey(absl::string_view user_agent); - // Returns true iff 'url' is allowed to be fetched by any member of the + // Returns true if 'url' is allowed to be fetched by any member of the // "user_agents" vector. 'url' must be %-encoded according to RFC3986. bool AllowedByRobots(absl::string_view robots_body, const std::vector* user_agents, @@ -131,12 +131,12 @@ class RobotsMatcher : protected RobotsParseHandler { // the specified user agents. bool disallow_ignore_global() const; - // Returns true iff, when AllowedByRobots() was called, the robots file + // Returns true if, when AllowedByRobots() was called, the robots file // referred explicitly to one of the specified user agents. bool ever_seen_specific_agent() const; // Returns the line that matched or 0 if none matched. 
- const int matching_line() const; + int matching_line() const; protected: // Parse callbacks.