diff --git a/cmd/client/main.go b/cmd/client/main.go
index 74d715b..b8f2d47 100644
--- a/cmd/client/main.go
+++ b/cmd/client/main.go
@@ -174,7 +174,7 @@ func loop(c Coordinator, client *http.Client) error {
 		level.Error(c.logger).Log("msg", "Error parsing url:", "err", err)
 		return errors.Wrap(err, "error parsing url")
 	}
-	u, err := url.Parse("poll")
+	u, err := url.Parse("poll/" + *myFqdn)
 	if err != nil {
 		level.Error(c.logger).Log("msg", "Error parsing url:", "err", err)
 		return errors.Wrap(err, "error parsing url poll")
diff --git a/cmd/proxy/coordinator.go b/cmd/proxy/coordinator.go
index cf2bb42..97f4ed2 100644
--- a/cmd/proxy/coordinator.go
+++ b/cmd/proxy/coordinator.go
@@ -25,9 +25,9 @@ import (
 	"github.com/go-kit/kit/log"
 	"github.com/go-kit/kit/log/level"
 	"github.com/google/uuid"
+	"github.com/prometheus-community/pushprox/util"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
-	"github.com/prometheus-community/pushprox/util"
 )
 
 var (
@@ -114,6 +114,8 @@ func (c *Coordinator) DoScrape(ctx context.Context, r *http.Request) (*http.Resp
 		return nil, err
 	}
 	level.Info(c.logger).Log("msg", "DoScrape", "scrape_id", id, "url", r.URL.String())
+	// It is important that this ID is cryptographically generated, as it is
+	// relied upon to match requests with responses.
 	r.Header.Add("Id", id)
 	select {
 	case <-ctx.Done():
diff --git a/cmd/proxy/main.go b/cmd/proxy/main.go
index 4ea2246..2f93a13 100644
--- a/cmd/proxy/main.go
+++ b/cmd/proxy/main.go
@@ -99,6 +99,7 @@ func newHTTPHandler(logger log.Logger, coordinator *Coordinator, mux *http.Serve
 	handlers := map[string]http.HandlerFunc{
 		"/push":    h.handlePush,
 		"/poll":    h.handlePoll,
+		"/poll/":   h.handlePollWithPath,
 		"/clients": h.handleListClients,
 		"/metrics": promhttp.Handler().ServeHTTP,
 	}
@@ -144,7 +145,17 @@ func (h *httpHandler) handlePush(w http.ResponseWriter, r *http.Request) {
 // handlePoll handles clients registering and asking for scrapes.
 func (h *httpHandler) handlePoll(w http.ResponseWriter, r *http.Request) {
 	fqdn, _ := ioutil.ReadAll(r.Body)
-	request, err := h.coordinator.WaitForScrapeInstruction(strings.TrimSpace(string(fqdn)))
+	h.pollWithFQDN(string(fqdn), w)
+}
+
+// handlePollWithPath handles clients registering and asking for scrapes, with the FQDN in the URL path.
+func (h *httpHandler) handlePollWithPath(w http.ResponseWriter, r *http.Request) {
+	fqdn := r.URL.Path[len("/poll/"):]
+	h.pollWithFQDN(fqdn, w)
+}
+
+func (h *httpHandler) pollWithFQDN(fqdn string, w http.ResponseWriter) {
+	request, err := h.coordinator.WaitForScrapeInstruction(strings.TrimSpace(fqdn))
 	if err != nil {
 		level.Info(h.logger).Log("msg", "Error WaitForScrapeInstruction:", "err", err)
 		http.Error(w, fmt.Sprintf("Error WaitForScrapeInstruction: %s", err.Error()), 408)
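
For illustration, a minimal sketch of how a client could poll against the new path-based route. The proxy address and FQDN below are made up for the example, and it assumes the proxy accepts a POST with an empty body on /poll/<fqdn> (the existing body-based /poll route remains supported for older clients):

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Hypothetical proxy address and client FQDN, for illustration only.
	base, _ := url.Parse("http://pushprox-proxy:8080")
	fqdn := "client.example.com"

	// As in the client change above, the FQDN now travels in the URL path
	// ("poll/<fqdn>") instead of only in the POST body.
	u, _ := url.Parse("poll/" + fqdn)
	resp, err := http.Post(base.ResolveReference(u).String(), "", nil)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The proxy blocks until a scrape is scheduled for this FQDN, then
	// responds with the serialized scrape request.
	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Printf("scrape instruction:\n%s", body)
}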