From 289f85223cb7002e1b9a5a1ce4fbe47b4b30ae2a Mon Sep 17 00:00:00 2001
From: Linus Wallgren <linus@nectarinehealth.com>
Date: Mon, 11 Jan 2021 13:56:04 +0100
Subject: [PATCH 1/2] Register with FQDN in URL

This replaces having the FQDN in the body in order to make client
certificate validation easier.

The proxy will continue to work with older clients as we still keep the
old endpoint. However any calls to `/poll/*` will use the FQDN provided
in the URL for registration.

Signed-off-by: Linus Wallgren <linus@nectarinehealth.com>
---
 cmd/client/main.go |  2 +-
 cmd/proxy/main.go  | 13 ++++++++++++-
 2 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/cmd/client/main.go b/cmd/client/main.go
index 74d715b..b8f2d47 100644
--- a/cmd/client/main.go
+++ b/cmd/client/main.go
@@ -174,7 +174,7 @@ func loop(c Coordinator, client *http.Client) error {
 		level.Error(c.logger).Log("msg", "Error parsing url:", "err", err)
 		return errors.Wrap(err, "error parsing url")
 	}
-	u, err := url.Parse("poll")
+	u, err := url.Parse("poll/" + *myFqdn)
 	if err != nil {
 		level.Error(c.logger).Log("msg", "Error parsing url:", "err", err)
 		return errors.Wrap(err, "error parsing url poll")
diff --git a/cmd/proxy/main.go b/cmd/proxy/main.go
index 4ea2246..2f93a13 100644
--- a/cmd/proxy/main.go
+++ b/cmd/proxy/main.go
@@ -99,6 +99,7 @@ func newHTTPHandler(logger log.Logger, coordinator *Coordinator, mux *http.Serve
 	handlers := map[string]http.HandlerFunc{
 		"/push":    h.handlePush,
 		"/poll":    h.handlePoll,
+		"/poll/":   h.handlePollWithPath,
 		"/clients": h.handleListClients,
 		"/metrics": promhttp.Handler().ServeHTTP,
 	}
@@ -144,7 +145,17 @@ func (h *httpHandler) handlePush(w http.ResponseWriter, r *http.Request) {
 // handlePoll handles clients registering and asking for scrapes.
 func (h *httpHandler) handlePoll(w http.ResponseWriter, r *http.Request) {
 	fqdn, _ := ioutil.ReadAll(r.Body)
-	request, err := h.coordinator.WaitForScrapeInstruction(strings.TrimSpace(string(fqdn)))
+	h.pollWithFQDN(string(fqdn), w)
+}
+
+// handlePollWithPath handles clients registering and asking for scrapes, taking the FQDN from the URL path.
+func (h *httpHandler) handlePollWithPath(w http.ResponseWriter, r *http.Request) {
+	fqdn := r.URL.Path[len("/poll/"):]
+	h.pollWithFQDN(fqdn, w)
+}
+
+func (h *httpHandler) pollWithFQDN(fqdn string, w http.ResponseWriter) {
+	request, err := h.coordinator.WaitForScrapeInstruction(strings.TrimSpace(fqdn))
 	if err != nil {
 		level.Info(h.logger).Log("msg", "Error WaitForScrapeInstruction:", "err", err)
 		http.Error(w, fmt.Sprintf("Error WaitForScrapeInstruction: %s", err.Error()), 408)

From acb1c610c026ec18c0baa500f5af559f0364d630 Mon Sep 17 00:00:00 2001
From: Linus Wallgren <linus@nectarinehealth.com>
Date: Mon, 11 Jan 2021 13:56:35 +0100
Subject: [PATCH 2/2] Document ID header properties

It needs to be cryptographically random so that each client is uniquely
identified and cannot be mistaken for another client.

If it is not cryptographically random, one client could potentially guess
the ID and thus send data as if it were another client.

Signed-off-by: Linus Wallgren <linus@nectarinehealth.com>
---
 cmd/proxy/coordinator.go | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/cmd/proxy/coordinator.go b/cmd/proxy/coordinator.go
index cf2bb42..41982ec 100644
--- a/cmd/proxy/coordinator.go
+++ b/cmd/proxy/coordinator.go
@@ -25,9 +25,9 @@ import (
 	"github.com/go-kit/kit/log"
 	"github.com/go-kit/kit/log/level"
 	"github.com/google/uuid"
+	"github.com/prometheus-community/pushprox/util"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/client_golang/prometheus/promauto"
-	"github.com/prometheus-community/pushprox/util"
 )
 
 var (
@@ -73,6 +73,8 @@ func NewCoordinator(logger log.Logger) (*Coordinator, error) {
 }
 
 // Generate a unique ID
+// It is important this ID is cryptographically unique to ensure clients can't
+// be mixed up.
 func (c *Coordinator) genID() (string, error) {
 	id, err := uuid.NewRandom()
 	return id.String(), err
@@ -114,6 +116,8 @@ func (c *Coordinator) DoScrape(ctx context.Context, r *http.Request) (*http.Resp
 		return nil, err
 	}
 	level.Info(c.logger).Log("msg", "DoScrape", "scrape_id", id, "url", r.URL.String())
+	// It is important this id is cryptographically generated as it is relied
+	// upon to match the request and the response.
 	r.Header.Add("Id", id)
 	select {
 	case <-ctx.Done():