diff --git a/.github/prerelease.sh b/.github/prerelease.sh index 52d08209..86c09b70 100755 --- a/.github/prerelease.sh +++ b/.github/prerelease.sh @@ -21,12 +21,15 @@ function cleanup { } trap cleanup EXIT +# Create some directories to avoid race errors on snap packages build +mkdir -p ${HOME}/.cache/snapcraft/{download,stage-packages} + # Build the binaries using a prerelease tag git tag -d edge git tag -f ${PRERELEASE_TAG} goreleaser release \ - --rm-dist \ - --skip-validate \ + --clean \ + --skip=validate \ -f .goreleaser.pre.yml # Delete existing assets from the edge prerelease on GitHub diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 27194005..b90cf2b6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,60 +10,66 @@ on: jobs: release: - runs-on: ubuntu-latest + runs-on: ubuntu-24.04 env: DOCKER_CLI_EXPERIMENTAL: 'enabled' steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 with: fetch-depth: 0 - name: Set up QEMU - uses: docker/setup-qemu-action@v1 + uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3 - - name: Docker Login - uses: docker/login-action@v1 + - name: docker.io Login + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3 + with: + registry: docker.io + username: ${{ github.repository_owner }} + password: ${{ secrets.DOCKER_HUB_TOKEN }} + + - name: ghcr.io login + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3 with: registry: ghcr.io username: ${{ github.repository_owner }} password: ${{ secrets.GH_PAT }} - - name: Docker Login - uses: docker/login-action@v1 + - name: quay.io Login + uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3 with: - registry: docker.io + registry: 
quay.io username: ${{ github.repository_owner }} - password: ${{ secrets.DOCKER_HUB_TOKEN }} + password: ${{ secrets.QUAY_TOKEN }} - name: Snapcraft config - uses: samuelmeuli/action-snapcraft@v1 - with: - snapcraft_token: ${{ secrets.SNAPCRAFT_TOKEN }} + uses: samuelmeuli/action-snapcraft@d33c176a9b784876d966f80fb1b461808edc0641 # v2 - name: Set up Go - uses: actions/setup-go@v2 + uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5 with: - go-version: 1.16 + go-version: '1.22' - name: Import GPG key - uses: crazy-max/ghaction-import-gpg@v3 + uses: crazy-max/ghaction-import-gpg@cb9bde2e2525e640591a934b1fd28eef1dcaf5e5 # v6 with: - gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} + gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} passphrase: ${{ secrets.GPG_PASSPHRASE }} - name: Install goreleaser - uses: goreleaser/goreleaser-action@v2 + uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6 with: - version: v0.159.0 + version: v1.24.0 install-only: true - + - name: Run goreleaser run: make ${{ github.ref == 'refs/heads/main' && 'pre' || '' }}release env: GITHUB_TOKEN: ${{ secrets.GH_PAT }} + SNAPCRAFT_STORE_CREDENTIALS: ${{ secrets.SNAPCRAFT_TOKEN }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cae9139a..9811b80d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -15,36 +15,32 @@ jobs: test: strategy: matrix: - go-version: - - 1.16 os: - - ubuntu-20.04 - - macos-10.15 - - macos-11.0 - # Would need to sort out some FS issues: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/runs/1565120934 - # - windows-2019 + - ubuntu-24.04 + - macos-13 + - windows-2022 runs-on: ${{ matrix.os }} steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - name: Install Go - uses: actions/setup-go@v2 + uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5 with: - go-version: ${{ 
matrix.go-version }} + go-version: '1.23' - name: Lint - if: ${{ matrix.os == 'ubuntu-20.04' && matrix.go-version == '1.16' }} + if: ${{ matrix.os == 'ubuntu-24.04' }} run: make lint - name: Test - run: make coverage + run: make test - name: Publish coverage to coveralls.io - uses: shogo82148/actions-goveralls@v1 - if: ${{ matrix.os == 'ubuntu-20.04' && matrix.go-version == '1.16' }} + uses: shogo82148/actions-goveralls@785c9d68212c91196d3994652647f8721918ba11 # v1 + if: ${{ matrix.os == 'ubuntu-24.04' }} with: path-to-profile: coverage.out diff --git a/.gitignore b/.gitignore index 30e71080..fdf3dae2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ dist +helpers vendor **/*.tgz /gitlab-ci-pipelines-exporter.yml @@ -7,4 +8,5 @@ gitlab-ci-pipelines-exporter !cmd/gitlab-ci-pipelines-exporter !examples/**/gitlab-ci-pipelines-exporter coverage.out -.waypoint +.*.sock +.idea diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 00000000..9dd7f706 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,50 @@ +linters: + enable-all: true + disable: + # Deprecated + - gomnd + + # We don't want these ones + - forcetypeassert + - gochecknoglobals + - godox + - ireturn + - nakedret + - testpackage + - varnamelen + - interfacebloat + - wsl + + # TODO + - tagliatelle + - promlinter + - paralleltest + - gocognit + - gomoddirectives + - forbidigo + - goconst + - mnd + - lll + - dupl + - depguard + - tagalign + +linters-settings: + funlen: + lines: -1 # (disabled) + statements: 100 + + cyclop: + max-complexity: 20 + + lll: + line-length: 140 + + nestif: + min-complexity: 18 + + gci: + sections: + - standard + - default + - prefix(github.com/mvisonneau) diff --git a/.goreleaser.pre.yml b/.goreleaser.pre.yml index 15193083..18d7c60f 100644 --- a/.goreleaser.pre.yml +++ b/.goreleaser.pre.yml @@ -1,6 +1,7 @@ before: hooks: - - go mod download + - make man-pages + - make autocomplete-scripts builds: - main: ./cmd/gitlab-ci-pipelines-exporter @@ -13,39 +14,77 @@ builds: 
goarch: - 386 - amd64 + - arm - arm64 + goarm: [6, 7] flags: - -trimpath - ignore: - - goos: darwin - goarch: 386 + +universal_binaries: + - {} archives: - - name_template: '{{ .ProjectName }}_edge_{{ .Os }}_{{ .Arch }}' + - name_template: '{{ .ProjectName }}_edge_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' format_overrides: - goos: windows format: zip + files: + - README.md + - LICENSE + - helpers/* + +nfpms: + - maintainer: &author Maxime VISONNEAU + description: &description GitLab CI pipelines exporter (prometheus/open-metrics) + license: &license Apache-2.0 + homepage: &homepage https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + vendor: *author + file_name_template: '{{ .ProjectName }}_edge_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' + bindir: /usr/local/bin + formats: + - apk + - deb + - rpm + contents: + - src: ./helpers/autocomplete/bash + dst: /usr/share/bash-completion/completions/{{ .ProjectName }} + file_info: + mode: 0644 + - src: ./helpers/autocomplete/zsh + dst: /usr/share/zsh/vendor-completions/_{{ .ProjectName }} + file_info: + mode: 0644 + - src: ./helpers/manpages/{{ .ProjectName }}.1.gz + dst: /usr/share/man/man1/{{ .ProjectName }}.1.gz + file_info: + mode: 0644 + - src: ./LICENSE + dst: /usr/share/doc/{{ .ProjectName }}/copyright + file_info: + mode: 0644 release: disable: true -snapcrafts: - - summary: GitLab CI pipelines exporter (prometheus/open-metrics) - description: Monitor GitLab CI pipelines related metrics. - license: Apache-2.0 - confinement: strict - grade: devel - apps: - gitlab-ci-pipelines-exporter: - plugs: [home, network, network-bind] +# Disabling until https://github.com/canonical/snapcraft/issues/4769 is resolved +#snapcrafts: +# - summary: GitLab CI pipelines exporter (prometheus/open-metrics) +# description: Monitor GitLab CI pipelines related metrics. 
+# license: Apache-2.0 +# confinement: strict +# grade: devel +# apps: +# gitlab-ci-pipelines-exporter: +# plugs: [home, network, network-bind] dockers: - image_templates: - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-amd64' - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-amd64' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-amd64' ids: [gitlab-ci-pipelines-exporter] goarch: amd64 - use_buildx: true + use: buildx build_flag_templates: - --platform=linux/amd64 - --label=org.opencontainers.image.title={{ .ProjectName }} @@ -58,13 +97,52 @@ dockers: - --label=org.opencontainers.image.licenses=Apache-2.0 - image_templates: - - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64v8' - - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64v8' + - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64' + - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64' ids: [gitlab-ci-pipelines-exporter] goarch: arm64 - use_buildx: true + use: buildx build_flag_templates: - - --platform=linux/arm64/v8 + - --platform=linux/arm64 + - --label=org.opencontainers.image.title={{ .ProjectName }} + - --label=org.opencontainers.image.description={{ .ProjectName }} + - --label=org.opencontainers.image.url=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.source=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.version={{ .Version }} + - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} + - --label=org.opencontainers.image.revision={{ .FullCommit }} + - --label=org.opencontainers.image.licenses=Apache-2.0 + + - image_templates: + - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv6' + - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv6' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv6' + ids: 
[gitlab-ci-pipelines-exporter] + goarch: arm + goarm: 6 + use: buildx + build_flag_templates: + - --platform=linux/arm/v6 + - --label=org.opencontainers.image.title={{ .ProjectName }} + - --label=org.opencontainers.image.description={{ .ProjectName }} + - --label=org.opencontainers.image.url=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.source=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.version={{ .Version }} + - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} + - --label=org.opencontainers.image.revision={{ .FullCommit }} + - --label=org.opencontainers.image.licenses=Apache-2.0 + + - image_templates: + - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv7' + - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv7' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv7' + ids: [gitlab-ci-pipelines-exporter] + goarch: arm + goarm: 7 + use: buildx + build_flag_templates: + - --platform=linux/arm/v7 - --label=org.opencontainers.image.title={{ .ProjectName }} - --label=org.opencontainers.image.description={{ .ProjectName }} - --label=org.opencontainers.image.url=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter @@ -78,12 +156,23 @@ docker_manifests: - name_template: docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest image_templates: - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-amd64 - - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64v8 + - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64 + - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv6 + - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv7 - name_template: ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest image_templates: - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-amd64 - - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64v8 + - 
ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64 + - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv6 + - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv7 + + - name_template: quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest + image_templates: + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-amd64 + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-arm64 + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv6 + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:latest-armv7 signs: - artifacts: checksum @@ -102,4 +191,4 @@ checksum: algorithm: sha512 changelog: - skip: true + disable: true diff --git a/.goreleaser.yml b/.goreleaser.yml index 62594225..a97f5055 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -1,6 +1,7 @@ before: hooks: - - go mod download + - make man-pages + - make autocomplete-scripts builds: - main: ./cmd/gitlab-ci-pipelines-exporter @@ -13,18 +14,24 @@ builds: goarch: - 386 - amd64 + - arm - arm64 + goarm: [6, 7] flags: - -trimpath - ignore: - - goos: darwin - goarch: 386 + +universal_binaries: + - {} archives: - - name_template: '{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}' + - name_template: '{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' format_overrides: - goos: windows format: zip + files: + - README.md + - LICENSE + - helpers/* nfpms: - maintainer: &author Maxime VISONNEAU @@ -32,43 +39,72 @@ nfpms: license: &license Apache-2.0 homepage: &homepage https://github.com/mvisonneau/gitlab-ci-pipelines-exporter vendor: *author - file_name_template: '{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}' + file_name_template: '{{ .ProjectName }}_{{ .Tag }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' + bindir: /usr/local/bin formats: + - apk - deb - rpm + contents: + - src: ./helpers/autocomplete/bash + dst: /usr/share/bash-completion/completions/{{ .ProjectName }} + file_info: + mode: 0644 + - src: 
./helpers/autocomplete/zsh + dst: /usr/share/zsh/vendor-completions/_{{ .ProjectName }} + file_info: + mode: 0644 + - src: ./helpers/manpages/{{ .ProjectName }}.1.gz + dst: /usr/share/man/man1/{{ .ProjectName }}.1.gz + file_info: + mode: 0644 + - src: ./LICENSE + dst: /usr/share/doc/{{ .ProjectName }}/copyright + file_info: + mode: 0644 brews: - description: *description homepage: *homepage + license: *license folder: Formula - tap: + repository: owner: mvisonneau name: homebrew-tap + test: | + system "#{bin}/{{ .ProjectName }} -v" + install: |- + bin.install "{{ .ProjectName }}" + bash_completion.install "./helpers/autocomplete/bash" => "{{ .ProjectName }}" + zsh_completion.install "./helpers/autocomplete/zsh" => "_{{ .ProjectName }}" + man1.install "./helpers/manpages/{{ .ProjectName }}.1.gz" -scoop: - description: *description - homepage: *homepage - license: *license - bucket: - owner: mvisonneau - name: scoops - -snapcrafts: - - summary: *description - description: Monitor GitLab CI pipelines related metrics. +scoops: + - description: *description + homepage: *homepage license: *license - grade: stable - apps: - gitlab-ci-pipelines-exporter: - plugs: [home, network, network-bind] + repository: + owner: mvisonneau + name: scoops + +# Disabling until https://github.com/canonical/snapcraft/issues/4769 is resolved +#snapcrafts: +# - summary: *description +# description: Monitor GitLab CI pipelines related metrics. 
+# license: *license +# grade: stable +# apps: +# gitlab-ci-pipelines-exporter: +# plugs: [home, network, network-bind] dockers: - image_templates: - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-amd64' - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-amd64' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-amd64' ids: [gitlab-ci-pipelines-exporter] goarch: amd64 - use_buildx: true + use: buildx build_flag_templates: - --platform=linux/amd64 - --label=org.opencontainers.image.title={{ .ProjectName }} @@ -81,13 +117,52 @@ dockers: - --label=org.opencontainers.image.licenses=Apache-2.0 - image_templates: - - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64v8' - - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64v8' + - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64' + - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64' ids: [gitlab-ci-pipelines-exporter] goarch: arm64 - use_buildx: true + use: buildx + build_flag_templates: + - --platform=linux/arm64 + - --label=org.opencontainers.image.title={{ .ProjectName }} + - --label=org.opencontainers.image.description={{ .ProjectName }} + - --label=org.opencontainers.image.url=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.source=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.version={{ .Version }} + - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} + - --label=org.opencontainers.image.revision={{ .FullCommit }} + - --label=org.opencontainers.image.licenses=Apache-2.0 + + - image_templates: + - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv6' + - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv6' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv6' + ids: 
[gitlab-ci-pipelines-exporter] + goarch: arm + goarm: 6 + use: buildx + build_flag_templates: + - --platform=linux/arm/v6 + - --label=org.opencontainers.image.title={{ .ProjectName }} + - --label=org.opencontainers.image.description={{ .ProjectName }} + - --label=org.opencontainers.image.url=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.source=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter + - --label=org.opencontainers.image.version={{ .Version }} + - --label=org.opencontainers.image.created={{ time "2006-01-02T15:04:05Z07:00" }} + - --label=org.opencontainers.image.revision={{ .FullCommit }} + - --label=org.opencontainers.image.licenses=Apache-2.0 + + - image_templates: + - 'docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv7' + - 'ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv7' + - 'quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv7' + ids: [gitlab-ci-pipelines-exporter] + goarch: arm + goarm: 7 + use: buildx build_flag_templates: - - --platform=linux/arm64/v8 + - --platform=linux/arm/v7 - --label=org.opencontainers.image.title={{ .ProjectName }} - --label=org.opencontainers.image.description={{ .ProjectName }} - --label=org.opencontainers.image.url=https://github.com/mvisonneau/gitlab-ci-pipelines-exporter @@ -101,12 +176,23 @@ docker_manifests: - name_template: docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }} image_templates: - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-amd64 - - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64v8 + - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64 + - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv6 + - docker.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv7 - name_template: ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }} image_templates: - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-amd64 - - 
ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64v8 + - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64 + - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv6 + - ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv7 + + - name_template: quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }} + image_templates: + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-amd64 + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-arm64 + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv6 + - quay.io/mvisonneau/gitlab-ci-pipelines-exporter:{{ .Tag }}-armv7 checksum: name_template: '{{ .ProjectName }}_{{ .Tag }}_sha512sums.txt' @@ -125,4 +211,4 @@ signs: ] changelog: - skip: true + disable: true diff --git a/.revive.toml b/.revive.toml deleted file mode 100644 index a1ccc86d..00000000 --- a/.revive.toml +++ /dev/null @@ -1,31 +0,0 @@ -ignoreGeneratedHeader = false -severity = "warning" -confidence = 0.8 -errorCode = 1 -warningCode = 1 - -[rule.blank-imports] -[rule.context-as-argument] -[rule.context-keys-type] -[rule.cyclomatic] - arguments = [29] -[rule.dot-imports] -[rule.error-return] -[rule.error-strings] -[rule.error-naming] -[rule.exported] -[rule.if-return] -[rule.increment-decrement] -[rule.var-naming] -[rule.package-comments] -[rule.range] -[rule.receiver-naming] -[rule.time-naming] -[rule.unexported-return] -[rule.indent-error-flow] -[rule.errorf] -[rule.empty-block] -[rule.superfluous-else] -[rule.unused-parameter] -[rule.unreachable-code] -[rule.redefines-builtin-id] \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 89c2c567..198ac704 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,203 @@ and this project adheres to [0ver](https://0ver.org) (more or less). ## [Unreleased] +## [v0.5.8] - 2024-05-03 + +### Changed + +* fix(devenv): Fix devenv rule in makefile. 
by @audig in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/822 +* Add gitlab-health-url flag and envvar by @Thor77 in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/800 +* fix project pull webhook args by @fitz7 in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/828 +* go.mod: updated most dependencies + +## [v0.5.7] - 2024-03-03 + +### Changed + +* fix environment id hyperlink NaN by @toby181 in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/776 +* webhooks: handle ref and tag deletion events by @sysedwinistrator in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/794 +* webhooks: only pull project for which request was received by @sysedwinistrator in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/793 +* ratelimit: redis should retry if allowed requests exceeded by @bkylerussell in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/789 +* chore: dependencies, tooling and go updates by @mvisonneau in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/799 + +## [v0.5.6] - 2023-12-06 + +### Changed + +* fix default branch regexp by @gnomus in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/665 +* Add test cases metrics from pipeline reports by @karpanin in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/675 +* bump go-gitlab version, bump golang version for make dev-env by @jasonwliu in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/670 +* Combine test report metrics with childrens reports by @jasonwliu in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/672 +* Fix misformed url to Gitlab pipeline by @fredsnap in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/650 +* Update go-gitlab to fix test reports by @clawoflight in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/686 +* Update GitLab logo to new branding in Grafana dashboard quickstart by 
@dnsmichi in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/694 +* Add tag_list label to metrics by @mpetke in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/499 +* pipelines: store the source of the pipeline as well by @mathstuf in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/723 +* feat: failure reason to job status by @strpc in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/718 +* build(deps): bump github.com/xanzy/go-gitlab from 0.92.3 to 0.94.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/743 +* build(deps): bump github.com/charmbracelet/lipgloss from 0.8.0 to 0.9.1 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/735 +* gitlab/client: guard against `nil` responses by @mathstuf in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/729 +* build(deps): bump golang.org/x/net from 0.15.0 to 0.17.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/732 +* build(deps): bump github.com/alicebob/miniredis/v2 from 2.23.0 to 2.31.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/734 +* build(deps): bump github.com/google/uuid from 1.3.1 to 1.4.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/737 +* build(deps): bump golang.org/x/time from 0.3.0 to 0.4.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/741 +* build(deps): bump github.com/go-playground/validator/v10 from 10.15.4 to 10.16.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/742 +* build(deps): bump github.com/redis/go-redis/v9 from 9.2.1 to 9.3.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/740 +* build(deps): bump github.com/vmihailenco/msgpack/v5 from 5.4.0 to 5.4.1 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/739 +* 
build(deps): bump google.golang.org/grpc from 1.58.2 to 1.59.0 by @dependabot in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/736 +* feat: use keyset pagination for retrieving project CI jobs by @stanhu in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/744 +* fix: use keyset pagination only when it is supported by @stanhu in https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/pull/755 + +## [v0.5.5] - 2023-05-22 + +### Added + +- new metrics: + - `gitlab_ci_pipeline_test_report_total_time` -> Duration in seconds of all the tests in the most recently finished pipeline + - `gitlab_ci_pipeline_test_report_total_count` -> Number of total tests in the most recently finished pipeline + - `gitlab_ci_pipeline_test_report_success_count` -> Number of successful tests in the most recently finished pipeline + - `gitlab_ci_pipeline_test_report_failed_count` -> Number of failed tests in the most recently finished pipeline + - `gitlab_ci_pipeline_test_report_skipped_count` -> Number of skipped tests in the most recently finished pipeline + - `gitlab_ci_pipeline_test_report_error_count` -> Number of errored tests in the most recently finished pipeline + - `gitlab_ci_pipeline_test_suite_total_time` -> Duration in seconds for the test suite + - `gitlab_ci_pipeline_test_suite_total_count` -> Number of total tests for the test suite + - `gitlab_ci_pipeline_test_suite_success_count` -> Number of successful tests for the test suite + - `gitlab_ci_pipeline_test_suite_failed_count` -> Number of failed tests for the test suite + - `gitlab_ci_pipeline_test_suite_skipped_count` -> Number of skipped tests for the test suite + - `gitlab_ci_pipeline_test_suite_error_count` -> Duration in errored tests for the test suite +- new configuration parameter: `gitlab.burstable_requests_per_second`, introducing a burstable amount of API RPS +- new configuration parameter: `gitlab.maximum_jobs_queue_size`, controlling the queue buffer size +- new label for 
pipelines and jobs: `source` to indicate the reason the pipeline started + +### Changed + +- Upgraded golang to **v1.20** +- Upgraded most dependencies to their latest versions +- Reduced the amount of data being pulled from the project list API calls + +## [v0.5.4] - 2022-08-25 + +### Added + +- Kickstarted tracing support through `opentelemetry` implementation for most of the network calls +- Now passing a `context.Context` to most functional calls +- Aggregated already used linters and added new ones through the implementation of `golangci` +- Release `.apk` packages for Alpine linux +- Added man pages and autocompletion scripts (bash & zsh) to `.apk`, `.deb`, `.rpm` & `homebrew` packages +- Release "fat" binaries (arm64 + amd64 combined) for MacOS under `_all` suffix + +### Changed + +- Fixed a config issue preventing the arm deb/rpm packages to be released correctly +- Upgraded golang to **v1.19** +- Upgraded most dependencies to their lastest versions +- Fixed child pipeline jobs not found whilst looking up through bridges (#345) +- `gitlab_ci_pipeline_job_queued_duration_seconds` & `gitlab_ci_pipeline_queued_duration_seconds` will now be leveraging the value returned through the GitLab API instead of computing it with (startedAt - createdAt) +- Refactored the RPC layer used for CLI monitoring with gRPC + +## [v0.5.3] - 2022-02-11 + +### Added + +- `linux/arm/v6` & `linux/arm/v7` binary & container image releases +- `quay.io` container image releases +- New internal metrics about exporter's health: + - `gcpe_gitlab_api_requests_remaining` - GitLab API requests remaining in the API Limit + - `gcpe_gitlab_api_requests_limit` - GitLab API requests available in the API Limit + +### Changed + +- Fixed an issue when running in cluster mode where tasks could hang when the exporter restarted. 
+- Fixed a bug in some cases where pagination headers are not returned from GitLab's API +- Upgraded most dependencies to their latest versions +- fixed json syntax error in webhook error body +- dashboards: fixed owner multiselect filters +- fixed a bug on `gitlab_ci_pipeline_run_count` being incremented multiple times + +## [v0.5.2] - 2021-08-25 +### Changed + +- Updated default metrics garbage collection intervals from 5 to 10m +- monitor: correctly sanitize the config output +- fixed an issue introduced in v0.5.1 causing the exporter to fail when the monitoring listener address was not defined +- fixed a bug which could cause nil pointer deferences on readiness checks +- Updated golang to `1.17` + +## [v0.5.1] - 2021-07-19 + +### Added + +- New monitoring CLI to retrieve information about the exporter +- New internal metrics about exporter's health: + - `gcpe_currently_queued_tasks_count` - Number of tasks in the queue + - `gcpe_environments_count` - Number of GitLab environments being exported + - `gcpe_executed_tasks_count` - Number of tasks executed + - `gcpe_gitlab_api_requests_count` - GitLab API requests count + - `gcpe_metrics_count` - Number of GitLab pipelines metrics being exported + - `gcpe_projects_count` - Number of GitLab projects being exported + - `gcpe_refs_count` - Number of GitLab refs being exported + +### Changed + +- fixed a goroutines/memory leak introduced in `0.5.0` which can occur when attempting to process more tasks than the ratelimit permits +- fixed a bug causing the refreshing of tag based jobs to not match any API results, generating lots of unnecessary requests +- webhooks: added more details when processing wildcards +- examples/quickstart: updated prometheus and grafana versions +- updated most libraries to their most recent versions + +## [v0.5.0] - 2021-06-02 + +**BREAKING CHANGES** + +- The configuration syntax has evolved, you can refer to the [updated version of the doc](docs/configuration_syntax.md) + - 
`pull.maximum_gitlab_api_requests_per_second` has moved to `gitlab.maximum_requests_per_second` + - `project_defaults.pull.refs.*` has been redone in a hopefully more comprehensible fashion + - `project_defaults.pull.refs.(branches|tags|merge_requests).*` parameters can now be used to have a finer granularity + on the management of per-ref-kind settings + - `project_defaults.pull.refs.from.(pipelines|merge_requests)` is not natively managed as part of the per-ref-kind specific parameters + - `project_defaults.pull.environments.name_regexp` has moved to `project_defaults.pull.environments.regexp` + - `project_defaults.pull.environments.tags_regexp` was removed to avoid confusion + +- Logging configuration now has to be done as part of the config file instead of CLI flags: + +```yaml +log: + level: info + format: text +``` + +- By default, when exporting metrics for `environments`, stopped ones will not be considered any more. + +### Added + +- New metric `gitlab_ci_pipeline_queued_duration_seconds` +- New metric `gitlab_ci_pipeline_job_queued_duration_seconds` +- (en|dis)able pulling `branches` / `tags` / `merge_requests` on a global or per-project basis +- Choose to export metrics for only the most 'n' recently updated refs pipelines +- Choose to export metrics for refs with pipelines 'updated in the last x seconds' +- Choose to export metrics for deleted `branches` or `tags` +- Choose to export metrics for available `environments` only + +### Changed + +- Bumped all dependencies +- Enhanced the function signatures for the ref objects management +- Fixed a bug causing the jobs metrics pull to fail on ref.Kind=RefKindMergeRequest +- Fixed a bug causing the environments to be garbage collected on every run +- Fixed the error handling when comparing 2 refs which resulted into nil pointer dereferences +- Fixed the pulling of merge-request based pipelines +- Fixed unit tests on windows +- Replaced custom config mangement implementation with `creasty/defaults` and 
`go-playground/validator` +- Rewrote the non-OOP approach used so far for the controller + +## [v0.4.9] - 2021-05-05 + +### Changed + +- Fixed a bug preventing to set `output_sparse_status_metrics` globally or at the wildcard level +- Updated all dependencies to their latest versions +- Reduced the default GitLab API request rate limit from 10 to 1rps + ## [v0.4.8] - 2021-03-08 ### Added @@ -507,7 +704,17 @@ if not seen in a long time. - LICENSE - README -[Unreleased]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/compare/v0.4.8...HEAD +[Unreleased]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/compare/v0.5.8...HEAD +[v0.5.8]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.8 +[v0.5.7]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.7 +[v0.5.6]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.6 +[v0.5.5]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.5 +[v0.5.4]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.4 +[v0.5.3]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.3 +[v0.5.2]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.2 +[v0.5.1]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.1 +[v0.5.0]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.5.0 +[v0.4.9]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.4.9 [v0.4.8]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.4.8 [v0.4.7]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.4.7 [v0.4.6]: https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/tree/v0.4.6 diff --git a/Dockerfile b/Dockerfile index 2f7e18e4..710b9456 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ # BUILD CONTAINER ## -FROM alpine:3.13 as certs +FROM alpine:3.21@sha256:21dc6063fd678b478f57c0e13f47560d0ea4eeba26dfc947b2a4f81f686b9f45 as certs RUN \ apk add 
--no-cache ca-certificates @@ -11,7 +11,7 @@ apk add --no-cache ca-certificates # RELEASE CONTAINER ## -FROM busybox:1.32-glibc +FROM busybox:1.37-glibc@sha256:c598938e58d0efcc5a01efe9059d113f22970914e05e39ab2a597a10f9db9bdc WORKDIR / @@ -24,4 +24,4 @@ USER 65534 EXPOSE 8080 ENTRYPOINT ["/usr/local/bin/gitlab-ci-pipelines-exporter"] -CMD [""] +CMD ["run"] diff --git a/Makefile b/Makefile index 7e45e70b..12728470 100644 --- a/Makefile +++ b/Makefile @@ -1,55 +1,28 @@ -NAME := gitlab-ci-pipelines-exporter -FILES := $(shell git ls-files */*.go) -REPOSITORY := mvisonneau/$(NAME) -.DEFAULT_GOAL := help - -export GO111MODULE=on - -.PHONY: setup -setup: ## Install required libraries/tools for build tasks - @command -v cover 2>&1 >/dev/null || GO111MODULE=off go get -u -v golang.org/x/tools/cmd/cover - @command -v gofumpt 2>&1 >/dev/null || GO111MODULE=off go get -u -v mvdan.cc/gofumpt - @command -v gosec 2>&1 >/dev/null || GO111MODULE=off go get -u -v github.com/securego/gosec/cmd/gosec - @command -v goveralls 2>&1 >/dev/null || GO111MODULE=off go get -u -v github.com/mattn/goveralls - @command -v ineffassign 2>&1 >/dev/null || GO111MODULE=off go get -u -v github.com/gordonklaus/ineffassign - @command -v misspell 2>&1 >/dev/null || GO111MODULE=off go get -u -v github.com/client9/misspell/cmd/misspell - @command -v revive 2>&1 >/dev/null || GO111MODULE=off go get -u -v github.com/mgechev/revive +NAME := gitlab-ci-pipelines-exporter +FILES := $(shell git ls-files */*.go) +COVERAGE_FILE := coverage.out +REPOSITORY := mvisonneau/$(NAME) +.DEFAULT_GOAL := help +GOLANG_VERSION := 1.23 .PHONY: fmt -fmt: setup ## Format source code - gofumpt -w $(FILES) +fmt: ## Format source code + go run mvdan.cc/gofumpt@v0.7.0 -w $(shell git ls-files **/*.go) + go run github.com/daixiang0/gci@v0.13.5 write -s standard -s default -s "prefix(github.com/mvisonneau)" . 
.PHONY: lint -lint: revive vet gofumpt ineffassign misspell gosec ## Run all lint related tests against the codebase - -.PHONY: revive -revive: setup ## Test code syntax with revive - revive -config .revive.toml $(FILES) - -.PHONY: vet -vet: ## Test code syntax with go vet - go vet ./... - -.PHONY: gofumpt -gofumpt: setup ## Test code syntax with gofumpt - gofumpt -d $(FILES) > gofumpt.out - @if [ -s gofumpt.out ]; then cat gofumpt.out; rm gofumpt.out; exit 1; else rm gofumpt.out; fi - -.PHONY: ineffassign -ineffassign: setup ## Test code syntax for ineffassign - ineffassign ./... - -.PHONY: misspell -misspell: setup ## Test code with misspell - misspell -error $(FILES) - -.PHONY: gosec -gosec: setup ## Test code for security vulnerabilities - gosec ./... +lint: ## Run all lint related tests upon the codebase + go run github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.2 run -v --fast .PHONY: test test: ## Run the tests against the codebase - go test -v -count=1 -race ./... + @rm -rf $(COVERAGE_FILE) + go test -v -count=1 -race ./... 
-coverprofile=$(COVERAGE_FILE) + @go tool cover -func $(COVERAGE_FILE) | awk '/^total/ {print "coverage: " $$3}' + +.PHONY: coverage +coverage: ## Prints coverage report + go tool cover -func $(COVERAGE_FILE) .PHONY: install install: ## Build and install locally the binary (dev purpose) @@ -61,27 +34,33 @@ build: ## Build the binaries using local GOOS .PHONY: release release: ## Build & release the binaries (stable) + mkdir -p ${HOME}/.cache/snapcraft/download + mkdir -p ${HOME}/.cache/snapcraft/stage-packages git tag -d edge - goreleaser release --rm-dist + goreleaser release --clean find dist -type f -name "*.snap" -exec snapcraft upload --release stable,edge '{}' \; +.PHONY: protoc +protoc: ## Generate golang from .proto files + @command -v protoc 2>&1 >/dev/null || (echo "protoc needs to be available in PATH: https://github.com/protocolbuffers/protobuf/releases"; false) + @command -v protoc-gen-go 2>&1 >/dev/null || go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.3.0 + protoc \ + --go_out=. --go_opt=paths=source_relative \ + --go-grpc_out=. --go-grpc_opt=paths=source_relative \ + pkg/monitor/protobuf/monitor.proto + .PHONY: prerelease -prerelease: setup ## Build & prerelease the binaries (edge) +prerelease: ## Build & prerelease the binaries (edge) @\ REPOSITORY=$(REPOSITORY) \ - NAME=$(NAME) \ - GITHUB_TOKEN=$(GITHUB_TOKEN) \ - .github/prerelease.sh + NAME=$(NAME) \ + GITHUB_TOKEN=$(GITHUB_TOKEN) \ + .github/prerelease.sh .PHONY: clean clean: ## Remove binary if it exists rm -f $(NAME) -.PHONY: coverage -coverage: ## Generates coverage report - rm -rf *.out - go test -count=1 -race -v ./... -coverpkg=./... 
-coverprofile=coverage.out - .PHONY: coverage-html coverage-html: ## Generates coverage report and displays it in the browser go tool cover -html=coverage.out @@ -92,14 +71,32 @@ dev-env: ## Build a local development environment using Docker -v $(shell pwd):/go/src/github.com/mvisonneau/$(NAME) \ -w /go/src/github.com/mvisonneau/$(NAME) \ -p 8080:8080 \ - golang:1.16 \ - /bin/bash -c 'make setup; make install; bash' + golang:$(GOLANG_VERSION) \ + /bin/bash -c '\ + git config --global --add safe.directory $$(pwd);\ + make install;\ + bash\ + ' .PHONY: is-git-dirty is-git-dirty: ## Tests if git is in a dirty state @git status --porcelain @test $(shell git status --porcelain | grep -c .) -eq 0 +.PHONY: man-pages +man-pages: ## Generates man pages + rm -rf helpers/manpages + mkdir -p helpers/manpages + go run ./cmd/tools/man | gzip -c -9 >helpers/manpages/$(NAME).1.gz + +.PHONY: autocomplete-scripts +autocomplete-scripts: ## Download CLI autocompletion scripts + rm -rf helpers/autocomplete + mkdir -p helpers/autocomplete + curl -sL https://raw.githubusercontent.com/urfave/cli/v2.27.1/autocomplete/bash_autocomplete > helpers/autocomplete/bash + curl -sL https://raw.githubusercontent.com/urfave/cli/v2.27.1/autocomplete/zsh_autocomplete > helpers/autocomplete/zsh + curl -sL https://raw.githubusercontent.com/urfave/cli/v2.27.1/autocomplete/powershell_autocomplete.ps1 > helpers/autocomplete/ps1 + .PHONY: all all: lint test build coverage ## Test, builds and ship package for all supported platforms diff --git a/README.md b/README.md index 3d5738ec..1e7f5dd9 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ [![release](https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/actions/workflows/release.yml/badge.svg)](https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/actions/workflows/release.yml) [![gitlab-ci-pipelines-exporter](https://snapcraft.io/gitlab-ci-pipelines-exporter/badge.svg)](https://snapcraft.io/gitlab-ci-pipelines-exporter) 
-`gitlab-ci-pipelines-exporter` allows you to monitor your [GitLab CI pipelines](https://docs.gitlab.com/ee/ci/pipelines.html) with [Prometheus](https://prometheus.io/) or any monitoring solution supporting the [OpenMetrics](https://github.com/OpenObservability/OpenMetrics) format. +`gitlab-ci-pipelines-exporter` allows you to monitor your [GitLab CI pipelines](https://docs.gitlab.com/ee/ci/pipelines/) with [Prometheus](https://prometheus.io/) or any monitoring solution supporting the [OpenMetrics](https://github.com/OpenObservability/OpenMetrics) format. You can find more information [on GitLab docs](https://docs.gitlab.com/ee/ci/pipelines/pipeline_efficiency.html#pipeline-monitoring) about how it takes part improving your pipeline efficiency. @@ -41,7 +41,7 @@ If you want to quickly try them out with your own data, have a look into the [ex ### Go ```bash -~$ go get -u github.com/mvisonneau/gitlab-ci-pipelines-exporter/cmd/gitlab-ci-pipelines-exporter +~$ go run github.com/mvisonneau/gitlab-ci-pipelines-exporter/cmd/gitlab-ci-pipelines-exporter@latest ``` ### Snapcraft @@ -60,8 +60,8 @@ If you want to quickly try them out with your own data, have a look into the [ex ```bash ~$ docker run -it --rm docker.io/mvisonneau/gitlab-ci-pipelines-exporter -or ~$ docker run -it --rm ghcr.io/mvisonneau/gitlab-ci-pipelines-exporter +~$ docker run -it --rm quay.io/mvisonneau/gitlab-ci-pipelines-exporter ``` ### Scoop @@ -71,6 +71,12 @@ or ~$ scoop install gitlab-ci-pipelines-exporter ``` +### NixOS + +``` +~$ nix-env -iA nixos.prometheus-gitlab-ci-pipelines-exporter +``` + ### Binaries, DEB and RPM packages Have a look onto the [latest release page](https://github.com/mvisonneau/gitlab-ci-pipelines-exporter/releases/latest) to pick your flavor and version. 
Here is an helper to fetch the most recent one: @@ -151,7 +157,7 @@ EOF -v $(pwd)/config.yml:/etc/config.yml \ -p 8080:8080 \ mvisonneau/gitlab-ci-pipelines-exporter:latest \ - --config /etc/config.yml + run --config /etc/config.yml ``` You should then be able to see the following logs @@ -262,18 +268,79 @@ USAGE: gitlab-ci-pipelines-exporter [global options] command [command options] [arguments...] COMMANDS: + run start the exporter + monitor display information about the currently running exporter help, h Shows a list of commands or help for one command GLOBAL OPTIONS: - --config file, -c file config file (default: "~/.gitlab-ci-pipelines-exporter.yml") [$GCPE_CONFIG] - --redis-url url redis url for an HA setup (format: redis[s]://[:password@]host[:port][/db-number][?option=value]) [$GCPE_REDIS_URL] - --gitlab-token token GitLab API access token (can be used to override the value set in the config file) [$GCPE_GITLAB_TOKEN] - --webhook-secret-token token token used to authenticate legitimate requests (can be used to override the value set in the config file) [$GCPE_WEBHOOK_SECRET_TOKEN] - --log-level level log level (debug,info,warn,fatal,panic) (default: "info") [$GCPE_LOG_LEVEL] - --log-format format log format (json,text) (default: "text") [$GCPE_LOG_FORMAT] + --internal-monitoring-listener-address value, -m value internal monitoring listener address [$GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS] + --help, -h show help (default: false) + --version, -v print the version (default: false) +``` + +### run + +```bash +~$ gitlab-ci-pipelines-exporter run --help +NAME: + gitlab-ci-pipelines-exporter run - start the exporter + +USAGE: + gitlab-ci-pipelines-exporter run [command options] [arguments...] 
+ +OPTIONS: + --config file, -c file config file (default: "./gitlab-ci-pipelines-exporter.yml") [$GCPE_CONFIG] + --redis-url url redis url for an HA setup (format: redis[s]://[:password@]host[:port][/db-number][?option=value]) (overrides config file parameter) [$GCPE_REDIS_URL] + --gitlab-token token GitLab API access token (overrides config file parameter) [$GCPE_GITLAB_TOKEN] + --webhook-secret-token token token used to authenticate legitimate requests (overrides config file parameter) [$GCPE_WEBHOOK_SECRET_TOKEN] --help, -h show help (default: false) ``` +### monitor + +```bash +~$ gitlab-ci-pipelines-exporter monitor --help +NAME: + gitlab-ci-pipelines-exporter monitor - display information about the currently running exporter + +USAGE: + gitlab-ci-pipelines-exporter monitor [command options] [arguments...] + +OPTIONS: + --help, -h show help (default: false) +``` + +## Monitor / Troubleshoot + +![monitor_cli_example](/docs/images/monitor_cli_example.gif) + +If you need to dig into your exporter's internal, you can leverage the internal CLI monitoring endpoint. This will get you insights about the following: +- Live telemetry regarding: + - GitLab API requests + - Tasks buffer usage + - Projects count and schedules + - Environments count and schedules + - Refs count and schedules + - Metrics count and schedules +- **Parsed configuration details** + +To use it, you have to start your exporter with the following flag `--internal-monitoring-listener-address`, `-m` or the `GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS` env variable. 
+
+You can use either a TCP or UNIX socket, e.g.:
+
+```
+~$ gitlab-ci-pipelines-exporter -m 'unix://gcpe-monitor.sock' run
+~$ gitlab-ci-pipelines-exporter -m 'tcp://127.0.0.1:9000' run
+```
+
+To use the monitor CLI, you need to be able to access the monitoring socket and reuse the same flag:
+
+```
+export GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS='unix://gcpe-monitor.sock'
+~$ gitlab-ci-pipelines-exporter run &
+~$ gitlab-ci-pipelines-exporter monitor
+```
+
 ## Develop / Test
 
 If you use docker, you can easily get started using :
diff --git a/cmd/gitlab-ci-pipelines-exporter/main.go b/cmd/gitlab-ci-pipelines-exporter/main.go
index 3b1e2c0d..a56ec79a 100644
--- a/cmd/gitlab-ci-pipelines-exporter/main.go
+++ b/cmd/gitlab-ci-pipelines-exporter/main.go
@@ -6,7 +6,7 @@ import (
 	"github.com/mvisonneau/gitlab-ci-pipelines-exporter/internal/cli"
 )
 
-var version = ""
+var version = "devel"
 
 func main() {
 	cli.Run(version, os.Args)
diff --git a/cmd/tools/man/main.go b/cmd/tools/man/main.go
new file mode 100644
index 00000000..66f32cf9
--- /dev/null
+++ b/cmd/tools/man/main.go
@@ -0,0 +1,14 @@
+package main
+
+import (
+	"fmt"
+	"time"
+
+	"github.com/mvisonneau/gitlab-ci-pipelines-exporter/internal/cli"
+)
+
+var version = "devel"
+
+func main() {
+	fmt.Println(cli.NewApp(version, time.Now()).ToMan())
+}
diff --git a/docs/configuration_syntax.md b/docs/configuration_syntax.md
index bac8d9f6..dd0a20e0 100644
--- a/docs/configuration_syntax.md
+++ b/docs/configuration_syntax.md
@@ -1,6 +1,24 @@
 # GitLab CI Pipelines Exporter - Configuration syntax
 
 ```yaml
+# Log configuration
+log:
+  # Set the logging level
+  # allowed values: trace, debug, info, warning, error, fatal or panic
+  # (optional, default: info)
+  level: info
+
+  # Set the logging format
+  # allowed values: text or json
+  # (optional, default: text)
+  format: text
+
+# OpenTelemetry configuration (currently supports tracing only)
+opentelemetry:
+  # Configure the OpenTelemetry collector gRPC endpoint in order to 
enable tracing + # (optional, default: "") + grpc_endpoint: + # Exporter HTTP servers configuration server: # [address:port] to make the process listen @@ -14,7 +32,7 @@ server: metrics: # Enable /metrics endpoint (optional, default: true) enabled: true - + # Enable OpenMetrics content encoding in # prometheus HTTP handler (optional, default: false) # see: https://godoc.org/github.com/prometheus/client_golang/prometheus/promhttp#HandlerOpts @@ -68,11 +86,26 @@ gitlab: # GitLab instance (handy when self-hosting) (optional, default: true) enable_tls_verify: true -pull: - # Global rate limit for the GitLab API request/sec - # (optional, default: 10) - maximum_gitlab_api_requests_per_second: 10 + # Maximum limit for the GitLab API requests/sec + # (optional, default: 1) + maximum_requests_per_second: 1 + + # Rate limit for the GitLab API requests/sec + # (optional, default: 5) + burstable_requests_per_second: 5 + + # Maximum amount of jobs to keep queue, if this limit is reached + # newly created ones will get dropped. As a best practice you should not change this value. 
+ # Workarounds to avoid hitting the limit are: + # - increase polling intervals + # - increase API rate limit + # - reduce the amount of projects, refs, environments or metrics you are looking into + # - leverage webhooks instead of polling schedules + # + # (optional, default: 1000) + maximum_jobs_queue_size: 1000 +pull: projects_from_wildcards: # Whether to trigger a discovery or not when the # exporter starts (optional, default: true) @@ -178,8 +211,8 @@ garbage_collect: scheduled: true # Interval in seconds to garbage collect metrics - # (optional, default: 300) - interval_seconds: 300 + # (optional, default: 600) + interval_seconds: 600 # Default settings which can be overridden at the project # or wildcard level (optional) @@ -197,44 +230,67 @@ project_defaults: # Filter out by name environments to include # (optional, default: ".*") - name_regexp: ".*" + regexp: ".*" - # When deployments are based upon tags, you can - # choose to filter out the ones which you are - # using to deploy your environment (optional, default: ".*") - tags_regexp: ".*" + # Do not export metrics for stopped environments + # (optional, default: true) + exclude_stopped: true refs: - # Filter refs (branches/tags only) to include - # (optional, default: "^main|master$" -- main or master branch) - regexp: "^main|master$" - - # If the age of the most recent commit for the ref is greater than - # this value, the ref won't get exported (optional, default: 0 (disabled)) - # nb: when used in conjuction of pull.from.(pipelines|merge_requests).enabled = true, the creation date - # of the pipeline is taken in account, not the age of the commit - max_age_seconds: 0 - - from: - pipelines: - # Whether to trigger a discovery of the projects refs - # from the most recent project pipelines when the - # project is configured/discovered (optional, default: false) - # This flag is useful if you want/need to obtain pipelines - # metrics of deleted refs - enabled: false + branches: + # Monitor pipelines 
related to project branches + # (optional, default: true) + enabled: true + + # Filter for branches to include + # (optional, default: "^(?:main|master)$" -- main/master branches) + regexp: "^(?:main|master)$" + + # Only keep most 'n' recently updated branches + # (optional, default: 0 -- disabled/keep every branch matching the regexp)" + most_recent: 0 - # Maximum number of pipelines to analyze per project - # to search for refs on init (optional, default: 100) - depth: 100 + # If the age of the most recently updated pipeline for the branch is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 - merge_requests: - # Fetch merge request pipelines refs (optional, default: false) - enabled: false + # If set to false, it will continue to export metrics for the branch even + # if it has been deleted (optional, default: true) + exclude_deleted: true + + tags: + # Monitor pipelines related to project tags + # (optional, default: true) + enabled: true + + # Filter for tags to include + # (optional, default: ".*" -- all tags) + regexp: ".*" + + # Only keep most 'n' recently updated tags + # (optional, default: 0 -- disabled/keep every tag matching the regexp)" + most_recent: 0 + + # If the age of the most recently updated pipeline for the tag is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 + + # If set to false, it will continue to export metrics for the tag even + # if it has been deleted (optional, default: true) + exclude_deleted: true - # Maximum number for merge requests pipelines to - # attempt fetch on each project ref discovery (optional, default: 1) - depth: 1 + merge_requests: + # Monitor pipelines related to project merge requests + # (optional, default: false) + enabled: false + + # Only keep most 'n' recently updated merge requests + # (optional, default: 0 -- disabled/keep every merge request) + most_recent: 0 
+ + # If the age of the most recently updated pipeline for the merge request is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 pipeline: jobs: @@ -265,6 +321,14 @@ project_defaults: # Filter pipelines variables to include # (optional, default: ".*", all variables) regexp: ".*" + + test_reports: + # Fetch test reports in a separate metric (optiona, default: false) + enabled: false + + test_cases: + # Fetch test cases reports in a separate metric (optional, default: false) + enabled: false # The list of the projects you want to monitor (optional) projects: @@ -281,44 +345,67 @@ projects: # Filter out by name environments to include # (optional, default: ".*") - name_regexp: ".*" + regexp: ".*" + + # Do not export metrics for stopped environments + # (optional, default: true) + exclude_stopped: true - # When deployments are based upon tags, you can - # choose to filter out the ones which you are - # using to deploy your environment (optional, default: ".*") - tags_regexp: ".*" - refs: - # Filter refs (branches/tags only) to include - # (optional, default: "^main|master$" -- main or master branch) - regexp: "^main|master$" - - # If the age of the most recent commit for the ref is greater than - # this value, the ref won't get exported (optional, default: 0 (disabled)) - # nb: when used in conjuction of pull.from.(pipelines|merge_requests).enabled = true, the creation date - # of the pipeline is taken in account, not the age of the commit - max_age_seconds: 0 + branches: + # Monitor pipelines related to project branches + # (optional, default: true) + enabled: true - from: - pipelines: - # Whether to trigger a discovery of the projects refs - # from the most recent project pipelines when the - # project is configured/discovered (optional, default: false) - # This flag is useful if you want/need to obtain pipelines - # metrics of deleted refs - enabled: false + # Filter for branches to include + 
# (optional, default: "^(?:main|master)$" -- main/master branches) + regexp: "^(?:main|master)$" + + # Only keep most 'n' recently updated branches + # (optional, default: 0 -- disabled/keep every branch matching the regexp)" + most_recent: 0 - # Maximum number of pipelines to analyze per project - # to search for refs on init (optional, default: 100) - depth: 100 + # If the age of the most recently updated pipeline for the branch is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 - merge_requests: - # Fetch merge request pipelines refs (optional, default: false) - enabled: false + # If set to false, it will continue to export metrics for the branch even + # if it has been deleted (optional, default: true) + exclude_deleted: true - # Maximum number for merge requests pipelines to - # attempt fetch on each project ref discovery (optional, default: 1) - depth: 1 + tags: + # Monitor pipelines related to project tags + # (optional, default: true) + enabled: true + + # Filter for tags to include + # (optional, default: ".*" -- all tags) + regexp: ".*" + + # Only keep most 'n' recently updated tags + # (optional, default: 0 -- disabled/keep every tag matching the regexp)" + most_recent: 0 + + # If the age of the most recently updated pipeline for the tag is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 + + # If set to false, it will continue to export metrics for the tag even + # if it has been deleted (optional, default: true) + exclude_deleted: true + + merge_requests: + # Monitor pipelines related to project merge requests + # (optional, default: false) + enabled: false + + # Only keep most 'n' recently updated merge requests + # (optional, default: 0 -- disabled/keep every merge request) + most_recent: 0 + + # If the age of the most recently updated pipeline for the merge request is greater than + # this value, the 
pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 pipeline: jobs: @@ -349,6 +436,14 @@ projects: # Filter pipelines variables to include # (optional, default: ".*", all variables) regexp: ".*" + + test_reports: + # Fetch test reports in a separate metric (optiona, default: false) + enabled: false + + test_cases: + # Fetch test cases reports in a separate metric (optional, default: false) + enabled: false # Dynamically fetch projects to monitor using a wildcard (optional) wildcards: @@ -381,44 +476,67 @@ wildcards: # Filter out by name environments to include # (optional, default: ".*") - name_regexp: ".*" + regexp: ".*" - # When deployments are based upon tags, you can - # choose to filter out the ones which you are - # using to deploy your environment (optional, default: ".*") - tags_regexp: ".*" + # Do not export metrics for stopped environments + # (optional, default: true) + exclude_stopped: true refs: - # Filter refs (branches/tags only) to include - # (optional, default: "^main|master$" -- main or master branch) - regexp: "^main|master$" - - # If the age of the most recent commit for the ref is greater than - # this value, the ref won't get exported (optional, default: 0 (disabled)) - # nb: when used in conjuction of pull.from.(pipelines|merge_requests).enabled = true, the creation date - # of the pipeline is taken in account, not the age of the commit - max_age_seconds: 0 + branches: + # Monitor pipelines related to project branches + # (optional, default: true) + enabled: true - from: - pipelines: - # Whether to trigger a discovery of the projects refs - # from the most recent project pipelines when the - # project is configured/discovered (optional, default: false) - # This flag is useful if you want/need to obtain pipelines - # metrics of deleted refs - enabled: false + # Filter for branches to include + # (optional, default: "^(?:main|master)$" -- main/master branches) + regexp: "^(?:main|master)$" + + # Only keep 
most 'n' recently updated branches + # (optional, default: 0 -- disabled/keep every branch matching the regexp)" + most_recent: 0 - # Maximum number of pipelines to analyze per project - # to search for refs on init (optional, default: 100, min: 1, max: 100) - depth: 100 + # If the age of the most recently updated pipeline for the branch is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 - merge_requests: - # Fetch merge request pipelines refs (optional, default: false) - enabled: false + # If set to false, it will continue to export metrics for the branch even + # if it has been deleted (optional, default: true) + exclude_deleted: true - # Maximum number for merge requests pipelines to - # attempt fetch on each project ref discovery (optional, default: 1) - depth: 1 + tags: + # Monitor pipelines related to project tags + # (optional, default: true) + enabled: true + + # Filter for tags to include + # (optional, default: ".*" -- all tags) + regexp: ".*" + + # Only keep most 'n' recently updated tags + # (optional, default: 0 -- disabled/keep every tag matching the regexp)" + most_recent: 0 + + # If the age of the most recently updated pipeline for the tag is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 + + # If set to false, it will continue to export metrics for the tag even + # if it has been deleted (optional, default: true) + exclude_deleted: true + + merge_requests: + # Monitor pipelines related to project merge requests + # (optional, default: false) + enabled: false + + # Only keep most 'n' recently updated merge requests + # (optional, default: 0 -- disabled/keep every merge request) + most_recent: 0 + + # If the age of the most recently updated pipeline for the merge request is greater than + # this value, the pipeline metrics won't get exported (optional, default: 0 (disabled)) + max_age_seconds: 0 
pipeline: jobs: @@ -449,6 +567,19 @@ wildcards: # Filter pipelines variables to include # (optional, default: ".*", all variables) regexp: ".*" + + test_reports: + # Fetch test reports in a separate metric (optiona, default: false) + enabled: false + + from_child_pipelines: + # Combines test reports from subsequent child/downstream pipelines + # (optional, default: false) + enabled: false + + test_cases: + # Fetch test cases reports in a separate metric (optional, default: false) + enabled: false ``` ## Pull all projects accessible by the provided token diff --git a/docs/images/monitor_cli_example.gif b/docs/images/monitor_cli_example.gif new file mode 100644 index 00000000..621ffbaa Binary files /dev/null and b/docs/images/monitor_cli_example.gif differ diff --git a/docs/metrics.md b/docs/metrics.md index 5292c045..dd6a48f3 100644 --- a/docs/metrics.md +++ b/docs/metrics.md @@ -4,6 +4,15 @@ | Metric name | Description | Labels | Configuration | |---|---|---|---| +| `gcpe_currently_queued_tasks_count` | Number of tasks in the queue || *available by default* | +| `gcpe_environments_count` | Number of GitLab environments being exported || *available by default* | +| `gcpe_executed_tasks_count` | Number of tasks executed || *available by default* | +| `gcpe_gitlab_api_requests_count` | GitLab API requests count || *available by default* | +| `gcpe_gitlab_api_requests_remaining` | GitLab API requests remaining in the API Limit || *available by default* | +| `gcpe_gitlab_api_requests_limit` | GitLab API requests available in the API Limit || *available by default* | +| `gcpe_metrics_count` | Number of GitLab pipelines metrics being exported || *available by default* | +| `gcpe_projects_count` | Number of GitLab projects being exported || *available by default* | +| `gcpe_refs_count` | Number of GitLab refs being exported || *available by default* | | `gitlab_ci_environment_behind_commits_count` | Number of commits the environment is behind given its last deployment | 
[project], [environment] | `project_defaults.pull.environments.enabled` | | `gitlab_ci_environment_behind_duration_seconds` | Duration in seconds the environment is behind the most recent commit given its last deployment | [project], [environment] | `project_defaults.pull.environments.enabled` | | `gitlab_ci_environment_deployment_count` |Number of deployments for an environment | [project], [environment] | `project_defaults.pull.environments.enabled` | @@ -12,18 +21,34 @@ | `gitlab_ci_environment_deployment_status` | Status of the most recent deployment of the environment | [project], [environment], [status] | `project_defaults.pull.environments.enabled` | | `gitlab_ci_environment_deployment_timestamp` | Creation date of the most recent deployment of the environment | [project], [environment] | `project_defaults.pull.environments.enabled` | | `gitlab_ci_environment_information` | Information about the environment | [project], [environment], [environment_id], [external_url], [kind], [ref], [latest_commit_short_id], [current_commit_short_id], [available], [username] | `project_defaults.pull.environments.enabled` | -| `gitlab_ci_pipeline_coverage` | Coverage of the most recent pipeline | [project], [topics], [ref], [kind], [variables] | *available by default* | -| `gitlab_ci_pipeline_duration_seconds` | Duration in seconds of the most recent pipeline | [project], [topics], [ref], [kind], [variables] | *available by default* | -| `gitlab_ci_pipeline_id` | ID of the most recent pipeline | [project], [topics], [ref], [kind], [variables] | *available by default* | -| `gitlab_ci_pipeline_job_artifact_size_bytes` | Artifact size in bytes (sum of all of them) of the most recent job | [project], [topics], [ref], [runner_description], [kind], [variables], [stage], [job_name] | `project_defaults.pull.pipeline.jobs.enabled` | -| `gitlab_ci_pipeline_job_duration_seconds` | Duration in seconds of the most recent job | [project], [topics], [ref], [runner_description], [kind], 
[variables], [stage], [job_name] | `project_defaults.pull.pipeline.jobs.enabled` | -| `gitlab_ci_pipeline_job_id` | ID of the most recent job | [project], [topics], [ref], [runner_description], [kind], [variables], [stage], [job_name] | `project_defaults.pull.pipeline.jobs.enabled` | -| `gitlab_ci_pipeline_job_run_count` | Number of executions of a job | [project], [topics], [ref], [runner_description], [kind], [variables], [stage], [job_name] | `project_defaults.pull.pipeline.jobs.enabled` | -| `gitlab_ci_pipeline_job_status` | Status of the most recent job | [project], [topics], [ref], [runner_description], [kind], [variables], [stage], [job_name], [status] | `project_defaults.pull.pipeline.jobs.enabled` | -| `gitlab_ci_pipeline_job_timestamp` | Creation date timestamp of the the most recent job | [project], [topics], [ref], [runner_description], [kind], [variables], [stage], [job_name] | `project_defaults.pull.pipeline.jobs.enabled` | -| `gitlab_ci_pipeline_status` | Status of the most recent pipeline | [project], [topics], [ref], [kind], [variables], [status] | *available by default* | -| `gitlab_ci_pipeline_timestamp` | Timestamp of the last update of the most recent pipeline | [project], [topics], [ref], [kind], [variables] | *available by default* | -| `gitlab_ci_pipeline_run_count` | Number of executions of a pipeline | [project], [topics], [ref], [kind], [variables] | *available by default* | +| `gitlab_ci_pipeline_coverage` | Coverage of the most recent pipeline | [project], [topics], [ref], [kind], [source], [variables] | *available by default* | +| `gitlab_ci_pipeline_duration_seconds` | Duration in seconds of the most recent pipeline | [project], [topics], [ref], [kind], [source], [variables] | *available by default* | +| `gitlab_ci_pipeline_id` | ID of the most recent pipeline | [project], [topics], [ref], [kind], [source], [variables] | *available by default* | +| `gitlab_ci_pipeline_job_artifact_size_bytes` | Artifact size in bytes (sum of all of 
them) of the most recent job | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_job_duration_seconds` | Duration in seconds of the most recent job | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_job_id` | ID of the most recent job | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_job_queued_duration_seconds` | Duration in seconds the most recent job has been queued before starting | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_job_run_count` | Number of executions of a job | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_job_status` | Status of the most recent job | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [status], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_job_timestamp` | Creation date timestamp of the most recent job | [project], [topics], [ref], [runner_description], [kind], [source], [variables], [stage], [job_name], [tag_list], [failure_reason] | `project_defaults.pull.pipeline.jobs.enabled` | +| `gitlab_ci_pipeline_queued_duration_seconds` | Duration in seconds the most recent pipeline has been queued before starting | [project], [topics], [ref], [kind], [source], 
[variables] | *available by default* | +| `gitlab_ci_pipeline_run_count` | Number of executions of a pipeline | [project], [topics], [ref], [kind], [source], [variables] | *available by default* | +| `gitlab_ci_pipeline_status` | Status of the most recent pipeline | [project], [topics], [ref], [kind], [source], [variables], [status] | *available by default* | +| `gitlab_ci_pipeline_timestamp` | Timestamp of the last update of the most recent pipeline | [project], [topics], [ref], [kind], [source], [variables] | *available by default* | +| `gitlab_ci_pipeline_test_report_total_time` | Duration in seconds of all the tests in the most recently finished pipeline | [project], [topics], [ref], [kind], [source], [variables] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_report_total_count` | Number of total tests in the most recently finished pipeline | [project], [topics], [ref], [kind], [source], [variables] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_report_success_count` | Number of successful tests in the most recently finished pipeline | [project], [topics], [ref], [kind], [source], [variables] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_report_failed_count` | Number of failed tests in the most recently finished pipeline | [project], [topics], [ref], [kind], [source], [variables] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_report_skipped_count` | Number of skipped tests in the most recently finished pipeline | [project], [topics], [ref], [kind], [source], [variables] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_report_error_count` | Number of errored tests in the most recently finished pipeline | [project], [topics], [ref], [kind], [source], [variables] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_suite_total_time` | 
Duration in seconds for the test suite | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_suite_total_count` | Number of total tests for the test suite | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_suite_success_count` | Number of successful tests for the test suite | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_suite_failed_count` | Number of failed tests for the test suite | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_suite_skipped_count` | Number of skipped tests for the test suite | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_suite_error_count` | Number of errored tests for the test suite | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name] | `project_defaults.pull.pipeline.test_reports.enabled` | +| `gitlab_ci_pipeline_test_case_execution_time` | Duration in seconds for the test case | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name], [test_case_name], [test_case_classname] | `project_defaults.pull.pipeline.test_reports.test_cases.enabled` | +| `gitlab_ci_pipeline_test_case_status` | Status of the most recent test case | [project], [topics], [ref], [kind], [source], [variables], [test_suite_name], [test_case_name], [test_case_classname], [status] | `project_defaults.pull.pipeline.test_reports.test_cases.enabled` | ## Labels @@ -47,11 +72,30 @@ Description of the runner on which the most recent job ran Type of the ref used by 
the pipeline. Can be either **branch**, **tag** or **merge_request** +### Source + +The reason the pipeline exists. + ### Variables User defined variables for the pipelines. Those are not fetched by default, you need to set `project_defaults.pull.pipeline.variables.enabled` to **true** +### Test Suite Name + +Name of the test suite. +This is not fetched by default, you need to set `project_defaults.pull.pipeline.test_reports.enabled` to **true** + +### Test Case Name + +Name of the test case. +This is not fetched by default, you need to set `project_defaults.pull.pipeline.test_reports.test_cases.enabled` to **true** + +### Test Case ClassName + +Name of the test case classname. +This is not fetched by default, you need to set `project_defaults.pull.pipeline.test_reports.test_cases.enabled` to **true** + ### Environment Name of the environment @@ -78,7 +122,7 @@ GitLab username of the person which triggered the most recent deployment of the ### Status -Status of the pipeline or deployment +Status of the pipeline, deployment or test case ### Stage @@ -88,6 +132,10 @@ Stage of the job Name of the job +### Tag list + +Tag list of the job + ### Environment ID ID of the environment @@ -115,6 +163,7 @@ This flag affect every `_status$` metrics: - `gitlab_ci_pipeline_environment_deployment_status` - `gitlab_ci_pipeline_job_status` - `gitlab_ci_pipeline_status` +- `gitlab_ci_pipeline_test_case_status` [available]: #available [current_commit_short_id]: #current-commit-short-id @@ -122,6 +171,7 @@ This flag affect every `_status$` metrics: [environment_id]: #environment-id [external_url]: #external-url [job_name]: #job-name +[tag_list]: #tag-list [kind]: #ref-kind [latest_commit_short_id]: #latest-commit-short-id [project]: #project @@ -131,4 +181,8 @@ This flag affect every `_status$` metrics: [status]: #status [topics]: #topics [username]: #username -[variables]: #variables \ No newline at end of file +[source]: #source +[variables]: #variables +[test_suite_name]: 
#test-suite-name +[test_case_name]: #test-case-name +[test_case_classname]: #test-case-classname diff --git a/examples/ha-setup/README.md b/examples/ha-setup/README.md index a1ec906f..8d77d6af 100644 --- a/examples/ha-setup/README.md +++ b/examples/ha-setup/README.md @@ -19,7 +19,7 @@ The [docker-compose.yml](./docker-compose.yml) is configured to spinup the neces ~$ cd gitlab-ci-pipelines-exporter/examples/ha-setup # Provide your personal GitLab API access token (needs read_api permissions) -~$ sed -i 's//xXF_xxjV_xxyzxzz' gitlab-ci-pipelines-exporter/config.yml +~$ sed -i 's//xXF_xxjV_xxyzxzz/' gitlab-ci-pipelines-exporter.yml # Start redis and gitlab-ci-pipelines-exporter containers ~$ docker-compose up -d diff --git a/examples/ha-setup/docker-compose.yml b/examples/ha-setup/docker-compose.yml index 85c41a21..2e8f9505 100644 --- a/examples/ha-setup/docker-compose.yml +++ b/examples/ha-setup/docker-compose.yml @@ -1,14 +1,15 @@ +--- version: '3.8' services: redis: - image: docker.io/bitnami/redis:6.0.9 + image: docker.io/bitnami/redis:6.2 ports: - 6379:6379 environment: ALLOW_EMPTY_PASSWORD: 'yes' gitlab-ci-pipelines-exporter-1: &gitlab-ci-pipelines-exporter - image: docker.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.4.8 + image: quay.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.5.8 # You can comment out the image name and use the following statement # to build the image against the current version of the repository #build: ../.. 
@@ -18,7 +19,7 @@ services: - redis environment: GCPE_CONFIG: /etc/gitlab-ci-pipelines-exporter.yml - GCPE_LOG_LEVEL: debug + GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS: tcp://127.0.0.1:8082 volumes: - type: bind source: ./gitlab-ci-pipelines-exporter.yml diff --git a/examples/ha-setup/gitlab-ci-pipelines-exporter.yml b/examples/ha-setup/gitlab-ci-pipelines-exporter.yml index 8a9362ac..63b9e506 100644 --- a/examples/ha-setup/gitlab-ci-pipelines-exporter.yml +++ b/examples/ha-setup/gitlab-ci-pipelines-exporter.yml @@ -1,3 +1,7 @@ +--- +log: + level: debug + gitlab: url: https://gitlab.com token: diff --git a/examples/opentelemetry/README.md b/examples/opentelemetry/README.md new file mode 100644 index 00000000..836e2f94 --- /dev/null +++ b/examples/opentelemetry/README.md @@ -0,0 +1,58 @@ +# Example monitoring of gitlab-ci-pipelines-exporter with Jaeger + +## Requirements + +- **~5 min of your time** +- A personal access token on [gitlab.com](https://docs.gitlab.com/ee/user/profile/personal_access_tokens.html) (or your own instance) with `read_api` scope +- [git](https://git-scm.com/) & [docker-compose](https://docs.docker.com/compose/) + +## 🚀 + +```bash +# Clone this repository +~$ git clone https://github.com/mvisonneau/gitlab-ci-pipelines-exporter.git +~$ cd gitlab-ci-pipelines-exporter/examples/opentelemetry + +# Provide your personal GitLab API access token (needs read_api permissions) +~$ sed -i 's//xXF_xxjV_xxyzxzz/' gitlab-ci-pipelines-exporter.yml + +# Start gitlab-ci-pipelines-exporter, prometheus and grafana containers ! +~$ docker-compose up -d +Creating network "opentelemetry_default" with driver "bridge" +Creating opentelemetry_jaeger_1 ... done +Creating opentelemetry_redis_1 ... done +Creating opentelemetry_otel-collector_1 ... done +Creating opentelemetry_gitlab-ci-pipelines-exporter_1 ... done +Creating opentelemetry_prometheus_1 ... done +Creating opentelemetry_grafana_1 ... 
done +``` + +You should now have a stack completely configured and accessible at these locations: + +- `gitlab-ci-pipelines-exporter`: [http://localhost:8080/metrics](http://localhost:8080/metrics) +- `jaeger`: [http://localhost:16686](http://localhost:16686) +- `prometheus`: [http://localhost:9090](http://localhost:9090) +- `grafana`: [http://localhost:3000](http://localhost:3000) (if you want/need to login, creds are _admin/admin_) + +## Use and troubleshoot + +### Validate that containers are running + +```bash +~$ docker-compose ps + Name Command State Ports +----------------------------------------------------------------------------------------------------------------------------------------------------------------- +opentelemetry_gitlab-ci-pipelines-exporter_1 /usr/local/bin/gitlab-ci-p ... Up 0.0.0.0:8080->8080/tcp +opentelemetry_grafana_1 /run.sh Up 0.0.0.0:3000->3000/tcp +opentelemetry_jaeger_1 /go/bin/all-in-one-linux Up 14250/tcp, 14268/tcp, 0.0.0.0:16686->16686/tcp, 5775/udp, 5778/tcp, + 6831/udp, 6832/udp +opentelemetry_otel-collector_1 /otelcontribcol --config=/ ... Up 0.0.0.0:4317->4317/tcp, 55679/tcp, 55680/tcp +opentelemetry_prometheus_1 /bin/prometheus --config.f ... Up 0.0.0.0:9090->9090/tcp +opentelemetry_redis_1 /opt/bitnami/scripts/redis ... 
Up 0.0.0.0:6379->6379/tcp +``` + +## Cleanup + +```bash +~$ docker-compose down +``` diff --git a/examples/opentelemetry/docker-compose.yml b/examples/opentelemetry/docker-compose.yml new file mode 100644 index 00000000..c847de4b --- /dev/null +++ b/examples/opentelemetry/docker-compose.yml @@ -0,0 +1,71 @@ +--- +version: '3.8' +services: + redis: + image: docker.io/bitnami/redis:6.2 + ports: + - 6379:6379 + environment: + ALLOW_EMPTY_PASSWORD: 'yes' + + jaeger: + image: docker.io/jaegertracing/all-in-one:1.33 + volumes: + - ./prometheus/config.yml:/etc/prometheus/prometheus.yml + ports: + - 16686:16686 + + otel-collector: + image: docker.io/otel/opentelemetry-collector-contrib-dev:latest + command: ["--config=/etc/otel-collector-config.yml"] + volumes: + - ./otel-collector-config.yml:/etc/otel-collector-config.yml + ports: + - 4317:4317 + links: + - jaeger + + gitlab-ci-pipelines-exporter: + image: quay.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.5.8 + # You can comment out the image name and use the following statement + # to build the image against the current version of the repository + # build: ../.. 
+ ports: + - 8080:8080 + environment: + GCPE_GITLAB_TOKEN: ${GCPE_GITLAB_TOKEN} + GCPE_CONFIG: /etc/gitlab-ci-pipelines-exporter.yml + GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS: tcp://127.0.0.1:8082 + links: + - redis + - otel-collector + volumes: + - type: bind + source: ./gitlab-ci-pipelines-exporter.yml + target: /etc/gitlab-ci-pipelines-exporter.yml + + prometheus: + image: docker.io/prom/prometheus:v2.44.0 + ports: + - 9090:9090 + links: + - gitlab-ci-pipelines-exporter + volumes: + - ./prometheus/config.yml:/etc/prometheus/prometheus.yml + + grafana: + image: docker.io/grafana/grafana:9.5.2 + ports: + - 3000:3000 + environment: + GF_AUTH_ANONYMOUS_ENABLED: 'true' + GF_INSTALL_PLUGINS: grafana-polystat-panel,yesoreyeram-boomtable-panel + links: + - prometheus + - jaeger + volumes: + - ./grafana/datasources.yml:/etc/grafana/provisioning/datasources/default.yml + +networks: + default: + driver: bridge diff --git a/examples/opentelemetry/gitlab-ci-pipelines-exporter.yml b/examples/opentelemetry/gitlab-ci-pipelines-exporter.yml new file mode 100644 index 00000000..a3264c9b --- /dev/null +++ b/examples/opentelemetry/gitlab-ci-pipelines-exporter.yml @@ -0,0 +1,25 @@ +--- +log: + level: trace + format: json + +opentelemetry: + grpc_endpoint: otel-collector:4317 + +gitlab: + url: https://gitlab.com + token: + +redis: + url: redis://redis:6379 + +# Example public projects to monitor +projects: + - name: gitlab-org/gitlab-runner + # Pull environments related metrics prefixed with 'stable' for this project + pull: + environments: + enabled: true + name_regexp: '^stable.*' + + - name: gitlab-org/charts/auto-deploy-app \ No newline at end of file diff --git a/examples/opentelemetry/grafana/datasources.yml b/examples/opentelemetry/grafana/datasources.yml new file mode 100644 index 00000000..5eae0e68 --- /dev/null +++ b/examples/opentelemetry/grafana/datasources.yml @@ -0,0 +1,17 @@ +datasources: +- name: 'prometheus' + type: 'prometheus' + access: 'proxy' + org_id: 1 + url: 
'http://prometheus:9090' + is_default: true + version: 1 + editable: true +- name: 'jaeger' + type: 'jaeger' + access: 'proxy' + org_id: 1 + url: 'http://jaeger:16686' + is_default: false + version: 1 + editable: true \ No newline at end of file diff --git a/examples/opentelemetry/otel-collector-config.yml b/examples/opentelemetry/otel-collector-config.yml new file mode 100644 index 00000000..b69cfc48 --- /dev/null +++ b/examples/opentelemetry/otel-collector-config.yml @@ -0,0 +1,20 @@ +receivers: + otlp: + protocols: + grpc: + +exporters: + jaeger: + endpoint: jaeger:14250 + tls: + insecure: true + +processors: + batch: + +service: + pipelines: + traces: + receivers: [otlp] + processors: [batch] + exporters: [jaeger] diff --git a/examples/opentelemetry/prometheus/config.yml b/examples/opentelemetry/prometheus/config.yml new file mode 100644 index 00000000..60a05898 --- /dev/null +++ b/examples/opentelemetry/prometheus/config.yml @@ -0,0 +1,10 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + - job_name: 'gitlab-ci-pipelines-exporter' + scrape_interval: 10s + scrape_timeout: 5s + static_configs: + - targets: ['gitlab-ci-pipelines-exporter:8080'] \ No newline at end of file diff --git a/examples/quickstart/docker-compose.yml b/examples/quickstart/docker-compose.yml index 922716b0..99f655d8 100644 --- a/examples/quickstart/docker-compose.yml +++ b/examples/quickstart/docker-compose.yml @@ -1,22 +1,24 @@ +--- version: '3.8' services: gitlab-ci-pipelines-exporter: - image: docker.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.4.8 + image: quay.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.5.8 # You can comment out the image name and use the following statement # to build the image against the current version of the repository # build: ../.. 
ports: - 8080:8080 environment: + GCPE_GITLAB_TOKEN: ${GCPE_GITLAB_TOKEN} GCPE_CONFIG: /etc/gitlab-ci-pipelines-exporter.yml - GCPE_LOG_LEVEL: debug + GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS: tcp://127.0.0.1:8082 volumes: - type: bind source: ./gitlab-ci-pipelines-exporter.yml target: /etc/gitlab-ci-pipelines-exporter.yml prometheus: - image: docker.io/prom/prometheus:v2.22.2 + image: docker.io/prom/prometheus:v2.44.0 ports: - 9090:9090 links: @@ -25,7 +27,7 @@ services: - ./prometheus/config.yml:/etc/prometheus/prometheus.yml grafana: - image: docker.io/grafana/grafana:7.3.3 + image: docker.io/grafana/grafana:9.5.2 ports: - 3000:3000 environment: diff --git a/examples/quickstart/gitlab-ci-pipelines-exporter.yml b/examples/quickstart/gitlab-ci-pipelines-exporter.yml index 1f82619f..e6550dc3 100644 --- a/examples/quickstart/gitlab-ci-pipelines-exporter.yml +++ b/examples/quickstart/gitlab-ci-pipelines-exporter.yml @@ -1,3 +1,7 @@ +--- +log: + level: debug + gitlab: url: https://gitlab.com token: diff --git a/examples/quickstart/grafana/dashboards/dashboard_environments.json b/examples/quickstart/grafana/dashboards/dashboard_environments.json index e42e3994..91eb5255 100644 --- a/examples/quickstart/grafana/dashboards/dashboard_environments.json +++ b/examples/quickstart/grafana/dashboards/dashboard_environments.json @@ -144,7 +144,7 @@ ], "targets": [ { - "expr": "gitlab_ci_environment_behind_commits_count{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}", + "expr": "gitlab_ci_environment_behind_commits_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}", "format": "time_series", "hide": false, "instant": true, @@ -232,7 +232,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_environment_information{available=\"true\", project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) or vector(0)", + "expr": "count(gitlab_ci_environment_information{available=\"true\", project=~\"($OWNER).*\",project=~\"$PROJECT\", 
environment=~\"$ENVIRONMENT\"}) or vector(0)", "format": "time_series", "instant": false, "interval": "", @@ -315,7 +315,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_environment_behind_commits_count{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT.*\"} > 0) or vector(0)", + "expr": "count(gitlab_ci_environment_behind_commits_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT.*\"} > 0) or vector(0)", "format": "time_series", "instant": false, "interval": "", @@ -398,7 +398,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_environment_status{status=\"failed\", project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"} > 0) or vector(0)", + "expr": "count(gitlab_ci_environment_status{status=\"failed\", project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"} > 0) or vector(0)", "format": "time_series", "instant": false, "interval": "", @@ -428,7 +428,7 @@ }, "id": 118, "options": { - "content": "

", + "content": "

", "mode": "html" }, "pluginVersion": "7.3.1", @@ -440,7 +440,7 @@ }, { "cacheTimeout": null, - "datasource": null, + "datasource": "Prometheus", "fieldConfig": { "defaults": { "custom": {}, @@ -507,7 +507,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_environment_information{available=\"false\", project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) or vector(0)", + "expr": "count(gitlab_ci_environment_information{available=\"false\", project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) or vector(0)", "format": "time_series", "instant": false, "interval": "", @@ -591,7 +591,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "avg(gitlab_ci_environment_behind_commits_count{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"})", + "expr": "avg(gitlab_ci_environment_behind_commits_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"})", "format": "time_series", "instant": false, "interval": "", @@ -671,7 +671,7 @@ "steppedLine": false, "targets": [ { - "expr": "sum(increase(gitlab_ci_environment_deployment_count{project=~\"$PROJECT\",environment=~\"$ENVIRONMENT\"}[1m])) by (project, environment) / sum(increase(gitlab_ci_environment_deployment_count{project=~\"$PROJECT\",environment=~\"$ENVIRONMENT\"}[1m])) by (project, environment)", + "expr": "sum(increase(gitlab_ci_environment_deployment_count{project=~\"($OWNER).*\",project=~\"$PROJECT\",environment=~\"$ENVIRONMENT\"}[1m])) by (project, environment) / sum(increase(gitlab_ci_environment_deployment_count{project=~\"($OWNER).*\",project=~\"$PROJECT\",environment=~\"$ENVIRONMENT\"}[1m])) by (project, environment)", "format": "time_series", "instant": false, "interval": "", @@ -785,7 +785,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "sum(increase(gitlab_ci_environment_deployment_count{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}[1h]))", + "expr": 
"sum(increase(gitlab_ci_environment_deployment_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}[1h]))", "format": "time_series", "instant": false, "interval": "", @@ -869,7 +869,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "avg(gitlab_ci_environment_behind_duration_seconds{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"})", + "expr": "avg(gitlab_ci_environment_behind_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"})", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -926,8 +926,8 @@ "value": [ { "targetBlank": true, - "title": "View environment #${__value.numeric}", - "url": "https://${GITLAB_HOST}/${__data.fields.project}/-/environments/${__value.numeric}" + "title": "View environment #${__value.text}", + "url": "https://${GITLAB_HOST}/${__data.fields.project:raw}/-/environments/${__value.text}" } ] }, @@ -1179,7 +1179,7 @@ { "targetBlank": true, "title": "View job #${__value.numeric}", - "url": "https://${GITLAB_HOST}/${__data.fields.project}/-/jobs/${__value.numeric}" + "url": "https://${GITLAB_HOST}/${__data.fields.project:raw}/-/jobs/${__value.numeric}" } ] }, @@ -1280,9 +1280,13 @@ { "targetBlank": true, "title": "View commit ${__value.text} details", - "url": "https://${GITLAB_HOST}/${__data.fields.Project}/-/commit/${__value.text}" + "url": "https://${GITLAB_HOST}/${__data.fields.Project:raw}/-/commit/${__value.text}" } ] + }, + { + "id": "unit", + "value": "string" } ] }, @@ -1298,7 +1302,7 @@ { "targetBlank": true, "title": "Compare commits on GitLab", - "url": "https://${GITLAB_HOST}/${__data.fields.Project}/-/compare/${__data.fields[\"Deployed commit\"]}...${__data.fields[\"Latest commit\"]}" + "url": "https://${GITLAB_HOST}/${__data.fields.Project:raw}/-/compare/${__data.fields[\"Deployed commit\"]}...${__data.fields[\"Latest commit\"]}" } ] } @@ -1326,7 +1330,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": 
"(max(gitlab_ci_environment_deployment_status{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"success\"}) by (project, environment) * 1) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"running\"}) by (project, environment) * 2) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"failed\"}) by (project, environment) * 3) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"canceled\"}) by (project, environment) * 4) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"skipped\"}) by (project, environment) * 5) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"created|waiting_for_resource|preparing|pending|manual|scheduled\"}) by (project, environment) * 6) > 0", + "expr": "(max(gitlab_ci_environment_deployment_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"success\"}) by (project, environment) * 1) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"running\"}) by (project, environment) * 2) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"failed\"}) by (project, environment) * 3) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"canceled\"}) by (project, environment) * 4) > 0 or\n(max(gitlab_ci_environment_deployment_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"skipped\"}) by (project, environment) * 5) > 0 
or\n(max(gitlab_ci_environment_deployment_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\", status=~\"created|waiting_for_resource|preparing|pending|manual|scheduled\"}) by (project, environment) * 6) > 0", "format": "table", "hide": false, "instant": true, @@ -1336,7 +1340,7 @@ "refId": "A" }, { - "expr": "-max(time() - gitlab_ci_environment_deployment_timestamp{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", + "expr": "-max(time() - gitlab_ci_environment_deployment_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", "format": "table", "hide": false, "instant": true, @@ -1346,7 +1350,7 @@ "refId": "B" }, { - "expr": "max(gitlab_ci_environment_deployment_duration_seconds{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", + "expr": "max(gitlab_ci_environment_deployment_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", "format": "table", "hide": false, "instant": true, @@ -1356,7 +1360,7 @@ "refId": "C" }, { - "expr": "max(gitlab_ci_environment_information{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (environment_id, project, environment, username, current_commit_short_id, ref, latest_commit_short_id)", + "expr": "max(gitlab_ci_environment_information{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (environment_id, project, environment, author_email, current_commit_short_id, ref, latest_commit_short_id)", "format": "table", "instant": true, "interval": "", @@ -1364,7 +1368,7 @@ "refId": "D" }, { - "expr": "max(gitlab_ci_environment_behind_commits_count{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", + "expr": "max(gitlab_ci_environment_behind_commits_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, 
environment)", "format": "table", "instant": true, "interval": "", @@ -1372,7 +1376,7 @@ "refId": "E" }, { - "expr": "max(gitlab_ci_environment_behind_duration_seconds{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", + "expr": "max(gitlab_ci_environment_behind_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", "format": "table", "instant": true, "interval": "", @@ -1380,7 +1384,7 @@ "refId": "F" }, { - "expr": "max(gitlab_ci_environment_deployment_job_id{project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", + "expr": "max(gitlab_ci_environment_deployment_job_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", environment=~\"$ENVIRONMENT\"}) by (project, environment)", "format": "table", "instant": true, "interval": "", @@ -1507,7 +1511,7 @@ "useTags": false }, { - "allValue": "", + "allValue": ".*", "current": { "selected": true, "text": [ @@ -1518,7 +1522,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_environment_information{project=~\"$OWNER.*\"}, project)", + "definition": "label_values(gitlab_ci_environment_information{project=~\"($OWNER).*\"}, project)", "error": null, "hide": 0, "includeAll": true, @@ -1526,7 +1530,7 @@ "multi": true, "name": "PROJECT", "options": [], - "query": "label_values(gitlab_ci_environment_information{project=~\"$OWNER.*\"}, project)", + "query": "label_values(gitlab_ci_environment_information{project=~\"($OWNER).*\"}, project)", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -1538,7 +1542,7 @@ "useTags": false }, { - "allValue": null, + "allValue": ".*", "current": { "selected": true, "text": [ @@ -1549,7 +1553,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_environment_information{project=~\"$PROJECT\"}, environment)", + "definition": "label_values(gitlab_ci_environment_information{project=~\"($OWNER).*\",project=~\"$PROJECT\"}, 
environment)", "error": null, "hide": 0, "includeAll": true, @@ -1557,7 +1561,7 @@ "multi": true, "name": "ENVIRONMENT", "options": [], - "query": "label_values(gitlab_ci_environment_information{project=~\"$PROJECT\"}, environment)", + "query": "label_values(gitlab_ci_environment_information{project=~\"($OWNER).*\",project=~\"$PROJECT\"}, environment)", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -1601,5 +1605,5 @@ "timezone": "", "title": "GitLab CI environments & deployments", "uid": "gitlab_ci_environment_deployments", - "version": 1 -} \ No newline at end of file + "version": 2 +} diff --git a/examples/quickstart/grafana/dashboards/dashboard_jobs.json b/examples/quickstart/grafana/dashboards/dashboard_jobs.json index 8d3d9c18..82fb021f 100644 --- a/examples/quickstart/grafana/dashboards/dashboard_jobs.json +++ b/examples/quickstart/grafana/dashboards/dashboard_jobs.json @@ -35,7 +35,7 @@ }, "id": 116, "options": { - "content": "

", + "content": "

", "mode": "html" }, "pluginVersion": "7.3.1", @@ -110,7 +110,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_pipeline_job_run_count{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"})", + "expr": "count(gitlab_ci_pipeline_job_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"})", "format": "time_series", "hide": false, "instant": false, @@ -194,7 +194,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_pipeline_job_status{status=\"failed\", project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"} > 0) or vector(0)", + "expr": "count(gitlab_ci_pipeline_job_status{status=\"failed\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"} > 0) or vector(0)", "format": "time_series", "instant": false, "interval": "", @@ -273,7 +273,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "sum(increase(gitlab_ci_pipeline_job_run_count{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}[1h]))", + "expr": "sum(increase(gitlab_ci_pipeline_job_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}[1h]))", "format": "time_series", "instant": false, "interval": "", @@ -397,7 +397,7 @@ "savedOverrides": [], "targets": [ { - "expr": "(gitlab_ci_pipeline_job_status{status=\"success\", project=~\"$OWNER.*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 1 > 0) or (gitlab_ci_pipeline_job_status{status=\"running\", project=~\"$OWNER.*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 2 > 0) or (gitlab_ci_pipeline_job_status{status=~\"failed|canceled\", project=~\"$OWNER.*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 3 > 0) or (gitlab_ci_pipeline_job_status{status!~\"success|running|failed|canceled\", project=~\"$OWNER.*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 4 > 0)", + "expr": "(gitlab_ci_pipeline_job_status{status=\"success\", project=~\"($OWNER).*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 1 > 0) or 
(gitlab_ci_pipeline_job_status{status=\"running\", project=~\"($OWNER).*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 2 > 0) or (gitlab_ci_pipeline_job_status{status=~\"failed|canceled\", project=~\"($OWNER).*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 3 > 0) or (gitlab_ci_pipeline_job_status{status!~\"success|running|failed|canceled\", project=~\"($OWNER).*\", ref=~\"$REF\", job_name=~\"$JOB\"} * 4 > 0)", "format": "time_series", "hide": false, "instant": true, @@ -490,7 +490,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "avg(time() - gitlab_ci_pipeline_job_timestamp{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"})", + "expr": "avg(time() - gitlab_ci_pipeline_job_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"})", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -573,7 +573,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "avg(gitlab_ci_pipeline_job_duration_seconds{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"})", + "expr": "avg(gitlab_ci_pipeline_job_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"})", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -595,21 +595,7 @@ "description": "", "fieldConfig": { "defaults": { - "custom": {}, - "mappings": [], - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green", - "value": null - }, - { - "color": "red", - "value": 80 - } - ] - } + "custom": {} }, "overrides": [] }, @@ -652,7 +638,7 @@ "steppedLine": false, "targets": [ { - "expr": "sum(increase(gitlab_ci_pipeline_job_run_count{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}[1m])) by (project, ref, job_name) / sum(increase(gitlab_ci_pipeline_job_run_count{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}[1m])) by (project, ref, job_name)", + "expr": "sum(increase(gitlab_ci_pipeline_job_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", 
job_name=~\"$JOB\"}[1m])) by (project, ref, job_name) / sum(increase(gitlab_ci_pipeline_job_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}[1m])) by (project, ref, job_name)", "format": "time_series", "instant": false, "interval": "", @@ -922,7 +908,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "-max(time() - gitlab_ci_pipeline_job_timestamp{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless max(gitlab_ci_pipeline_job_status{status!~\"success\", project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0", + "expr": "-max(time() - gitlab_ci_pipeline_job_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless max(gitlab_ci_pipeline_job_status{status!~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0", "format": "table", "hide": false, "instant": true, @@ -932,7 +918,7 @@ "refId": "B" }, { - "expr": "max(gitlab_ci_pipeline_job_duration_seconds{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless (max(gitlab_ci_pipeline_job_status{status!~\"success\", project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0)", + "expr": "max(gitlab_ci_pipeline_job_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless (max(gitlab_ci_pipeline_job_status{status!~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0)", "format": "table", "hide": false, "instant": true, @@ -942,7 +928,7 @@ "refId": "C" }, { - "expr": "max(gitlab_ci_pipeline_job_id{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) 
unless (max(gitlab_ci_pipeline_job_status{status!~\"success\", project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) > 0)", + "expr": "max(gitlab_ci_pipeline_job_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) unless (max(gitlab_ci_pipeline_job_status{status!~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) > 0)", "format": "table", "instant": true, "interval": "", @@ -1335,7 +1321,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "-max(time() - gitlab_ci_pipeline_job_timestamp{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless max(gitlab_ci_pipeline_job_status{status=~\"success\", project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0", + "expr": "-max(time() - gitlab_ci_pipeline_job_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless max(gitlab_ci_pipeline_job_status{status=~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0", "format": "table", "hide": false, "instant": true, @@ -1345,7 +1331,7 @@ "refId": "B" }, { - "expr": "max(gitlab_ci_pipeline_job_duration_seconds{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless (max(gitlab_ci_pipeline_job_status{status=~\"success\", project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0)", + "expr": "max(gitlab_ci_pipeline_job_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, job_name, kind) unless (max(gitlab_ci_pipeline_job_status{status=~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", 
job_name=~\"$JOB\"}) by (project, ref, job_name, kind) > 0)", "format": "table", "hide": false, "instant": true, @@ -1355,7 +1341,7 @@ "refId": "C" }, { - "expr": "(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"running\"}) by (project, ref, job_name, kind) * 2) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"failed\"}) by (project, ref, job_name, kind) * 3) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"canceled\"}) by (project, ref, job_name, kind) * 4) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"created\"}) by (project, ref, job_name, kind) * 5) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"waiting_for_resource\"}) by (project, ref, job_name, kind) * 6) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"preparing\"}) by (project, ref, job_name, kind) * 7) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"pending\"}) by (project, ref, job_name, kind) * 8) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"skipped\"}) by (project, ref, job_name, kind) * 9) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"manual\"}) by (project, ref, job_name, kind) * 10) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"scheduled\"}) by (project, ref, job_name, kind) * 11) > 0", + "expr": "(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"running\"}) by (project, ref, job_name, kind) * 2) > 0 
or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"failed\"}) by (project, ref, job_name, kind) * 3) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"canceled\"}) by (project, ref, job_name, kind) * 4) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"created\"}) by (project, ref, job_name, kind) * 5) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"waiting_for_resource\"}) by (project, ref, job_name, kind) * 6) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"preparing\"}) by (project, ref, job_name, kind) * 7) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"pending\"}) by (project, ref, job_name, kind) * 8) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"skipped\"}) by (project, ref, job_name, kind) * 9) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"manual\"}) by (project, ref, job_name, kind) * 10) > 0 or\n(max(gitlab_ci_pipeline_job_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\", status=~\"scheduled\"}) by (project, ref, job_name, kind) * 11) > 0", "format": "table", "hide": false, "instant": true, @@ -1365,7 +1351,7 @@ "refId": "A" }, { - "expr": "max(gitlab_ci_pipeline_job_id{project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) unless (max(gitlab_ci_pipeline_job_status{status=~\"success\", 
project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) > 0)", + "expr": "max(gitlab_ci_pipeline_job_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) unless (max(gitlab_ci_pipeline_job_status{status=~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", job_name=~\"$JOB\"}) by (project, ref, kind, job_name) > 0)", "format": "table", "instant": true, "interval": "", @@ -1418,7 +1404,7 @@ "type": "table" } ], - "refresh": "10s", + "refresh": "", "schemaVersion": 26, "style": "dark", "tags": [], @@ -1488,7 +1474,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_pipeline_id{project=~\"$OWNER.*\"}, project)", + "definition": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\"}, project)", "error": null, "hide": 0, "includeAll": true, @@ -1496,7 +1482,7 @@ "multi": true, "name": "PROJECT", "options": [], - "query": "label_values(gitlab_ci_pipeline_id{project=~\"$OWNER.*\"}, project)", + "query": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\"}, project)", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -1519,7 +1505,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_pipeline_id{project=~\"$PROJECT\"}, ref)", + "definition": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\",project=~\"$PROJECT\"}, ref)", "error": null, "hide": 0, "includeAll": true, @@ -1527,7 +1513,7 @@ "multi": true, "name": "REF", "options": [], - "query": "label_values(gitlab_ci_pipeline_id{project=~\"$PROJECT\"}, ref)", + "query": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\",project=~\"$PROJECT\"}, ref)", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -1550,7 +1536,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_pipeline_job_id{project=~\"$PROJECT\", ref=~\"$REF\"}, job_name)", + "definition": 
"label_values(gitlab_ci_pipeline_job_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}, job_name)", "error": null, "hide": 0, "includeAll": true, @@ -1558,7 +1544,7 @@ "multi": true, "name": "JOB", "options": [], - "query": "label_values(gitlab_ci_pipeline_job_id{project=~\"$PROJECT\", ref=~\"$REF\"}, job_name)", + "query": "label_values(gitlab_ci_pipeline_job_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}, job_name)", "refresh": 2, "regex": "", "skipUrlSync": false, @@ -1602,5 +1588,5 @@ "timezone": "", "title": "GitLab CI jobs", "uid": "gitlab_ci_jobs", - "version": 1 + "version": 2 } \ No newline at end of file diff --git a/examples/quickstart/grafana/dashboards/dashboard_pipelines.json b/examples/quickstart/grafana/dashboards/dashboard_pipelines.json index acadd0bb..27cac808 100644 --- a/examples/quickstart/grafana/dashboards/dashboard_pipelines.json +++ b/examples/quickstart/grafana/dashboards/dashboard_pipelines.json @@ -35,7 +35,7 @@ }, "id": 116, "options": { - "content": "

", + "content": "

", "mode": "html" }, "pluginVersion": "7.3.1", @@ -110,7 +110,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_pipeline_run_count{project=~\"$PROJECT\", ref=~\"$REF\"})", + "expr": "count(gitlab_ci_pipeline_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"})", "format": "time_series", "instant": false, "interval": "", @@ -193,7 +193,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "count(gitlab_ci_pipeline_status{status=\"failed\", project=~\"$PROJECT\", ref=~\"$REF\"} > 0) or vector(0)", + "expr": "count(gitlab_ci_pipeline_status{status=\"failed\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"} > 0) or vector(0)", "format": "time_series", "instant": false, "interval": "", @@ -272,7 +272,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "sum(increase(gitlab_ci_pipeline_run_count{project=~\"$PROJECT\", ref=~\"$REF\"}[1h]))", + "expr": "sum(increase(gitlab_ci_pipeline_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}[1h]))", "format": "time_series", "instant": false, "interval": "", @@ -417,7 +417,7 @@ ], "targets": [ { - "expr": "(gitlab_ci_pipeline_status{status=\"success\", project=~\"$PROJECT\", ref=~\"$REF\"} * 1 > 0) or (gitlab_ci_pipeline_status{status=\"running\", project=~\"$PROJECT\", ref=~\"$REF\"} * 2 > 0) or (gitlab_ci_pipeline_status{status=~\"failed|canceled\", project=~\"$PROJECT\", ref=~\"$REF\"} * 3 > 0) or (gitlab_ci_pipeline_status{status!~\"success|running|failed|canceled\", project=~\"$PROJECT\", ref=~\"$REF\"} * 4 > 0)", + "expr": "(gitlab_ci_pipeline_status{status=\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"} * 1 > 0) or (gitlab_ci_pipeline_status{status=\"running\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"} * 2 > 0) or (gitlab_ci_pipeline_status{status=~\"failed|canceled\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"} * 3 > 0) or 
(gitlab_ci_pipeline_status{status!~\"success|running|failed|canceled\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"} * 4 > 0)", "format": "time_series", "hide": false, "instant": true, @@ -510,7 +510,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "avg(time() - gitlab_ci_pipeline_timestamp{project=~\"$PROJECT\", ref=~\"$REF\"})", + "expr": "avg(time() - gitlab_ci_pipeline_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"})", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -593,7 +593,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "avg(gitlab_ci_pipeline_duration_seconds{project=~\"$PROJECT\", ref=~\"$REF\"})", + "expr": "avg(gitlab_ci_pipeline_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"})", "format": "time_series", "interval": "", "intervalFactor": 1, @@ -672,7 +672,7 @@ "steppedLine": false, "targets": [ { - "expr": "sum(increase(gitlab_ci_pipeline_run_count{project=~\"$PROJECT\", ref=~\"$REF\"}[1m])) by (project, ref) / sum(increase(gitlab_ci_pipeline_run_count{project=~\"$PROJECT\", ref=~\"$REF\"}[1m])) by (project, ref)", + "expr": "sum(increase(gitlab_ci_pipeline_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}[1m])) by (project, ref) / sum(increase(gitlab_ci_pipeline_run_count{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}[1m])) by (project, ref)", "format": "time_series", "instant": false, "interval": "", @@ -766,7 +766,7 @@ { "targetBlank": true, "title": "View pipeline #${__value.numeric}", - "url": "https://${GITLAB_HOST}/${__data.fields.project}/pipelines/${__value.numeric}" + "url": "https://${GITLAB_HOST}/${__data.fields.project}/-/pipelines/${__value.numeric}" } ] }, @@ -942,7 +942,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "-max(time() - gitlab_ci_pipeline_timestamp{project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless max(gitlab_ci_pipeline_status{status!~\"success\", 
project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0", + "expr": "-max(time() - gitlab_ci_pipeline_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless max(gitlab_ci_pipeline_status{status!~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0", "format": "table", "hide": false, "instant": true, @@ -952,7 +952,7 @@ "refId": "B" }, { - "expr": "max(gitlab_ci_pipeline_duration_seconds{project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless (max(gitlab_ci_pipeline_status{status!~\"success\", project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0)", + "expr": "max(gitlab_ci_pipeline_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless (max(gitlab_ci_pipeline_status{status!~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0)", "format": "table", "hide": false, "instant": true, @@ -962,7 +962,7 @@ "refId": "C" }, { - "expr": "max(gitlab_ci_pipeline_id{project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless (max(gitlab_ci_pipeline_status{status!~\"success\", project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0)", + "expr": "max(gitlab_ci_pipeline_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless (max(gitlab_ci_pipeline_status{status!~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0)", "format": "table", "instant": true, "interval": "", @@ -1354,7 +1354,7 @@ "pluginVersion": "7.3.1", "targets": [ { - "expr": "-max(time() - gitlab_ci_pipeline_timestamp{project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless max(gitlab_ci_pipeline_status{status=~\"success\", project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0", + "expr": "-max(time() - 
gitlab_ci_pipeline_timestamp{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless max(gitlab_ci_pipeline_status{status=~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0", "format": "table", "hide": false, "instant": true, @@ -1364,7 +1364,7 @@ "refId": "B" }, { - "expr": "max(gitlab_ci_pipeline_duration_seconds{project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless (max(gitlab_ci_pipeline_status{status=~\"success\", project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0)", + "expr": "max(gitlab_ci_pipeline_duration_seconds{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) unless (max(gitlab_ci_pipeline_status{status=~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind) > 0)", "format": "table", "hide": false, "instant": true, @@ -1374,7 +1374,7 @@ "refId": "C" }, { - "expr": "(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"running\"}) by (project, ref, kind) * 2) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"failed\"}) by (project, ref, kind) * 3) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"canceled\"}) by (project, ref, kind) * 4) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"created\"}) by (project, ref, kind) * 5) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"waiting_for_resource\"}) by (project, ref, kind) * 6) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"preparing\"}) by (project, ref, kind) * 7) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"pending\"}) by (project, ref, kind) * 8) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"skipped\"}) by 
(project, ref, kind) * 9) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"manual\"}) by (project, ref, kind) * 10) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"$PROJECT\", ref=~\"$REF\", status=~\"scheduled\"}) by (project, ref, kind) * 11) > 0", + "expr": "(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"running\"}) by (project, ref, kind) * 2) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"failed\"}) by (project, ref, kind) * 3) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"canceled\"}) by (project, ref, kind) * 4) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"created\"}) by (project, ref, kind) * 5) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"waiting_for_resource\"}) by (project, ref, kind) * 6) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"preparing\"}) by (project, ref, kind) * 7) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"pending\"}) by (project, ref, kind) * 8) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"skipped\"}) by (project, ref, kind) * 9) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"manual\"}) by (project, ref, kind) * 10) > 0 or\n(max(gitlab_ci_pipeline_status{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\", status=~\"scheduled\"}) by (project, ref, kind) * 11) > 0", "format": "table", "hide": false, "instant": true, @@ -1384,7 +1384,7 @@ "refId": "A" }, { - "expr": "max(gitlab_ci_pipeline_id{project=~\"$PROJECT\", 
ref=~\"$REF\"}) by (project, ref, kind, job_name) unless (max(gitlab_ci_pipeline_status{status=~\"success\", project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind, job_name) > 0)", + "expr": "max(gitlab_ci_pipeline_id{project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind, job_name) unless (max(gitlab_ci_pipeline_status{status=~\"success\", project=~\"($OWNER).*\",project=~\"$PROJECT\", ref=~\"$REF\"}) by (project, ref, kind, job_name) > 0)", "format": "table", "instant": true, "interval": "", @@ -1507,7 +1507,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_pipeline_id{project=~\"$OWNER.*\"}, project)", + "definition": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\"}, project)", "error": null, "hide": 0, "includeAll": true, @@ -1515,7 +1515,7 @@ "multi": true, "name": "PROJECT", "options": [], - "query": "label_values(gitlab_ci_pipeline_id{project=~\"$OWNER.*\"}, project)", + "query": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\"}, project)", "refresh": 1, "regex": "", "skipUrlSync": false, @@ -1538,7 +1538,7 @@ ] }, "datasource": "prometheus", - "definition": "label_values(gitlab_ci_pipeline_id{project=~\"$PROJECT\"}, ref)", + "definition": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\",project=~\"$PROJECT\"}, ref)", "error": null, "hide": 0, "includeAll": true, @@ -1546,7 +1546,7 @@ "multi": true, "name": "REF", "options": [], - "query": "label_values(gitlab_ci_pipeline_id{project=~\"$PROJECT\"}, ref)", + "query": "label_values(gitlab_ci_pipeline_id{project=~\"($OWNER).*\",project=~\"$PROJECT\"}, ref)", "refresh": 1, "regex": "", "skipUrlSync": false, @@ -1590,5 +1590,5 @@ "timezone": "", "title": "GitLab CI pipelines", "uid": "gitlab_ci_pipelines", - "version": 1 + "version": 2 } \ No newline at end of file diff --git a/examples/webhooks/README.md b/examples/webhooks/README.md index 949a074b..2636b2d2 100644 --- a/examples/webhooks/README.md +++ 
b/examples/webhooks/README.md @@ -104,6 +104,7 @@ If you query the `/metrics` endpoint of the exporter you should be able to see a ```shell gitlab_ci_pipeline_coverage{kind="branch",project="foo/bar",ref="main",topics="",variables=""} 0 gitlab_ci_pipeline_duration_seconds{kind="branch",project="foo/bar",ref="main",topics="",variables=""} 494 +gitlab_ci_pipeline_queued_duration_seconds{kind="branch",project="foo/bar",ref="main",topics="",variables=""} 60 gitlab_ci_pipeline_id{kind="branch",project="foo/bar",ref="main",topics="",variables=""} 1.00308162e+08 gitlab_ci_pipeline_run_count{kind="branch",project="foo/bar",ref="main",topics="",variables=""} 0 gitlab_ci_pipeline_status{kind="branch",project="foo/bar",ref="main",status="canceled",topics="",variables=""} 0 diff --git a/examples/webhooks/docker-compose.yml b/examples/webhooks/docker-compose.yml index c60aea39..de7a7709 100644 --- a/examples/webhooks/docker-compose.yml +++ b/examples/webhooks/docker-compose.yml @@ -1,7 +1,8 @@ +--- version: '3.8' services: gitlab-ci-pipelines-exporter: - image: docker.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.4.8 + image: quay.io/mvisonneau/gitlab-ci-pipelines-exporter:v0.5.8 # You can comment out the image name and use the following statement # to build the image against the current version of the repository # build: ../.. 
@@ -9,7 +10,7 @@ services: - 8080:8080 environment: GCPE_CONFIG: /etc/gitlab-ci-pipelines-exporter.yml - GCPE_LOG_LEVEL: debug + GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS: tcp://127.0.0.1:8082 volumes: - type: bind source: ./gitlab-ci-pipelines-exporter.yml diff --git a/examples/webhooks/gitlab-ci-pipelines-exporter.yml b/examples/webhooks/gitlab-ci-pipelines-exporter.yml index a8fe154e..83135d9a 100644 --- a/examples/webhooks/gitlab-ci-pipelines-exporter.yml +++ b/examples/webhooks/gitlab-ci-pipelines-exporter.yml @@ -1,3 +1,7 @@ +--- +log: + level: debug + gitlab: url: https://gitlab.com token: diff --git a/go.mod b/go.mod index 465ec6bd..ea982a71 100644 --- a/go.mod +++ b/go.mod @@ -1,41 +1,109 @@ module github.com/mvisonneau/gitlab-ci-pipelines-exporter -go 1.16 +go 1.23.0 + +toolchain go1.23.4 require ( - github.com/alicebob/miniredis v2.5.0+incompatible - github.com/alicebob/miniredis/v2 v2.14.3 - github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect - github.com/go-redis/redis/v8 v8.7.1 - github.com/go-redis/redis_rate/v9 v9.1.1 - github.com/gomodule/redigo v1.8.3 // indirect - github.com/google/uuid v1.2.0 // indirect - github.com/hashicorp/go-cleanhttp v0.5.2 // indirect - github.com/hashicorp/go-retryablehttp v0.6.8 // indirect - github.com/heptiolabs/healthcheck v0.0.0-20180807145615-6ff867650f40 - github.com/klauspost/compress v1.11.12 // indirect - github.com/magefile/mage v1.11.0 // indirect + dario.cat/mergo v1.0.1 + github.com/alicebob/miniredis/v2 v2.33.0 + github.com/charmbracelet/bubbles v0.20.0 + github.com/charmbracelet/bubbletea v1.2.4 + github.com/charmbracelet/lipgloss v1.0.0 + github.com/creasty/defaults v1.8.0 + github.com/go-logr/stdr v1.2.2 + github.com/go-playground/validator/v10 v10.23.0 + github.com/go-redis/redis_rate/v10 v10.0.1 + github.com/google/uuid v1.6.0 + github.com/heptiolabs/healthcheck v0.0.0-20211123025425-613501dd5deb github.com/mvisonneau/go-helpers v0.0.1 - github.com/openlyinc/pointy v1.1.2 + 
github.com/paulbellamy/ratecounter v0.2.0 github.com/pkg/errors v0.9.1 - github.com/prometheus/client_golang v1.9.0 - github.com/prometheus/common v0.18.0 // indirect - github.com/prometheus/procfs v0.6.0 // indirect + github.com/prometheus/client_golang v1.20.5 + github.com/redis/go-redis/extra/redisotel/v9 v9.7.0 + github.com/redis/go-redis/v9 v9.7.0 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.10.0 + github.com/uptrace/opentelemetry-go-extra/otellogrus v0.3.2 + github.com/urfave/cli/v2 v2.27.5 + github.com/vmihailenco/msgpack/v5 v5.4.1 + github.com/vmihailenco/taskq/memqueue/v4 v4.0.0-beta.4 + github.com/vmihailenco/taskq/redisq/v4 v4.0.0-beta.4 + github.com/vmihailenco/taskq/v4 v4.0.0-beta.4 + github.com/xanzy/go-gitlab v0.115.0 + github.com/xeonx/timeago v1.0.0-rc5 + go.openly.dev/pointy v1.3.0 + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 + go.opentelemetry.io/otel v1.33.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 + go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 + go.opentelemetry.io/otel/sdk v1.33.0 + go.opentelemetry.io/otel/trace v1.33.0 + golang.org/x/exp v0.0.0-20241210194714-1829a127f884 + golang.org/x/mod v0.22.0 + golang.org/x/time v0.8.0 + google.golang.org/grpc v1.69.0 + google.golang.org/protobuf v1.35.2 + gopkg.in/yaml.v3 v3.0.1 +) + +require ( + github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect + github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect + github.com/beorn7/perks v1.0.1 // indirect + github.com/bsm/redislock v0.9.4 // indirect + github.com/cenkalti/backoff/v4 v4.3.0 // indirect + github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/charmbracelet/harmonica v0.2.0 // indirect + github.com/charmbracelet/x/ansi v0.4.5 // indirect + github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/dgryski/go-farm 
v0.0.0-20200201041132-a6ae2369ad13 // indirect + github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect + github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect + github.com/felixge/httpsnoop v1.0.4 // indirect + github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/go-logr/logr v1.4.2 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/google/go-querystring v1.1.0 // indirect + github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 // indirect + github.com/hashicorp/go-cleanhttp v0.5.2 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect + github.com/hashicorp/golang-lru v1.0.2 // indirect + github.com/klauspost/compress v1.17.9 // indirect + github.com/leodido/go-urn v1.4.0 // indirect + github.com/lucasb-eyer/go-colorful v1.2.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect + github.com/mattn/go-localereader v0.0.1 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect + github.com/muesli/cancelreader v0.2.2 // indirect + github.com/muesli/termenv v0.15.2 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/prometheus/client_model v0.6.1 // indirect + github.com/prometheus/common v0.55.0 // indirect + github.com/prometheus/procfs v0.15.1 // indirect + github.com/redis/go-redis/extra/rediscmd/v9 v9.7.0 // indirect + github.com/rivo/uniseg v0.4.7 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect - github.com/sirupsen/logrus v1.8.0 - github.com/stretchr/testify v1.7.0 - github.com/urfave/cli/v2 v2.3.0 - github.com/vmihailenco/msgpack/v5 v5.2.0 - github.com/vmihailenco/taskq/v3 v3.2.3 - github.com/xanzy/go-gitlab v0.46.0 - go.uber.org/ratelimit v0.2.0 - golang.org/x/net 
v0.0.0-20210226172049-e18ecbb05110 // indirect - golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93 // indirect - golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b // indirect - golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect - google.golang.org/appengine v1.6.7 // indirect + github.com/uptrace/opentelemetry-go-extra/otelutil v0.3.2 // indirect + github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect + github.com/yuin/gopher-lua v1.1.1 // indirect + go.opentelemetry.io/auto/sdk v1.1.0 // indirect + go.opentelemetry.io/otel/log v0.6.0 // indirect + go.opentelemetry.io/otel/metric v1.33.0 // indirect + go.opentelemetry.io/proto/otlp v1.4.0 // indirect + golang.org/x/crypto v0.30.0 // indirect + golang.org/x/net v0.32.0 // indirect + golang.org/x/oauth2 v0.24.0 // indirect + golang.org/x/sync v0.10.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 // indirect gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b ) - -replace github.com/vmihailenco/taskq/v3 => github.com/mvisonneau/taskq/v3 v3.2.4-0.20201127170227-fddacd1811f5 diff --git a/go.sum b/go.sum index fea6674b..c6d3cb16 100644 --- a/go.sum +++ b/go.sum @@ -1,818 +1,258 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1/go.mod 
h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= -cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= -cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= -cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= -cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= -cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= -cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= -cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= -cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= -cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= -cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= -cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= -cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= -cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= -cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= -cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= -cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= 
-cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= -cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= -cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= -cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= -github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= -github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= -github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= -github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= -github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= -github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= -github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= +dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= 
github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= -github.com/alicebob/miniredis v2.5.0+incompatible h1:yBHoLpsyjupjz3NL3MhKMVkR41j82Yjf3KFv7ApYzUI= -github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+DxYx+6BMjkBbe5ONFIF1MXffk= -github.com/alicebob/miniredis/v2 v2.14.3 h1:QWoo2wchYmLgOB6ctlTt2dewQ1Vu6phl+iQbwT8SYGo= -github.com/alicebob/miniredis/v2 v2.14.3/go.mod h1:gquAfGbzn92jvtrSC69+6zZnwSODVXVpYDRaGhWaL6I= -github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 h1:MzBOUgng9orim59UnfUTLRjMpd09C5uEVQ6RPGeCaVI= -github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129/go.mod h1:rFgpPQZYZ8vdbc+48xibu8ALc3yeyd64IhHS+PU6Yyg= -github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= -github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= -github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= -github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= -github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= -github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= -github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.35.28 h1:S2LuRnfC8X05zgZLC8gy/Sb82TGv2Cpytzbzz7tkeHc= -github.com/aws/aws-sdk-go v1.35.28/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k= -github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= -github.com/beorn7/perks 
v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= -github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/alicebob/miniredis/v2 v2.33.0 h1:uvTF0EDeu9RLnUEG27Db5I68ESoIxTiXbNUiji6lZrA= +github.com/alicebob/miniredis/v2 v2.33.0/go.mod h1:MhP4a3EU7aENRi9aO+tHfTBZicLqQevyi/DJpoj6mi0= +github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= +github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bsm/redislock v0.7.0 h1:RL7aZJhCKkuBjQbnSTKCeedTRifBWxd/ffP+GZ599Mo= -github.com/bsm/redislock v0.7.0/go.mod h1:3Kgu+cXw0JrkZ5pmY/JbcFpixGZ5M9v9G2PGWYqku+k= -github.com/capnm/sysinfo v0.0.0-20130621111458-5909a53897f3 h1:IHZ1Le1ejzkmS7Si7dIzJvYDWe+BIoNmqMnfWHBZSVw= -github.com/capnm/sysinfo v0.0.0-20130621111458-5909a53897f3/go.mod h1:M5XHQLu90v2JNm/bW2tdsYar+5vhV0gEcBcmDBNAN1Y= -github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ= -github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= -github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod 
h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= -github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= -github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= -github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= -github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= -github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/bsm/redislock v0.9.4 h1:X/Wse1DPpiQgHbVYRE9zv6m070UcKoOGekgvpNhiSvw= +github.com/bsm/redislock v0.9.4/go.mod h1:Epf7AJLiSFwLCiZcfi6pWFO/8eAYrYpQXFxEDPoDeAk= +github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= +github.com/cenkalti/backoff/v4 v4.3.0/go.mod 
h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= +github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= +github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE= +github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU= +github.com/charmbracelet/bubbletea v1.2.4 h1:KN8aCViA0eps9SCOThb2/XPIlea3ANJLUkv3KnQRNCE= +github.com/charmbracelet/bubbletea v1.2.4/go.mod h1:Qr6fVQw+wX7JkWWkVyXYk/ZUQ92a6XNekLXa3rR18MM= +github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ= +github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao= +github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg= +github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo= +github.com/charmbracelet/x/ansi v0.4.5 h1:LqK4vwBNaXw2AyGIICa5/29Sbdq58GbGdFngSexTdRM= +github.com/charmbracelet/x/ansi v0.4.5/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw= +github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= +github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc= +github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/creasty/defaults v1.8.0 h1:z27FJxCAa0JKt3utc0sCImAEb+spPucmKoOdLHvHYKk= +github.com/creasty/defaults v1.8.0/go.mod h1:iGzKe6pbEHnpMPtfDXZEr0NVxWnPTjb1bbDy08fPzYM= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dgrijalva/jwt-go 
v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 h1:fAjc9m62+UWV/WAFKLNi6ZS0675eEUC9y3AlwSbQu1Y= github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= -github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eapache/go-resiliency v1.1.0/go.mod h1:kFI+JgMyC7bLPUVY133qvEBtVayf5mFgVsvEsIPBvNs= -github.com/eapache/go-xerial-snappy v0.0.0-20180814174437-776d5712da21/go.mod h1:+020luEh2TKB4/GOp8oxxtq0Daoen/Cii55CzbTV6DU= -github.com/eapache/queue v1.1.0/go.mod h1:6eCeP0CKFpHLu8blIFXhExK/dRa7WDZfr6jVFPTqq+I= -github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M= -github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4= -github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= -github.com/fsnotify/fsnotify v1.4.7/go.mod 
h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= +github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= +github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= -github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= -github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o= -github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= -github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= -github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= -github.com/go-redis/redis/v8 v8.1.0/go.mod h1:isLoQT/NFSP7V67lyvM9GmdvLdyZ7pEhsXvvyQtnQTo= -github.com/go-redis/redis/v8 v8.3.4/go.mod h1:jszGxBCez8QA1HWSmQxJO9Y82kNibbUmeYhKWrBejTU= -github.com/go-redis/redis/v8 
v8.4.0/go.mod h1:A1tbYoHSa1fXwN+//ljcCYYJeLmVrwL9hbQN45Jdy0M= -github.com/go-redis/redis/v8 v8.7.1 h1:8IYi6RO83fNcG5amcUUYTN/qH2h4OjZHlim3KWGFSsA= -github.com/go-redis/redis/v8 v8.7.1/go.mod h1:BRxHBWn3pO3CfjyX6vAoyeRmCquvxr6QG+2onGV2gYs= -github.com/go-redis/redis_rate/v9 v9.1.0/go.mod h1:jjU9YxOSZ3cz0yj1QJVAJiy5ueKmL9o4AySJHcKyTSE= -github.com/go-redis/redis_rate/v9 v9.1.1 h1:7SIrbnhQ7zsTNEgIvprFhJf7/+l3wSpZc2iRVwUmaq8= -github.com/go-redis/redis_rate/v9 v9.1.1/go.mod h1:jjU9YxOSZ3cz0yj1QJVAJiy5ueKmL9o4AySJHcKyTSE= -github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= -github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= -github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= -github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= -github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= 
-github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= -github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.4.3 h1:JjCZWpVbqXDqFVmTfYWEVTMIYrL/NPdPSCHPJ0T/raM= -github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/gomodule/redigo v1.8.3 h1:HR0kYDX2RJZvAup8CsiJwxB4dTCSC0AaUq6S4SiLwUc= -github.com/gomodule/redigo v1.8.3/go.mod h1:P9dn9mFrCBvWhGE1wpxx6fgq7BAeLBk+UUUzlpkBYO0= -github.com/google/btree 
v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= +github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= +github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= +github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= +github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= 
+github.com/go-playground/validator/v10 v10.23.0 h1:/PwmTwZhS0dPkav3cdK9kV1FsAmrL8sThn8IHr/sO+o= +github.com/go-playground/validator/v10 v10.23.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= +github.com/go-redis/redis_rate/v10 v10.0.1 h1:calPxi7tVlxojKunJwQ72kwfozdy25RjA0bCj1h0MUo= +github.com/go-redis/redis_rate/v10 v10.0.1/go.mod h1:EMiuO9+cjRkR7UvdvwMO7vbgqJkltQHtwbdIQvaBKIU= +github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= +github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.4 h1:L8R9j+yAqZuZjsqh/z+F1NCffTKKLShY6zXTItVIZ8M= -github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= -github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= -github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof 
v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= -github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= -github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= -github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= -github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= -github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= -github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= -github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= -github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= -github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= -github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-cleanhttp 
v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= +github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= +github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0 h1:TmHmbvxPmaegwhDubVz0lICL0J5Ka2vwTzhoePEXsGE= +github.com/grpc-ecosystem/grpc-gateway/v2 v2.24.0/go.mod h1:qztMSjm835F2bXf+5HKAPIS5qsmQDqZna/PgVt4rWtI= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= -github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= -github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= -github.com/hashicorp/go-retryablehttp v0.6.4/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT+TFdCxsPyWs= -github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= -github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= -github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= -github.com/hashicorp/go-syslog v1.0.0/go.mod 
h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= -github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= -github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= -github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= -github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= -github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= -github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= -github.com/heptiolabs/healthcheck v0.0.0-20180807145615-6ff867650f40 h1:GT4RsKmHh1uZyhmTkWJTDALRjSHYQp6FRKrotf0zhAs= -github.com/heptiolabs/healthcheck v0.0.0-20180807145615-6ff867650f40/go.mod h1:NtmN9h8vrTveVQRLHcX2HQ5wIPBDCsZ351TGbZWgg38= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= -github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= -github.com/iron-io/iron_go3 v0.0.0-20190916120531-a4a7f74b73ac 
h1:w5wltlINIIqRTqQ64dASrCo0fM7k9nosPbKCZnkL0W0= -github.com/iron-io/iron_go3 v0.0.0-20190916120531-a4a7f74b73ac/go.mod h1:gyMTRVO+ZkEy7wQDyD++okPsBN2q127EpuShhHMWG54= -github.com/jeffh/go.bdd v0.0.0-20120717032931-88f798ee0c74/go.mod h1:qNa9FlAfO0U/qNkzYBMH1JKYRMzC+sP9IcyV4U18l98= -github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= -github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= -github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= -github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= -github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= -github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= -github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.11.3/go.mod 
h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.11.12 h1:famVnQVu7QwryBN4jNseQdUKES71ZAOnB6UQQJPZvqk= -github.com/klauspost/compress v1.11.12/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= -github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= -github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= -github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= -github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= -github.com/magefile/mage v1.10.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= -github.com/magefile/mage v1.11.0 h1:C/55Ywp9BpgVVclD3lRnSYCwXTYxmSppIgLeDYlNuls= -github.com/magefile/mage v1.11.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= -github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= -github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/matttproud/golang_protobuf_extensions 
v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= -github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= -github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= -github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= -github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= -github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= +github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= +github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/heptiolabs/healthcheck 
v0.0.0-20211123025425-613501dd5deb h1:tsEKRC3PU9rMw18w/uAptoijhgG4EvlA5kfJPtwrMDk= +github.com/heptiolabs/healthcheck v0.0.0-20211123025425-613501dd5deb/go.mod h1:NtmN9h8vrTveVQRLHcX2HQ5wIPBDCsZ351TGbZWgg38= +github.com/klauspost/compress v1.17.9 h1:6KIumPrER1LHsvBVuDa0r5xaG0Es51mhhB9BQB2qeMA= +github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= +github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= +github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= +github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= +github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= +github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= +github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/muesli/ansi 
v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= +github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= +github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= +github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= +github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo= +github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mvisonneau/go-helpers v0.0.1 h1:jp/eaRBixQeCwILkqSDlNIAtRjBdRR3AENTxx5Ts04Y= github.com/mvisonneau/go-helpers v0.0.1/go.mod h1:9gxWJlesYQqoVW4jj+okotqvG5CB8BfLD06UbyyfKZA= -github.com/mvisonneau/taskq/v3 v3.2.4-0.20201127170227-fddacd1811f5 h1:NSxZfOOzcdkCI9hh2Ly/3gzyFG1d+mTuYk0sGXJFKck= -github.com/mvisonneau/taskq/v3 v3.2.4-0.20201127170227-fddacd1811f5/go.mod h1:iSJyq8ZwX+KkOCJxZVzjatK5XcBq0xnns69Oj5OkZ8k= -github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= -github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= -github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= -github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= -github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= 
-github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= -github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= -github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.14.1/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo v1.15.0 h1:1V1NfVQR87RtWAgp1lv9JZJ5Jap+XFGKPi00andXGi4= -github.com/onsi/ginkgo v1.15.0/go.mod h1:hF8qUzuuC8DJGygJH3726JnCZX4MYbRB8yFfISqnKUg= -github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.2/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= -github.com/onsi/gomega v1.10.5 h1:7n6FEkpFmfCoo2t+YYqXH0evK+a9ICQz0xcAy9dYcaQ= -github.com/onsi/gomega v1.10.5/go.mod h1:gza4q3jKQJijlu05nKWRCW/GavJumGt8aNRxWg7mt48= -github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= -github.com/openlyinc/pointy v1.1.2 h1:LywVV2BWC5Sp5v7FoP4bUD+2Yn5k0VNeRbU5vq9jUMY= -github.com/openlyinc/pointy v1.1.2/go.mod 
h1:w2Sytx+0FVuMKn37xpXIAyBNhFNBIJGR/v2m7ik1WtM= -github.com/opentracing-contrib/go-observer v0.0.0-20170622124052-a52f23424492/go.mod h1:Ngi6UdF0k5OKD5t5wlmGhe/EDKPoUM3BXZSSfIuJbis= -github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= -github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= -github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5/go.mod h1:/wsWhb9smxSfWAKL3wpBW7V8scJMt8N8gnaMCS9E/cA= -github.com/openzipkin/zipkin-go v0.1.6/go.mod h1:QgAqvLzwWbR/WpD4A3cGpPtJrZXNIiJc5AZX7/PBEpw= -github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= -github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4= -github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM= -github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= -github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= -github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= -github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= -github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= -github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/oklog/ulid/v2 v2.1.0 h1:+9lhoxAP56we25tyYETBBY1YLA2SaoLvUFgrP2miPJU= +github.com/oklog/ulid/v2 v2.1.0/go.mod 
h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ= +github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= +github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= +github.com/onsi/gomega v1.27.6 h1:ENqfyGeS5AX/rlXDd/ETokDz93u0YufY1Pgxuy/PvWE= +github.com/onsi/gomega v1.27.6/go.mod h1:PIQNjfQwkP3aQAH7lf7j87O/5FiNr+ZR8+ipb+qQlhg= +github.com/paulbellamy/ratecounter v0.2.0 h1:2L/RhJq+HA8gBQImDXtLPrDXK5qAj6ozWVK/zFXVJGs= +github.com/paulbellamy/ratecounter v0.2.0/go.mod h1:Hfx1hDpSGoqxkVVpBi/IlYD7kChlfo5C6hzIHwPqfFE= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= -github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= -github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= -github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeDPbaTKGT+JTgUa3og= -github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= -github.com/prometheus/client_golang v1.9.0 h1:Rrch9mh17XcxvEu9D9DEpb4isxjGBtcevQjKvxPRQIU= -github.com/prometheus/client_golang v1.9.0/go.mod h1:FqZLKOZnGdFAhOK4nqGHa7D66IdsO+O441Eve7ptJDU= -github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model 
v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= -github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.1.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= -github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt26CguLLsqA= -github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= -github.com/prometheus/common v0.15.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= -github.com/prometheus/common v0.18.0 h1:WCVKW7aL6LEe1uryfI9dnEc2ZqNB1Fn0ok930v0iL1Y= -github.com/prometheus/common v0.18.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s= -github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= -github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= -github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= -github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= -github.com/prometheus/procfs 
v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= -github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= -github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= -github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y= +github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= +github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= +github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/common v0.55.0 h1:KEi6DK7lXW/m7Ig5i47x0vRzuBsHuvJdi5ee6Y3G1dc= +github.com/prometheus/common v0.55.0/go.mod h1:2SECS4xJG1kd8XF9IcM1gMX6510RAEL65zxzNImwdc8= +github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= +github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= +github.com/redis/go-redis/extra/rediscmd/v9 v9.7.0 h1:BIx9TNZH/Jsr4l1i7VVxnV0JPiwYj8qyrHyuL0fGZrk= +github.com/redis/go-redis/extra/rediscmd/v9 v9.7.0/go.mod h1:eTg/YQtGYAZD5r3DlGlJptJ45AHA+/G+2NPn30PKzik= +github.com/redis/go-redis/extra/redisotel/v9 v9.7.0 h1:bQk8xiVFw+3ln4pfELVktpWgYdFpgLLU+quwSoeIof0= +github.com/redis/go-redis/extra/redisotel/v9 v9.7.0/go.mod h1:0LyN+GHLIJmKtjYRPF7nHyTTMV6E91YngoOopNifQRo= +github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= +github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= +github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= 
+github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= -github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= -github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= -github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.8.0 h1:nfhvjKcUMhBMVqbKHJlk5RPrrfYr/NMo3692g0dwfWU= -github.com/sirupsen/logrus v1.8.0/go.mod h1:4GuYW9TZmE769R5STWrRakJc4UqQ3+QQ95fyz7ENv1A= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= -github.com/sony/gobreaker v0.4.1/go.mod 
h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= -github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= -github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= -github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= -github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= -github.com/urfave/cli v1.22.1 h1:+mkCCcOFKPnCmVYVcURKps1Xe+3zP90gSYGNfRkjoIY= -github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= -github.com/urfave/cli/v2 v2.3.0 
h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= -github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= -github.com/vmihailenco/msgpack/v5 v5.0.0/go.mod h1:HVxBVPUK/+fZMonk4bi1islLa8V3cfnBug0+4dykPzo= -github.com/vmihailenco/msgpack/v5 v5.2.0 h1:ZhIAtVUP1mme8GIlpiAnmTzjSWMexA/uNF2We85DR0w= -github.com/vmihailenco/msgpack/v5 v5.2.0/go.mod h1:fEM7KuHcnm0GvDCztRpw9hV0PuoO2ciTismP6vjggcM= -github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vbd1qPqc= -github.com/vmihailenco/tagparser v0.1.2/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= +github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= +github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/uptrace/opentelemetry-go-extra/otellogrus v0.3.2 h1:H8wwQwTe5sL6x30z71lUgNiwBdeCHQjrphCfLwqIHGo= +github.com/uptrace/opentelemetry-go-extra/otellogrus v0.3.2/go.mod h1:/kR4beFhlz2g+V5ik8jW+3PMiMQAPt29y6K64NNY53c= +github.com/uptrace/opentelemetry-go-extra/otelutil v0.3.2 h1:3/aHKUq7qaFMWxyQV0W2ryNgg8x8rVeKVA20KJUkfS0= +github.com/uptrace/opentelemetry-go-extra/otelutil v0.3.2/go.mod h1:Zit4b8AQXaXvA68+nzmbyDzqiyFRISyw1JiD5JqUBjw= +github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w= +github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ= +github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= +github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= -github.com/xanzy/go-gitlab v0.46.0 h1:Wc7MxSz1UAg9ULKiBtPe0StAh2pVW3UINKmVxtqxibE= -github.com/xanzy/go-gitlab v0.46.0/go.mod h1:sPLojNBn68fMUWSxIJtdVVIP8uSBYqesTfDUseX11Ug= -github.com/xiang90/probing 
v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= -github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/gopher-lua v0.0.0-20200816102855-ee81675732da h1:NimzV1aGyq29m5ukMK0AMWEhFaL/lrEOaephfuoiARg= -github.com/yuin/gopher-lua v0.0.0-20200816102855-ee81675732da/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA= -go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= -go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= -go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= -go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= -go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= -go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= -go.opentelemetry.io/otel v0.11.0/go.mod h1:G8UCk+KooF2HLkgo8RHX9epABH/aRGYET7gQOqBVdB0= -go.opentelemetry.io/otel v0.13.0/go.mod h1:dlSNewoRYikTkotEnxdmuBHgzT+k/idJSfDv/FxEnOY= -go.opentelemetry.io/otel v0.14.0/go.mod h1:vH5xEuwy7Rts0GNtsCW3HYQoZDY+OmBJ6t1bFGGlxgw= -go.opentelemetry.io/otel v0.18.0 h1:d5Of7+Zw4ANFOJB+TIn2K3QWsgS2Ht7OU9DqZHI6qu8= -go.opentelemetry.io/otel v0.18.0/go.mod h1:PT5zQj4lTsR1YeARt8YNKcFb88/c2IKoSABK9mX0r78= -go.opentelemetry.io/otel/metric v0.18.0 h1:yuZCmY9e1ZTaMlZXLrrbAPmYW6tW1A5ozOZeOYGaTaY= -go.opentelemetry.io/otel/metric 
v0.18.0/go.mod h1:kEH2QtzAyBy3xDVQfGZKIcok4ZZFvd5xyKPfPcuK6pE= -go.opentelemetry.io/otel/oteltest v0.18.0 h1:FbKDFm/LnQDOHuGjED+fy3s5YMVg0z019GJ9Er66hYo= -go.opentelemetry.io/otel/oteltest v0.18.0/go.mod h1:NyierCU3/G8DLTva7KRzGii2fdxdR89zXKH1bNWY7Bo= -go.opentelemetry.io/otel/trace v0.18.0 h1:ilCfc/fptVKaDMK1vWk0elxpolurJbEgey9J6g6s+wk= -go.opentelemetry.io/otel/trace v0.18.0/go.mod h1:FzdUu3BPwZSZebfQ1vl5/tAa8LyMLXSJN57AXIt/iDk= -go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= -go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw= -go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= -go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= -go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= -go.uber.org/ratelimit v0.2.0 h1:UQE2Bgi7p2B85uP5dC2bbRtig0C+OeNRnNEafLjsLPA= -go.uber.org/ratelimit v0.2.0/go.mod h1:YYBV4e4naJvhpitQrWJu1vCpgB7CboMe0qhltKt6mUg= -go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= -go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= -go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= -golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod 
h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= -golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= -golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= -golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= -golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20200908183739-ae8ad444f925/go.mod h1:1phAWC201xIgDyaFpmDeZkgf70Q4Pd/CNqfRtVPtxNw= -golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= -golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod 
h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= -golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= -golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= -golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= -golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181108082009-03003ca0c849/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
-golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20201027133719-8eef5233e2a1/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net 
v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110 h1:qWPm9rbaAMKs8Bq/9LRpbMqxWRVUAQwMI9fVrssnTfw= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93 h1:alLDrZkL34Y2bnGHfvC1CYBRBXCXgx8AC2vY4MRtYX4= -golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync 
v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +github.com/vmihailenco/taskq/memqueue/v4 v4.0.0-beta.4 h1:JgFgIjAqWC2UgiTq6CV3XNfiP5Pf642py9wacgvDftU= +github.com/vmihailenco/taskq/memqueue/v4 v4.0.0-beta.4/go.mod h1:sOzK8FN53CqeonkU0fq6IbRmOofG40wplNgNO1QIbbE= +github.com/vmihailenco/taskq/redisq/v4 v4.0.0-beta.4 h1:DJza5d/dAICXj0QMIOiLC/SVMM/JpKQ9+9hrr08vMwY= +github.com/vmihailenco/taskq/redisq/v4 v4.0.0-beta.4/go.mod h1:dgn2y0l9VGgCxpqaL8WikDQoVMJNVNk5ADKT1jK8Yig= +github.com/vmihailenco/taskq/taskqtest/v4 v4.0.0-beta.4 h1:HkxNl01xXIxSiZ5gGUEBEuFq82gYtl5gQ5b0aVfISpM= +github.com/vmihailenco/taskq/taskqtest/v4 v4.0.0-beta.4/go.mod h1:eFJBPc15KwfiX5P/1wdQH6s28uflseLuzrTcHGXufek= +github.com/vmihailenco/taskq/v4 v4.0.0-beta.4 h1:Scybb5OGiu6Vr5R/Py7bseNcPwBKjuTS38VO2oixifA= +github.com/vmihailenco/taskq/v4 v4.0.0-beta.4/go.mod h1:KcqARv9hRrEUGlJfTq44lNyNPseskPbvFH7G5VWgSKY= +github.com/xanzy/go-gitlab v0.115.0 h1:6DmtItNcVe+At/liXSgfE/DZNZrGfalQmBRmOcJjOn8= +github.com/xanzy/go-gitlab v0.115.0/go.mod h1:5XCDtM7AM6WMKmfDdOiEpyRWUqui2iS9ILfvCZ2gJ5M= +github.com/xeonx/timeago v1.0.0-rc5 h1:pwcQGpaH3eLfPtXeyPA4DmHWjoQt0Ea7/++FwpxqLxg= +github.com/xeonx/timeago v1.0.0-rc5/go.mod h1:qDLrYEFynLO7y5Ho7w3GwgtYgpy5UfhcXIIQvMKVDkA= +github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4= 
+github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= +github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= +github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +go.openly.dev/pointy v1.3.0 h1:keht3ObkbDNdY8PWPwB7Kcqk+MAlNStk5kXZTxukE68= +go.openly.dev/pointy v1.3.0/go.mod h1:rccSKiQDQ2QkNfSVT2KG8Budnfhf3At8IWxy/3ElYes= +go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= +go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0 h1:yd02MEjBdJkG3uabWP9apV+OuWRIXGDuJEUJbOHmCFU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:umTcuxiv1n/s/S6/c2AT/g2CQ7u5C59sHDNmfSwgz7Q= +go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= +go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0 h1:Vh5HayB/0HHfOQA7Ctx69E/Y/DcQSMPpKANYVMQ7fBA= +go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.33.0/go.mod h1:cpgtDBaqD/6ok/UG0jT15/uKjAY8mRA53diogHBg3UI= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0 h1:5pojmb1U1AogINhN3SurB+zm/nIcusopeBNp42f45QM= +go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.33.0/go.mod h1:57gTHJSE5S1tqg+EKsLPlTWhpHMsWlVmer+LA926XiA= +go.opentelemetry.io/otel/log v0.6.0 h1:nH66tr+dmEgW5y+F9LanGJUBYPrRgP4g2EkmPE3LeK8= +go.opentelemetry.io/otel/log v0.6.0/go.mod h1:KdySypjQHhP069JX0z/t26VHwa8vSwzgaKmXtIB3fJM= +go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= +go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= +go.opentelemetry.io/otel/sdk v1.33.0 h1:iax7M131HuAm9QkZotNHEfstof92xM+N8sr3uHXc2IM= +go.opentelemetry.io/otel/sdk 
v1.33.0/go.mod h1:A1Q5oi7/9XaMlIWzPSxLRWOI8nG3FnzHJNbiENQuihM= +go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= +go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= +go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= +go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= +go.opentelemetry.io/proto/otlp v1.4.0 h1:TA9WRvW6zMwP+Ssb6fLoUIuirti1gGbP28GcKG1jgeg= +go.opentelemetry.io/proto/otlp v1.4.0/go.mod h1:PPBWZIP98o2ElSqI35IHfu7hIhSwvc5N38Jw8pXuGFY= +go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= +go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= +golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY= +golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/exp v0.0.0-20241210194714-1829a127f884 h1:Y/Mj/94zIQQGHVSv1tTtQBDaQaJe62U9bkDZKKyhPCU= +golang.org/x/exp v0.0.0-20241210194714-1829a127f884/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI= +golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs= +golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE= +golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
-golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201214210602-f9fddec55a1e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b h1:ggRgirZABFolTmi3sn6Ivd9SipZwLedQ5wR0aAKnFxU= -golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod 
h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE= -golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= -golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= -golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
+golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= +golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk= -google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= -google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= 
-google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= -google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= -google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= -google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= -google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= 
-google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= -google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= -google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= -google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= -google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= -google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= -google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= -google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= -google.golang.org/grpc 
v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= -google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= -google.golang.org/protobuf v1.25.0 h1:Ejskq+SyPohKW+1uil0JJMtmHCgJPJ/qWTxr8qp+R4c= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576 h1:CkkIfIt50+lT6NHAVoRYEyAvQGFM7xEwXUUywFvEb3Q= +google.golang.org/genproto/googleapis/api v0.0.0-20241209162323-e6fa225c2576/go.mod h1:1R3kvZ1dtP3+4p4d3G8uJ8rFk/fWlScl38vanWACI08= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576 h1:8ZmaLZE4XWrtU3MyClkYqqtl6Oegr3235h7jxsDyqCY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20241209162323-e6fa225c2576/go.mod h1:5uTbfoYQed2U9p3KIj2/Zzm02PYhndfdmML0qC3q3FU= +google.golang.org/grpc v1.69.0 h1:quSiOM1GJPmPH5XtU+BCoVXcDVJJAzNcoyfC2cCjGkI= +google.golang.org/grpc 
v1.69.0/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0 h1:FVCohIoYO7IJoDDVpV2pdq7SgrMH6wHnuTyrdrxJNoY= gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0/go.mod h1:OdE7CF6DbADk7lN8LIKRzRJTTZXIjtWgA5THM5lhBAw= -gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= -gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= -gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= -gopkg.in/yaml.v2 v2.2.1/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= -honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= -rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= -rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= -sigs.k8s.io/yaml 
v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= -sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/cli/cli.go b/internal/cli/cli.go index 1e116621..ff04823f 100644 --- a/internal/cli/cli.go +++ b/internal/cli/cli.go @@ -5,11 +5,12 @@ import ( "os" "time" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/internal/cmd" "github.com/urfave/cli/v2" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/internal/cmd" ) -// Run handles the instanciation of the CLI application +// Run handles the instanciation of the CLI application. func Run(version string, args []string) { err := NewApp(version, time.Now()).Run(args) if err != nil { @@ -18,7 +19,7 @@ func Run(version string, args []string) { } } -// NewApp configures the CLI application +// NewApp configures the CLI application. 
func NewApp(version string, start time.Time) (app *cli.App) { app = cli.NewApp() app.Name = "gitlab-ci-pipelines-exporter" @@ -28,43 +29,55 @@ func NewApp(version string, start time.Time) (app *cli.App) { app.Flags = cli.FlagsByName{ &cli.StringFlag{ - Name: "config", - Aliases: []string{"c"}, - EnvVars: []string{"GCPE_CONFIG"}, - Usage: "config `file`", - Value: "./gitlab-ci-pipelines-exporter.yml", - }, - &cli.StringFlag{ - Name: "redis-url", - EnvVars: []string{"GCPE_REDIS_URL"}, - Usage: "redis `url` for an HA setup (format: redis[s]://[:password@]host[:port][/db-number][?option=value])", - }, - &cli.StringFlag{ - Name: "gitlab-token", - EnvVars: []string{"GCPE_GITLAB_TOKEN"}, - Usage: "GitLab API access `token` (can be used to override the value set in the config file)", + Name: "internal-monitoring-listener-address", + Aliases: []string{"m"}, + EnvVars: []string{"GCPE_INTERNAL_MONITORING_LISTENER_ADDRESS"}, + Usage: "internal monitoring listener address", }, - &cli.StringFlag{ - Name: "webhook-secret-token", - EnvVars: []string{"GCPE_WEBHOOK_SECRET_TOKEN"}, - Usage: "`token` used to authenticate legitimate requests (can be used to override the value set in the config file)", - }, - &cli.StringFlag{ - Name: "log-level", - EnvVars: []string{"GCPE_LOG_LEVEL"}, - Usage: "log `level` (debug,info,warn,fatal,panic)", - Value: "info", + } + + app.Commands = cli.CommandsByName{ + { + Name: "run", + Usage: "start the exporter", + Action: cmd.ExecWrapper(cmd.Run), + Flags: cli.FlagsByName{ + &cli.StringFlag{ + Name: "config", + Aliases: []string{"c"}, + EnvVars: []string{"GCPE_CONFIG"}, + Usage: "config `file`", + Value: "./gitlab-ci-pipelines-exporter.yml", + }, + &cli.StringFlag{ + Name: "redis-url", + EnvVars: []string{"GCPE_REDIS_URL"}, + Usage: "redis `url` for an HA setup (format: redis[s]://[:password@]host[:port][/db-number][?option=value]) (overrides config file parameter)", + }, + &cli.StringFlag{ + Name: "gitlab-token", + EnvVars: 
[]string{"GCPE_GITLAB_TOKEN"}, + Usage: "GitLab API access `token` (overrides config file parameter)", + }, + &cli.StringFlag{ + Name: "webhook-secret-token", + EnvVars: []string{"GCPE_WEBHOOK_SECRET_TOKEN"}, + Usage: "`token` used to authenticate legitimate requests (overrides config file parameter)", + }, + &cli.StringFlag{ + Name: "gitlab-health-url", + EnvVars: []string{"GCPE_GITLAB_HEALTH_URL"}, + Usage: "GitLab health URL (overrides config file parameter)", + }, + }, }, - &cli.StringFlag{ - Name: "log-format", - EnvVars: []string{"GCPE_LOG_FORMAT"}, - Usage: "log `format` (json,text)", - Value: "text", + { + Name: "monitor", + Usage: "display information about the currently running exporter", + Action: cmd.ExecWrapper(cmd.Monitor), }, } - app.Action = cmd.ExecWrapper(cmd.Run) - app.Metadata = map[string]interface{}{ "startTime": start, } diff --git a/internal/cmd/monitor.go b/internal/cmd/monitor.go new file mode 100644 index 00000000..53344a5e --- /dev/null +++ b/internal/cmd/monitor.go @@ -0,0 +1,22 @@ +package cmd + +import ( + "github.com/urfave/cli/v2" + + monitorUI "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/ui" +) + +// Monitor .. 
+func Monitor(ctx *cli.Context) (int, error) { + cfg, err := parseGlobalFlags(ctx) + if err != nil { + return 1, err + } + + monitorUI.Start( + ctx.App.Version, + cfg.InternalMonitoringListenerAddress, + ) + + return 0, nil +} diff --git a/internal/cmd/run.go b/internal/cmd/run.go index 998fa919..8417818f 100644 --- a/internal/cmd/run.go +++ b/internal/cmd/run.go @@ -1,16 +1,113 @@ package cmd import ( - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/exporter" + "context" + "net/http" + "net/http/pprof" + "os" + "os/signal" + "syscall" + "time" + + log "github.com/sirupsen/logrus" "github.com/urfave/cli/v2" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/controller" + monitoringServer "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/server" ) -// Run launches the exporter -func Run(ctx *cli.Context) (int, error) { - if err := configure(ctx); err != nil { +// Run launches the exporter. +func Run(cliCtx *cli.Context) (int, error) { + cfg, err := configure(cliCtx) + if err != nil { return 1, err } - exporter.Run() + ctx, ctxCancel := context.WithCancel(context.Background()) + defer ctxCancel() + + c, err := controller.New(ctx, cfg, cliCtx.App.Version) + if err != nil { + return 1, err + } + + // Start the monitoring RPC server + go func(c *controller.Controller) { + s := monitoringServer.NewServer( + c.Gitlab, + c.Config, + c.Store, + c.TaskController.TaskSchedulingMonitoring, + ) + s.Serve() + }(&c) + + // Graceful shutdowns + onShutdown := make(chan os.Signal, 1) + signal.Notify(onShutdown, syscall.SIGINT, syscall.SIGTERM, syscall.SIGABRT) + + // HTTP server + mux := http.NewServeMux() + srv := &http.Server{ + Addr: cfg.Server.ListenAddress, + Handler: mux, + } + + // health endpoints + health := c.HealthCheckHandler(ctx) + mux.HandleFunc("/health/live", health.LiveEndpoint) + mux.HandleFunc("/health/ready", health.ReadyEndpoint) + + // metrics endpoint + if cfg.Server.Metrics.Enabled { + mux.HandleFunc("/metrics", 
c.MetricsHandler) + } + + // pprof/debug endpoints + if cfg.Server.EnablePprof { + mux.HandleFunc("/debug/pprof/", pprof.Index) + mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) + mux.HandleFunc("/debug/pprof/profile", pprof.Profile) + mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) + mux.HandleFunc("/debug/pprof/trace", pprof.Trace) + } + + // webhook endpoints + if cfg.Server.Webhook.Enabled { + mux.HandleFunc("/webhook", c.WebhookHandler) + } + + go func() { + if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { + log.WithContext(ctx). + WithError(err). + Fatal() + } + }() + + log.WithFields( + log.Fields{ + "listen-address": cfg.Server.ListenAddress, + "pprof-endpoint-enabled": cfg.Server.EnablePprof, + "metrics-endpoint-enabled": cfg.Server.Metrics.Enabled, + "webhook-endpoint-enabled": cfg.Server.Webhook.Enabled, + "openmetrics-encoding-enabled": cfg.Server.Metrics.EnableOpenmetricsEncoding, + "controller-uuid": c.UUID, + }, + ).Info("http server started") + + <-onShutdown + log.Info("received signal, attempting to gracefully exit..") + ctxCancel() + + httpServerContext, forceHTTPServerShutdown := context.WithTimeout(context.Background(), 5*time.Second) + defer forceHTTPServerShutdown() + + if err := srv.Shutdown(httpServerContext); err != nil { + return 1, err + } + + log.Info("stopped!") + return 0, nil } diff --git a/internal/cmd/run_test.go b/internal/cmd/run_test.go index 93e49cf9..8856ddf9 100644 --- a/internal/cmd/run_test.go +++ b/internal/cmd/run_test.go @@ -6,17 +6,19 @@ import ( "github.com/stretchr/testify/assert" ) -func TestRunWrongLogLevel(t *testing.T) { - ctx, flags := NewTestContext() - flags.String("log-format", "foo", "") - exitCode, err := Run(ctx) - assert.Equal(t, 1, exitCode) - assert.Error(t, err) -} +// func TestRunWrongLogLevel(t *testing.T) { +// ctx, flags := NewTestContext() +// flags.String("log-format", "foo", "") +// exitCode, err := Run(ctx) +// assert.Equal(t, 1, exitCode) +// assert.Error(t, 
err) +// } func TestRunInvalidConfigFile(t *testing.T) { ctx, flags := NewTestContext() + flags.String("config", "path_does_not_exist", "") + exitCode, err := Run(ctx) assert.Equal(t, 1, exitCode) assert.Error(t, err) diff --git a/internal/cmd/utils.go b/internal/cmd/utils.go index 50e86f59..7603f5a6 100644 --- a/internal/cmd/utils.go +++ b/internal/cmd/utils.go @@ -3,96 +3,85 @@ package cmd import ( "fmt" stdlibLog "log" + "net/url" + "os" "time" - "github.com/go-redis/redis/v8" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/exporter" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/mvisonneau/go-helpers/logger" - "github.com/pkg/errors" - "github.com/vmihailenco/taskq/v3" - + "github.com/go-logr/stdr" log "github.com/sirupsen/logrus" + "github.com/uptrace/opentelemetry-go-extra/otellogrus" "github.com/urfave/cli/v2" + "github.com/vmihailenco/taskq/v4" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/go-helpers/logger" ) var start time.Time -func configure(ctx *cli.Context) (err error) { +func configure(ctx *cli.Context) (cfg config.Config, err error) { start = ctx.App.Metadata["startTime"].(time.Time) - // Configure logger - if err = logger.Configure(logger.Config{ - Level: ctx.String("log-level"), - Format: ctx.String("log-format"), - }); err != nil { + assertStringVariableDefined(ctx, "config") + + cfg, err = config.ParseFile(ctx.String("config")) + if err != nil { return } - // This hack is to embed taskq logs with logrus - taskq.SetLogger(stdlibLog.New(log.StandardLogger().WriterLevel(log.WarnLevel), "taskq", 0)) - - // Initialize config - var cfg schemas.Config - if cfg, err = schemas.ParseConfigFile(ctx.String("config")); err != nil { + cfg.Global, err = parseGlobalFlags(ctx) + if err != nil { return } - if len(ctx.String("gitlab-token")) > 0 { - cfg.Gitlab.Token = ctx.String("gitlab-token") - } + configCliOverrides(ctx, &cfg) - if len(cfg.Gitlab.Token) == 0 { - return 
fmt.Errorf("--gitlab-token' must be defined") - } - - if cfg.Server.Webhook.Enabled { - if len(ctx.String("webhook-secret-token")) > 0 { - cfg.Server.Webhook.SecretToken = ctx.String("webhook-secret-token") - } - if len(cfg.Server.Webhook.SecretToken) == 0 { - return fmt.Errorf("--webhook-secret-token' must be defined") - } + if err = cfg.Validate(); err != nil { + return } - schemas.UpdateProjectDefaults(cfg.ProjectDefaults) - - if len(ctx.String("redis-url")) > 0 { - cfg.Redis.URL = ctx.String("redis-url") + // Configure logger + if err = logger.Configure(logger.Config{ + Level: cfg.Log.Level, + Format: cfg.Log.Format, + }); err != nil { + return } - if len(cfg.Redis.URL) > 0 { - log.Info("redis url configured, initializing connection..") - var opt *redis.Options - if opt, err = redis.ParseURL(cfg.Redis.URL); err != nil { - return errors.Wrap(err, "parsing redis-url") - } + log.AddHook(otellogrus.NewHook(otellogrus.WithLevels( + log.PanicLevel, + log.FatalLevel, + log.ErrorLevel, + log.WarnLevel, + ))) - if err = exporter.ConfigureRedisClient(redis.NewClient(opt)); err != nil { - return - } - } - - if err = exporter.Configure(cfg, ctx.App.Version); err != nil { - return - } + // This hack is to embed taskq logs with logrus + taskq.SetLogger(stdr.New(stdlibLog.New(log.StandardLogger().WriterLevel(log.WarnLevel), "taskq", 0))) log.WithFields( log.Fields{ - "gitlab-endpoint": cfg.Gitlab.URL, - "pull-rate-limit": fmt.Sprintf("%drps", cfg.Pull.MaximumGitLabAPIRequestsPerSecond), + "gitlab-endpoint": cfg.Gitlab.URL, + "gitlab-rate-limit": fmt.Sprintf("%drps", cfg.Gitlab.MaximumRequestsPerSecond), }, - ).Info("exporter configured") + ).Info("configured") + + log.WithFields(config.SchedulerConfig(cfg.Pull.ProjectsFromWildcards).Log()).Info("pull projects from wildcards") + log.WithFields(config.SchedulerConfig(cfg.Pull.EnvironmentsFromProjects).Log()).Info("pull environments from projects") + 
log.WithFields(config.SchedulerConfig(cfg.Pull.RefsFromProjects).Log()).Info("pull refs from projects") + log.WithFields(config.SchedulerConfig(cfg.Pull.Metrics).Log()).Info("pull metrics") + + log.WithFields(config.SchedulerConfig(cfg.GarbageCollect.Projects).Log()).Info("garbage collect projects") + log.WithFields(config.SchedulerConfig(cfg.GarbageCollect.Environments).Log()).Info("garbage collect environments") + log.WithFields(config.SchedulerConfig(cfg.GarbageCollect.Refs).Log()).Info("garbage collect refs") + log.WithFields(config.SchedulerConfig(cfg.GarbageCollect.Metrics).Log()).Info("garbage collect metrics") - log.WithFields(cfg.Pull.ProjectsFromWildcards.Log()).Info("pull projects from wildcards") - log.WithFields(cfg.Pull.EnvironmentsFromProjects.Log()).Info("pull environments from projects") - log.WithFields(cfg.Pull.RefsFromProjects.Log()).Info("pull refs from projects") - log.WithFields(cfg.Pull.Metrics.Log()).Info("pull metrics") + return +} - log.WithFields(cfg.GarbageCollect.Projects.Log()).Info("garbage collect projects") - log.WithFields(cfg.GarbageCollect.Environments.Log()).Info("garbage collect environments") - log.WithFields(cfg.GarbageCollect.Refs.Log()).Info("garbage collect refs") - log.WithFields(cfg.GarbageCollect.Metrics.Log()).Info("garbage collect metrics") +func parseGlobalFlags(ctx *cli.Context) (cfg config.Global, err error) { + if listenerAddr := ctx.String("internal-monitoring-listener-address"); listenerAddr != "" { + cfg.InternalMonitoringListenerAddress, err = url.Parse(listenerAddr) + } return } @@ -105,15 +94,45 @@ func exit(exitCode int, err error) cli.ExitCoder { ).Debug("exited..") if err != nil { - log.Error(err.Error()) + log.WithError(err).Error() } - return cli.NewExitError("", exitCode) + return cli.Exit("", exitCode) } -// ExecWrapper gracefully logs and exits our `run` functions +// ExecWrapper gracefully logs and exits our `run` functions. 
func ExecWrapper(f func(ctx *cli.Context) (int, error)) cli.ActionFunc { return func(ctx *cli.Context) error { return exit(f(ctx)) } } + +func configCliOverrides(ctx *cli.Context, cfg *config.Config) { + if ctx.String("gitlab-token") != "" { + cfg.Gitlab.Token = ctx.String("gitlab-token") + } + + if cfg.Server.Webhook.Enabled { + if ctx.String("webhook-secret-token") != "" { + cfg.Server.Webhook.SecretToken = ctx.String("webhook-secret-token") + } + } + + if ctx.String("redis-url") != "" { + cfg.Redis.URL = ctx.String("redis-url") + } + + if healthURL := ctx.String("gitlab-health-url"); healthURL != "" { + cfg.Gitlab.HealthURL = healthURL + cfg.Gitlab.EnableHealthCheck = true + } +} + +func assertStringVariableDefined(ctx *cli.Context, k string) { + if len(ctx.String(k)) == 0 { + _ = cli.ShowAppHelp(ctx) + + log.Errorf("'--%s' must be set!", k) + os.Exit(2) + } +} diff --git a/internal/cmd/utils_test.go b/internal/cmd/utils_test.go index 66696fb2..9fc81d1e 100644 --- a/internal/cmd/utils_test.go +++ b/internal/cmd/utils_test.go @@ -10,6 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/urfave/cli/v2" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" ) func NewTestContext() (ctx *cli.Context, flags *flag.FlagSet) { @@ -27,10 +29,19 @@ func NewTestContext() (ctx *cli.Context, flags *flag.FlagSet) { } func TestConfigure(t *testing.T) { - f, err := ioutil.TempFile("/tmp", "test-") + var ( + cfg config.Config + err error + ) + + f, err := ioutil.TempFile(".", "test-*.yml") assert.NoError(t, err) + defer os.Remove(f.Name()) + // Webhook endpoint enabled + ioutil.WriteFile(f.Name(), []byte(`wildcards: [{}]`), 0o644) + ctx, flags := NewTestContext() flags.String("log-format", "text", "") flags.String("log-level", "debug", "") @@ -38,48 +49,50 @@ func TestConfigure(t *testing.T) { // Undefined gitlab-token flags.String("gitlab-token", "", "") - assert.Error(t, configure(ctx)) + + _, err = configure(ctx) + assert.Error(t, err) // Valid 
configuration flags.Set("gitlab-token", "secret") - assert.NoError(t, configure(ctx)) + + cfg, err = configure(ctx) + assert.NoError(t, err) + assert.Equal(t, "secret", cfg.Gitlab.Token) // Invalid config file syntax ioutil.WriteFile(f.Name(), []byte("["), 0o644) - assert.Error(t, configure(ctx)) + + cfg, err = configure(ctx) + assert.Error(t, err) // Webhook endpoint enabled ioutil.WriteFile(f.Name(), []byte(` +wildcards: [{}] server: webhook: enabled: true `), 0o644) // No secret token defined for the webhook endpoint - assert.Error(t, configure(ctx)) + cfg, err = configure(ctx) + assert.Error(t, err) // Defining the webhook secret token flags.String("webhook-secret-token", "secret", "") - assert.NoError(t, configure(ctx)) - - // Invalid redis-url - flags.String("redis-url", "[", "") - assert.Error(t, configure(ctx)) - - // Valid redis-url with unreachable server - flags.Set("redis-url", "redis://localhost:6379") - assert.Error(t, configure(ctx)) - - // Valid redis-url with reachable server - // TODO: Figure out how to make it work without failing other tests by timing out - // s, err := miniredis.Run() - // if err != nil { - // panic(err) - // } - // defer s.Close() - - // flags.Set("redis-url", fmt.Sprintf("redis://%s", s.Addr())) - // assert.NoError(t, configure(ctx)) + + cfg, err = configure(ctx) + assert.NoError(t, err) + assert.Equal(t, "secret", cfg.Server.Webhook.SecretToken) + + // Test health url flag + healthURL := "https://gitlab.com/-/readiness?token" + flags.String("gitlab-health-url", healthURL, "") + + cfg, err = configure(ctx) + assert.NoError(t, err) + assert.Equal(t, cfg.Gitlab.HealthURL, healthURL) + assert.True(t, cfg.Gitlab.EnableHealthCheck) } func TestExit(t *testing.T) { diff --git a/pkg/config/config.go b/pkg/config/config.go new file mode 100644 index 00000000..274ef694 --- /dev/null +++ b/pkg/config/config.go @@ -0,0 +1,324 @@ +package config + +import ( + "fmt" + + "github.com/creasty/defaults" + 
"github.com/go-playground/validator/v10" + log "github.com/sirupsen/logrus" + "gopkg.in/yaml.v3" +) + +var validate *validator.Validate + +// Config represents all the parameters required for the app to be configured properly. +type Config struct { + // Global .. + Global Global `yaml:",omitempty"` + + // Log configuration for the exporter + Log Log `yaml:"log"` + + // OpenTelemetry configuration + OpenTelemetry OpenTelemetry `yaml:"opentelemetry"` + + // Server related configuration + Server Server `yaml:"server"` + + // GitLab related configuration + Gitlab Gitlab `yaml:"gitlab"` + + // Redis related configuration + Redis Redis `yaml:"redis"` + + // Pull configuration + Pull Pull `yaml:"pull"` + + // GarbageCollect configuration + GarbageCollect GarbageCollect `yaml:"garbage_collect"` + + // Default parameters which can be overridden at either the Project or Wildcard level + ProjectDefaults ProjectParameters `yaml:"project_defaults"` + + // List of projects to pull + Projects []Project `validate:"unique,at-least-1-project-or-wildcard,dive" yaml:"projects"` + + // List of wildcards to search projects from + Wildcards []Wildcard `validate:"unique,at-least-1-project-or-wildcard,dive" yaml:"wildcards"` +} + +// Log holds runtime logging configuration. +type Log struct { + // Log level + Level string `default:"info" validate:"required,oneof=trace debug info warning error fatal panic"` + + // Log format + Format string `default:"text" validate:"oneof=text json"` +} + +// OpenTelemetry related configuration. +type OpenTelemetry struct { + // gRPC endpoint of the opentelemetry collector + GRPCEndpoint string `yaml:"grpc_endpoint"` +} + +// Server .. 
+type Server struct { + // Enable profiling pages + EnablePprof bool `default:"false" yaml:"enable_pprof"` + + // [address:port] to make the process listen upon + ListenAddress string `default:":8080" yaml:"listen_address"` + + Metrics ServerMetrics `yaml:"metrics"` + Webhook ServerWebhook `yaml:"webhook"` +} + +// ServerMetrics .. +type ServerMetrics struct { + // Enable /metrics endpoint + Enabled bool `default:"true" yaml:"enabled"` + + // Enable OpenMetrics content encoding in prometheus HTTP handler + EnableOpenmetricsEncoding bool `default:"false" yaml:"enable_openmetrics_encoding"` +} + +// ServerWebhook .. +type ServerWebhook struct { + // Enable /webhook endpoint to support GitLab requests + Enabled bool `default:"false" yaml:"enabled"` + + // Secret token to authenticate legitimate webhook requests coming from the GitLab server + SecretToken string `validate:"required_if=Enabled true" yaml:"secret_token"` +} + +// Gitlab .. +type Gitlab struct { + // The URL of the GitLab server/api + URL string `default:"https://gitlab.com" validate:"required,url" yaml:"url"` + + // Token to use to authenticate against the API + Token string `validate:"required" yaml:"token"` + + // The URL of the GitLab server/api health endpoint (default to /users/sign_in which is publicly available on gitlab.com) + HealthURL string `default:"https://gitlab.com/explore" validate:"required,url" yaml:"health_url"` + + // Whether to validate the service is reachable calling HealthURL + EnableHealthCheck bool `default:"true" yaml:"enable_health_check"` + + // Whether to skip TLS validation when querying HealthURL + EnableTLSVerify bool `default:"true" yaml:"enable_tls_verify"` + + // Maximum limit for the GitLab API requests/sec + MaximumRequestsPerSecond int `default:"1" validate:"gte=1" yaml:"maximum_requests_per_second"` + + // Burstable limit for the GitLab API requests/sec + BurstableRequestsPerSecond int `default:"5" validate:"gte=1" yaml:"burstable_requests_per_second"` + + // 
Maximum amount of jobs to keep queue, if this limit is reached + // newly created ones will get dropped. As a best practice you should not change this value. + // Workarounds to avoid hitting the limit are: + // - increase polling intervals + // - increase API rate limit + // - reduce the amount of projects, refs, environments or metrics you are looking into + // - leverage webhooks instead of polling schedules + // + MaximumJobsQueueSize int `default:"1000" validate:"gte=10" yaml:"maximum_jobs_queue_size"` +} + +// Redis .. +type Redis struct { + // URL used to connect onto the redis endpoint + // format: redis[s]://[:password@]host[:port][/db-number][?option=value]) + URL string `yaml:"url"` +} + +// Pull .. +type Pull struct { + // ProjectsFromWildcards configuration + ProjectsFromWildcards struct { + OnInit bool `default:"true" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"1800" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"projects_from_wildcards"` + + // EnvironmentsFromProjects configuration + EnvironmentsFromProjects struct { + OnInit bool `default:"true" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"1800" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"environments_from_projects"` + + // RefsFromProjects configuration + RefsFromProjects struct { + OnInit bool `default:"true" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"300" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"refs_from_projects"` + + // Metrics configuration + Metrics struct { + OnInit bool `default:"true" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"30" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"metrics"` +} + +// GarbageCollect .. 
+type GarbageCollect struct { + // Projects configuration + Projects struct { + OnInit bool `default:"false" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"14400" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"projects"` + + // Environments configuration + Environments struct { + OnInit bool `default:"false" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"14400" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"environments"` + + // Refs configuration + Refs struct { + OnInit bool `default:"false" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"1800" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"refs"` + + // Metrics configuration + Metrics struct { + OnInit bool `default:"false" yaml:"on_init"` + Scheduled bool `default:"true" yaml:"scheduled"` + IntervalSeconds int `default:"600" validate:"gte=1" yaml:"interval_seconds"` + } `yaml:"metrics"` +} + +// UnmarshalYAML allows us to correctly hydrate our configuration using some custom logic. 
+func (c *Config) UnmarshalYAML(v *yaml.Node) (err error) { + type localConfig struct { + Log Log `yaml:"log"` + OpenTelemetry OpenTelemetry `yaml:"opentelemetry"` + Server Server `yaml:"server"` + Gitlab Gitlab `yaml:"gitlab"` + Redis Redis `yaml:"redis"` + Pull Pull `yaml:"pull"` + GarbageCollect GarbageCollect `yaml:"garbage_collect"` + ProjectDefaults ProjectParameters `yaml:"project_defaults"` + + Projects []yaml.Node `yaml:"projects"` + Wildcards []yaml.Node `yaml:"wildcards"` + } + + _cfg := localConfig{} + defaults.MustSet(&_cfg) + + if err = v.Decode(&_cfg); err != nil { + return + } + + c.Log = _cfg.Log + c.OpenTelemetry = _cfg.OpenTelemetry + c.Server = _cfg.Server + c.Gitlab = _cfg.Gitlab + c.Redis = _cfg.Redis + c.Pull = _cfg.Pull + c.GarbageCollect = _cfg.GarbageCollect + c.ProjectDefaults = _cfg.ProjectDefaults + + for _, n := range _cfg.Projects { + p := c.NewProject() + if err = n.Decode(&p); err != nil { + return + } + + c.Projects = append(c.Projects, p) + } + + for _, n := range _cfg.Wildcards { + w := c.NewWildcard() + if err = n.Decode(&w); err != nil { + return + } + + c.Wildcards = append(c.Wildcards, w) + } + + return +} + +// ToYAML .. +func (c Config) ToYAML() string { + c.Global = Global{} + c.Server.Webhook.SecretToken = "*******" + c.Gitlab.Token = "*******" + + b, err := yaml.Marshal(c) + if err != nil { + panic(err) + } + + return string(b) +} + +// Validate will throw an error if the Config parameters are either incomplete or incorrect. +func (c Config) Validate() error { + if validate == nil { + validate = validator.New() + _ = validate.RegisterValidation("at-least-1-project-or-wildcard", ValidateAtLeastOneProjectOrWildcard) + } + + return validate.Struct(c) +} + +// SchedulerConfig .. +type SchedulerConfig struct { + OnInit bool + Scheduled bool + IntervalSeconds int +} + +// Log returns some logging fields to showcase the configuration to the end user.
+func (sc SchedulerConfig) Log() log.Fields { + onInit, scheduled := "no", "no" + if sc.OnInit { + onInit = "yes" + } + + if sc.Scheduled { + scheduled = fmt.Sprintf("every %vs", sc.IntervalSeconds) + } + + return log.Fields{ + "on-init": onInit, + "scheduled": scheduled, + } +} + +// ValidateAtLeastOneProjectOrWildcard implements validator.Func +// assessing that we have at least one project or wildcard configured. +func ValidateAtLeastOneProjectOrWildcard(v validator.FieldLevel) bool { + return v.Parent().FieldByName("Projects").Len() > 0 || v.Parent().FieldByName("Wildcards").Len() > 0 +} + +// New returns a new config with the default parameters. +func New() (c Config) { + defaults.MustSet(&c) + + return +} + +// NewProject returns a new project with the config default parameters. +func (c Config) NewProject() (p Project) { + p.ProjectParameters = c.ProjectDefaults + + return +} + +// NewWildcard returns a new wildcard with the config default parameters. +func (c Config) NewWildcard() (w Wildcard) { + w.ProjectParameters = c.ProjectDefaults + + return +} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go new file mode 100644 index 00000000..cb15964a --- /dev/null +++ b/pkg/config/config_test.go @@ -0,0 +1,98 @@ +package config + +import ( + "testing" + + log "github.com/sirupsen/logrus" + "github.com/stretchr/testify/assert" +) + +func TestNew(t *testing.T) { + c := Config{} + + c.Log.Level = "info" + c.Log.Format = "text" + + c.OpenTelemetry.GRPCEndpoint = "" + + c.Server.ListenAddress = ":8080" + c.Server.Metrics.Enabled = true + + c.Gitlab.URL = "https://gitlab.com" + c.Gitlab.HealthURL = "https://gitlab.com/explore" + c.Gitlab.EnableHealthCheck = true + c.Gitlab.EnableTLSVerify = true + c.Gitlab.MaximumRequestsPerSecond = 1 + c.Gitlab.BurstableRequestsPerSecond = 5 + c.Gitlab.MaximumJobsQueueSize = 1000 + + c.Pull.ProjectsFromWildcards.OnInit = true + c.Pull.ProjectsFromWildcards.Scheduled = true + c.Pull.ProjectsFromWildcards.IntervalSeconds =
1800 + + c.Pull.EnvironmentsFromProjects.OnInit = true + c.Pull.EnvironmentsFromProjects.Scheduled = true + c.Pull.EnvironmentsFromProjects.IntervalSeconds = 1800 + + c.Pull.RefsFromProjects.OnInit = true + c.Pull.RefsFromProjects.Scheduled = true + c.Pull.RefsFromProjects.IntervalSeconds = 300 + + c.Pull.Metrics.OnInit = true + c.Pull.Metrics.Scheduled = true + c.Pull.Metrics.IntervalSeconds = 30 + + c.GarbageCollect.Projects.Scheduled = true + c.GarbageCollect.Projects.IntervalSeconds = 14400 + + c.GarbageCollect.Environments.Scheduled = true + c.GarbageCollect.Environments.IntervalSeconds = 14400 + + c.GarbageCollect.Refs.Scheduled = true + c.GarbageCollect.Refs.IntervalSeconds = 1800 + + c.GarbageCollect.Metrics.Scheduled = true + c.GarbageCollect.Metrics.IntervalSeconds = 600 + + c.ProjectDefaults.OutputSparseStatusMetrics = true + + c.ProjectDefaults.Pull.Environments.Regexp = `.*` + c.ProjectDefaults.Pull.Environments.ExcludeStopped = true + + c.ProjectDefaults.Pull.Refs.Branches.Enabled = true + c.ProjectDefaults.Pull.Refs.Branches.Regexp = `^(?:main|master)$` + c.ProjectDefaults.Pull.Refs.Branches.ExcludeDeleted = true + + c.ProjectDefaults.Pull.Refs.Tags.Enabled = true + c.ProjectDefaults.Pull.Refs.Tags.Regexp = `.*` + c.ProjectDefaults.Pull.Refs.Tags.ExcludeDeleted = true + + c.ProjectDefaults.Pull.Pipeline.Jobs.FromChildPipelines.Enabled = true + c.ProjectDefaults.Pull.Pipeline.Jobs.RunnerDescription.Enabled = true + c.ProjectDefaults.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp = `shared-runners-manager-(\d*)\.gitlab\.com` + c.ProjectDefaults.Pull.Pipeline.Variables.Regexp = `.*` + + assert.Equal(t, c, New()) +} + +func TestValidConfig(t *testing.T) { + cfg := New() + + cfg.Gitlab.Token = "foo" + cfg.Projects = append(cfg.Projects, NewProject("bar")) + + assert.NoError(t, cfg.Validate()) +} + +func TestSchedulerConfigLog(t *testing.T) { + sc := SchedulerConfig{ + OnInit: true, + Scheduled: true, + IntervalSeconds: 300, + } + + assert.Equal(t, 
log.Fields{ + "on-init": "yes", + "scheduled": "every 300s", + }, sc.Log()) +} diff --git a/pkg/config/global.go b/pkg/config/global.go new file mode 100644 index 00000000..93666f03 --- /dev/null +++ b/pkg/config/global.go @@ -0,0 +1,12 @@ +package config + +import ( + "net/url" +) + +// Global is used for globally shared exporter config. +type Global struct { + // InternalMonitoringListenerAddress can be used to access + // some metrics related to the exporter internals + InternalMonitoringListenerAddress *url.URL +} diff --git a/pkg/config/parser.go b/pkg/config/parser.go new file mode 100644 index 00000000..9811694a --- /dev/null +++ b/pkg/config/parser.go @@ -0,0 +1,72 @@ +package config + +import ( + "fmt" + "io/ioutil" + "path/filepath" + + "gopkg.in/yaml.v3" +) + +// Format represents the format of the config file. +type Format uint8 + +const ( + // FormatYAML represents a Config written in yaml format. + FormatYAML Format = iota +) + +// ParseFile reads the content of a file and attempt to unmarshal it +// into a Config. +func ParseFile(filename string) (c Config, err error) { + var ( + t Format + fileBytes []byte + ) + + // Figure out what type of config file we provided + t, err = GetTypeFromFileExtension(filename) + if err != nil { + return + } + + // Read the content of the config file + fileBytes, err = ioutil.ReadFile(filepath.Clean(filename)) + if err != nil { + return + } + + // Parse the content and return Config + return Parse(t, fileBytes) +} + +// Parse unmarshal provided bytes with given ConfigType into a Config object. 
+func Parse(f Format, bytes []byte) (cfg Config, err error) { + switch f { + case FormatYAML: + err = yaml.Unmarshal(bytes, &cfg) + default: + err = fmt.Errorf("unsupported config type '%+v'", f) + } + + // hack: automatically update the cfg.GitLab.HealthURL for self-hosted GitLab + if cfg.Gitlab.URL != "https://gitlab.com" && + cfg.Gitlab.HealthURL == "https://gitlab.com/explore" { + cfg.Gitlab.HealthURL = fmt.Sprintf("%s/-/health", cfg.Gitlab.URL) + } + + return +} + +// GetTypeFromFileExtension returns the ConfigType based upon the extension of +// the file. +func GetTypeFromFileExtension(filename string) (f Format, err error) { + switch ext := filepath.Ext(filename); ext { + case ".yml", ".yaml": + f = FormatYAML + default: + err = fmt.Errorf("unsupported config type '%s', expected .y(a)ml", ext) + } + + return +} diff --git a/pkg/config/parser_test.go b/pkg/config/parser_test.go new file mode 100644 index 00000000..e4af5c6f --- /dev/null +++ b/pkg/config/parser_test.go @@ -0,0 +1,275 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseFileInvalidPath(t *testing.T) { + cfg, err := ParseFile("/path_do_not_exist.yml") + assert.Error(t, err) + assert.Equal(t, Config{}, cfg) +} + +func TestParseInvalidYaml(t *testing.T) { + cfg, err := Parse(FormatYAML, []byte("invalid_yaml")) + assert.Error(t, err) + assert.Equal(t, Config{}, cfg) +} + +func TestParseValidYaml(t *testing.T) { + yamlConfig := ` +--- +log: + level: trace + format: json + +opentelemetry: + grpc_endpoint: otlp-collector:4317 + +server: + enable_pprof: true + listen_address: :1025 + + metrics: + enabled: false + enable_openmetrics_encoding: false + + webhook: + enabled: true + secret_token: secret + +gitlab: + url: https://gitlab.example.com + token: xrN14n9-ywvAFxxxxxx + health_url: https://gitlab.example.com/-/health + enable_health_check: false + enable_tls_verify: false + maximum_requests_per_second: 2 + +redis: + url: redis://popopo:1337 + +pull: 
+ projects_from_wildcards: + on_init: false + scheduled: false + interval_seconds: 1 + environments_from_projects: + on_init: false + scheduled: false + interval_seconds: 2 + refs_from_projects: + on_init: false + scheduled: false + interval_seconds: 3 + metrics: + on_init: false + scheduled: false + interval_seconds: 4 + +garbage_collect: + projects: + on_init: true + scheduled: false + interval_seconds: 1 + environments: + on_init: true + scheduled: false + interval_seconds: 2 + refs: + on_init: true + scheduled: false + interval_seconds: 3 + metrics: + on_init: true + scheduled: false + interval_seconds: 4 + +project_defaults: + output_sparse_status_metrics: false + pull: + environments: + enabled: true + regexp: "^baz$" + refs: + branches: + enabled: false + regexp: "^foo$" + most_recent: 1 + max_age_seconds: 1 + exclude_deleted: false + tags: + enabled: false + regexp: "^bar$" + most_recent: 2 + max_age_seconds: 2 + exclude_deleted: false + merge_requests: + enabled: true + most_recent: 3 + max_age_seconds: 3 + pipeline: + jobs: + enabled: true + variables: + enabled: true + regexp: "^CI_" + +projects: + - name: foo/project + - name: bar/project + pull: + environments: + enabled: false + regexp: "^foo$" + refs: + branches: + regexp: "^foo$" + max_age_seconds: 2 + +wildcards: + - owner: + name: foo + kind: group + search: 'bar' + archived: true + pull: + environments: + enabled: false + regexp: "^foo$" + refs: + branches: + regexp: "^yolo$" + max_age_seconds: 4 +` + + cfg, err := Parse(FormatYAML, []byte(yamlConfig)) + assert.NoError(t, err) + + xcfg := New() + + xcfg.Log.Level = "trace" + xcfg.Log.Format = "json" + + xcfg.OpenTelemetry.GRPCEndpoint = "otlp-collector:4317" + + xcfg.Server.EnablePprof = true + xcfg.Server.ListenAddress = ":1025" + xcfg.Server.Metrics.Enabled = false + xcfg.Server.Metrics.EnableOpenmetricsEncoding = false + + xcfg.Server.Webhook.Enabled = true + xcfg.Server.Webhook.SecretToken = "secret" + + xcfg.Gitlab.URL = 
"https://gitlab.example.com" + xcfg.Gitlab.HealthURL = "https://gitlab.example.com/-/health" + xcfg.Gitlab.Token = "xrN14n9-ywvAFxxxxxx" + xcfg.Gitlab.EnableHealthCheck = false + xcfg.Gitlab.EnableTLSVerify = false + xcfg.Gitlab.MaximumRequestsPerSecond = 2 + + xcfg.Redis.URL = "redis://popopo:1337" + + xcfg.Pull.ProjectsFromWildcards.OnInit = false + xcfg.Pull.ProjectsFromWildcards.Scheduled = false + xcfg.Pull.ProjectsFromWildcards.IntervalSeconds = 1 + + xcfg.Pull.EnvironmentsFromProjects.OnInit = false + xcfg.Pull.EnvironmentsFromProjects.Scheduled = false + xcfg.Pull.EnvironmentsFromProjects.IntervalSeconds = 2 + + xcfg.Pull.RefsFromProjects.OnInit = false + xcfg.Pull.RefsFromProjects.Scheduled = false + xcfg.Pull.RefsFromProjects.IntervalSeconds = 3 + + xcfg.Pull.Metrics.OnInit = false + xcfg.Pull.Metrics.Scheduled = false + xcfg.Pull.Metrics.IntervalSeconds = 4 + + xcfg.GarbageCollect.Projects.OnInit = true + xcfg.GarbageCollect.Projects.Scheduled = false + xcfg.GarbageCollect.Projects.IntervalSeconds = 1 + + xcfg.GarbageCollect.Environments.OnInit = true + xcfg.GarbageCollect.Environments.Scheduled = false + xcfg.GarbageCollect.Environments.IntervalSeconds = 2 + + xcfg.GarbageCollect.Refs.OnInit = true + xcfg.GarbageCollect.Refs.Scheduled = false + xcfg.GarbageCollect.Refs.IntervalSeconds = 3 + + xcfg.GarbageCollect.Metrics.OnInit = true + xcfg.GarbageCollect.Metrics.Scheduled = false + xcfg.GarbageCollect.Metrics.IntervalSeconds = 4 + + xcfg.ProjectDefaults.OutputSparseStatusMetrics = false + + xcfg.ProjectDefaults.Pull.Environments.Enabled = true + xcfg.ProjectDefaults.Pull.Environments.Regexp = `^baz$` + + xcfg.ProjectDefaults.Pull.Refs.Branches.Enabled = false + xcfg.ProjectDefaults.Pull.Refs.Branches.Regexp = `^foo$` + xcfg.ProjectDefaults.Pull.Refs.Branches.MostRecent = 1 + xcfg.ProjectDefaults.Pull.Refs.Branches.MaxAgeSeconds = 1 + xcfg.ProjectDefaults.Pull.Refs.Branches.ExcludeDeleted = false + + xcfg.ProjectDefaults.Pull.Refs.Tags.Enabled = false + 
xcfg.ProjectDefaults.Pull.Refs.Tags.Regexp = `^bar$` + xcfg.ProjectDefaults.Pull.Refs.Tags.MostRecent = 2 + xcfg.ProjectDefaults.Pull.Refs.Tags.MaxAgeSeconds = 2 + xcfg.ProjectDefaults.Pull.Refs.Tags.ExcludeDeleted = false + + xcfg.ProjectDefaults.Pull.Refs.MergeRequests.Enabled = true + xcfg.ProjectDefaults.Pull.Refs.MergeRequests.MostRecent = 3 + xcfg.ProjectDefaults.Pull.Refs.MergeRequests.MaxAgeSeconds = 3 + + xcfg.ProjectDefaults.Pull.Pipeline.Jobs.Enabled = true + xcfg.ProjectDefaults.Pull.Pipeline.Variables.Enabled = true + xcfg.ProjectDefaults.Pull.Pipeline.Variables.Regexp = `^CI_` + + p1 := NewProject("foo/project") + p1.ProjectParameters = xcfg.ProjectDefaults + + p2 := NewProject("bar/project") + p2.ProjectParameters = xcfg.ProjectDefaults + + p2.Pull.Environments.Enabled = false + p2.Pull.Environments.Regexp = `^foo$` + p2.Pull.Refs.Branches.Regexp = `^foo$` + p2.Pull.Refs.Branches.MaxAgeSeconds = 2 + + xcfg.Projects = []Project{p1, p2} + + w1 := NewWildcard() + w1.ProjectParameters = xcfg.ProjectDefaults + w1.Search = "bar" + w1.Archived = true + w1.Owner.Name = "foo" + w1.Owner.Kind = "group" + w1.Pull.Environments.Enabled = false + w1.Pull.Environments.Regexp = `^foo$` + w1.Pull.Refs.Branches.Regexp = `^yolo$` + w1.Pull.Refs.Branches.MaxAgeSeconds = 4 + + xcfg.Wildcards = []Wildcard{w1} + + // Test variable assignments + assert.Equal(t, xcfg, cfg) +} + +func TestParseConfigSelfHostedGitLab(t *testing.T) { + yamlConfig := ` +--- +gitlab: + url: https://gitlab.example.com +` + cfg, err := Parse( + FormatYAML, + []byte(yamlConfig), + ) + + assert.NoError(t, err) + assert.Equal(t, "https://gitlab.example.com/-/health", cfg.Gitlab.HealthURL) +} diff --git a/pkg/config/project.go b/pkg/config/project.go new file mode 100644 index 00000000..c35ca4b0 --- /dev/null +++ b/pkg/config/project.go @@ -0,0 +1,179 @@ +package config + +import ( + "github.com/creasty/defaults" +) + +// ProjectParameters for the fetching configuration of Projects and Wildcards. 
+type ProjectParameters struct { + // From handles ProjectPullParameters configuration. + Pull ProjectPull `yaml:"pull"` + + // Whether or not to export all pipeline/job statuses (being 0) or solely the one of the last job (being 1). + OutputSparseStatusMetrics bool `default:"true" yaml:"output_sparse_status_metrics"` +} + +// ProjectPull .. +type ProjectPull struct { + Environments ProjectPullEnvironments `yaml:"environments"` + Refs ProjectPullRefs `yaml:"refs"` + Pipeline ProjectPullPipeline `yaml:"pipeline"` +} + +// ProjectPullEnvironments .. +type ProjectPullEnvironments struct { + // Whether to pull environments/deployments or not for this project + Enabled bool `default:"false" yaml:"enabled"` + + // Regular expression to filter environments to fetch by their names + Regexp string `default:".*" yaml:"regexp"` + + // Prevent exporting metrics for stopped environments + ExcludeStopped bool `default:"true" yaml:"exclude_stopped"` +} + +// ProjectPullRefs .. +type ProjectPullRefs struct { + // Configuration for pulling branches + Branches ProjectPullRefsBranches `yaml:"branches"` + + // Configuration for pulling tags + Tags ProjectPullRefsTags `yaml:"tags"` + + // Configuration for pulling merge requests + MergeRequests ProjectPullRefsMergeRequests `yaml:"merge_requests"` +} + +// ProjectPullRefsBranches .. 
+type ProjectPullRefsBranches struct { + // Monitor pipelines related to project branches + Enabled bool `default:"true" yaml:"enabled"` + + // Filter for branches to include + Regexp string `default:"^(?:main|master)$" yaml:"regexp"` + + // Only keep most 'n' recently updated branches + MostRecent uint `default:"0" yaml:"most_recent"` + + // If the most recent pipeline for the branch was last updated at + // time greater than this value the metrics won't be exported + MaxAgeSeconds uint `default:"0" yaml:"max_age_seconds"` + + // Prevent exporting metrics for deleted branches + ExcludeDeleted bool `default:"true" yaml:"exclude_deleted"` +} + +// ProjectPullRefsTags .. +type ProjectPullRefsTags struct { + // Monitor pipelines related to project tags. + Enabled bool `default:"true" yaml:"enabled"` + + // Filter for tags to include. + Regexp string `default:".*" yaml:"regexp"` + + // Only keep most 'n' recently updated tags. + MostRecent uint `default:"0" yaml:"most_recent"` + + // If the most recent pipeline for the tag was last updated at + // time greater than this value the metrics won't be exported. + MaxAgeSeconds uint `default:"0" yaml:"max_age_seconds"` + + // Prevent exporting metrics for deleted tags. + ExcludeDeleted bool `default:"true" yaml:"exclude_deleted"` +} + +// ProjectPullRefsMergeRequests .. +type ProjectPullRefsMergeRequests struct { + // Monitor pipelines related to project merge requests. + Enabled bool `yaml:"enabled"` + + // Only keep most 'n' recently updated merge requests. + MostRecent uint `default:"0" yaml:"most_recent"` + + // If the most recent pipeline for the merge request was last updated at + // time greater than this value the metrics won't be exported. + MaxAgeSeconds uint `default:"0" yaml:"max_age_seconds"` +} + +// ProjectPullPipeline .. 
+type ProjectPullPipeline struct { + Jobs ProjectPullPipelineJobs `yaml:"jobs"` + Variables ProjectPullPipelineVariables `yaml:"variables"` + TestReports ProjectPullPipelineTestReports `yaml:"test_reports"` +} + +// ProjectPullPipelineJobs .. +type ProjectPullPipelineJobs struct { + // Enabled set to true will pull pipeline jobs related metrics. + Enabled bool `default:"false" yaml:"enabled"` + + // Pull pipeline jobs from child/downstream pipelines. + FromChildPipelines ProjectPullPipelineJobsFromChildPipelines `yaml:"from_child_pipelines"` + + // Configure the export of the runner description which ran the job. + RunnerDescription ProjectPullPipelineJobsRunnerDescription `yaml:"runner_description"` +} + +// ProjectPullPipelineJobsFromChildPipelines .. +type ProjectPullPipelineJobsFromChildPipelines struct { + // Enabled set to true will pull pipeline jobs from child/downstream pipelines related metrics. + Enabled bool `default:"true" yaml:"enabled"` +} + +// ProjectPullPipelineJobsRunnerDescription .. +type ProjectPullPipelineJobsRunnerDescription struct { + // Enabled set to true will export the description of the runner which ran the job. + Enabled bool `default:"true" yaml:"enabled"` + + // Regular expression to be able to reduce the cardinality of the exported value when necessary. + AggregationRegexp string `default:"shared-runners-manager-(\\d*)\\.gitlab\\.com" yaml:"aggregation_regexp"` +} + +// ProjectPullPipelineVariables .. +type ProjectPullPipelineVariables struct { + // Enabled set to true will attempt to retrieve variables included in the pipeline. + Enabled bool `default:"false" yaml:"enabled"` + + // Regexp to filter pipeline variables values to fetch. + Regexp string `default:".*" yaml:"regexp"` +} + +// ProjectPullPipelineTestReports .. +type ProjectPullPipelineTestReports struct { + // Enabled set to true will attempt to retrieve the test report included in the pipeline. 
+ Enabled bool `default:"false" yaml:"enabled"` + FromChildPipelines ProjectPullPipelineTestReportsFromChildPipelines `yaml:"from_child_pipelines"` + TestCases ProjectPullPipelineTestReportsTestCases `yaml:"test_cases"` +} + +// ProjectPullPipelineTestReportsFromChildPipelines .. +type ProjectPullPipelineTestReportsFromChildPipelines struct { + // Enabled set to true will pull pipeline jobs from child/downstream pipelines related metrics. + Enabled bool `default:"false" yaml:"enabled"` +} + +// ProjectPullPipelineTestReportsTestCases .. +type ProjectPullPipelineTestReportsTestCases struct { + // Enabled set to true will attempt to retrieve the test report included in the pipeline. + Enabled bool `default:"false" yaml:"enabled"` +} + +// Project holds information about a GitLab project. +type Project struct { + // ProjectParameters holds parameters specific to this project. + ProjectParameters `yaml:",inline"` + + // Name is actually what is commonly referred as path_with_namespace on GitLab. + Name string `yaml:"name"` +} + +// Projects .. +type Projects []Project + +// NewProject returns a new project composed with the default parameters.
+func NewProject(name string) (p Project) { + defaults.MustSet(&p) + p.Name = name + + return +} diff --git a/pkg/config/project_test.go b/pkg/config/project_test.go new file mode 100644 index 00000000..e4ef7bda --- /dev/null +++ b/pkg/config/project_test.go @@ -0,0 +1,33 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewProject(t *testing.T) { + p := Project{} + + p.Name = "foo/bar" + + p.OutputSparseStatusMetrics = true + + p.Pull.Environments.Regexp = `.*` + p.Pull.Environments.ExcludeStopped = true + + p.Pull.Refs.Branches.Enabled = true + p.Pull.Refs.Branches.Regexp = `^(?:main|master)$` + p.Pull.Refs.Branches.ExcludeDeleted = true + + p.Pull.Refs.Tags.Enabled = true + p.Pull.Refs.Tags.Regexp = `.*` + p.Pull.Refs.Tags.ExcludeDeleted = true + + p.Pull.Pipeline.Jobs.FromChildPipelines.Enabled = true + p.Pull.Pipeline.Jobs.RunnerDescription.Enabled = true + p.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp = `shared-runners-manager-(\d*)\.gitlab\.com` + p.Pull.Pipeline.Variables.Regexp = `.*` + + assert.Equal(t, p, NewProject("foo/bar")) +} diff --git a/pkg/schemas/wildcard.go b/pkg/config/wildcard.go similarity index 67% rename from pkg/schemas/wildcard.go rename to pkg/config/wildcard.go index 112f1552..eb4c4d79 100644 --- a/pkg/schemas/wildcard.go +++ b/pkg/config/wildcard.go @@ -1,15 +1,13 @@ -package schemas +package config import ( - "fmt" - "hash/crc32" - "strconv" + "github.com/creasty/defaults" ) -// Wildcard is a specific handler to dynamically search projects +// Wildcard is a specific handler to dynamically search projects. type Wildcard struct { // ProjectParameters holds parameters specific to the projects which - // will be discovered using this wildcard + // will be discovered using this wildcard. ProjectParameters `yaml:",inline"` Search string `yaml:"search"` @@ -27,10 +25,9 @@ type WildcardOwner struct { // Wildcards .. type Wildcards []Wildcard -// WildcardKey .. 
-type WildcardKey string +// NewWildcard returns a new wildcard with the default parameters. +func NewWildcard() (w Wildcard) { + defaults.MustSet(&w) -// Key .. -func (w Wildcard) Key() WildcardKey { - return WildcardKey(strconv.Itoa(int(crc32.ChecksumIEEE([]byte(fmt.Sprintf("%v", w)))))) + return } diff --git a/pkg/config/wildcard_test.go b/pkg/config/wildcard_test.go new file mode 100644 index 00000000..5b0afc7b --- /dev/null +++ b/pkg/config/wildcard_test.go @@ -0,0 +1,31 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewWildcard(t *testing.T) { + w := Wildcard{} + + w.OutputSparseStatusMetrics = true + + w.Pull.Environments.Regexp = `.*` + w.Pull.Environments.ExcludeStopped = true + + w.Pull.Refs.Branches.Enabled = true + w.Pull.Refs.Branches.Regexp = `^(?:main|master)$` + w.Pull.Refs.Branches.ExcludeDeleted = true + + w.Pull.Refs.Tags.Enabled = true + w.Pull.Refs.Tags.Regexp = `.*` + w.Pull.Refs.Tags.ExcludeDeleted = true + + w.Pull.Pipeline.Jobs.FromChildPipelines.Enabled = true + w.Pull.Pipeline.Jobs.RunnerDescription.Enabled = true + w.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp = `shared-runners-manager-(\d*)\.gitlab\.com` + w.Pull.Pipeline.Variables.Regexp = `.*` + + assert.Equal(t, w, NewWildcard()) +} diff --git a/pkg/controller/collectors.go b/pkg/controller/collectors.go new file mode 100644 index 00000000..a9265cec --- /dev/null +++ b/pkg/controller/collectors.go @@ -0,0 +1,509 @@ +package controller + +import "github.com/prometheus/client_golang/prometheus" + +var ( + defaultLabels = []string{"project", "topics", "kind", "ref", "source", "variables"} + jobLabels = []string{"stage", "job_name", "runner_description", "tag_list", "failure_reason"} + statusLabels = []string{"status"} + environmentLabels = []string{"project", "environment"} + environmentInformationLabels = []string{"environment_id", "external_url", "kind", "ref", "latest_commit_short_id", "current_commit_short_id", 
"available", "username"} + testSuiteLabels = []string{"test_suite_name"} + testCaseLabels = []string{"test_case_name", "test_case_classname"} + statusesList = [...]string{"created", "waiting_for_resource", "preparing", "pending", "running", "success", "failed", "canceled", "skipped", "manual", "scheduled", "error"} +) + +// NewInternalCollectorCurrentlyQueuedTasksCount returns a new collector for the gcpe_currently_queued_tasks_count metric. +func NewInternalCollectorCurrentlyQueuedTasksCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_currently_queued_tasks_count", + Help: "Number of tasks in the queue", + }, + []string{}, + ) +} + +// NewInternalCollectorEnvironmentsCount returns a new collector for the gcpe_environments_count metric. +func NewInternalCollectorEnvironmentsCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_environments_count", + Help: "Number of GitLab environments being exported", + }, + []string{}, + ) +} + +// NewInternalCollectorExecutedTasksCount returns a new collector for the gcpe_executed_tasks_count metric. +func NewInternalCollectorExecutedTasksCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_executed_tasks_count", + Help: "Number of tasks executed", + }, + []string{}, + ) +} + +// NewInternalCollectorGitLabAPIRequestsCount returns a new collector for the gcpe_gitlab_api_requests_count metric. +func NewInternalCollectorGitLabAPIRequestsCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_gitlab_api_requests_count", + Help: "GitLab API requests count", + }, + []string{}, + ) +} + +// NewInternalCollectorGitLabAPIRequestsRemaining returns a new collector for the gcpe_gitlab_api_requests_remaining metric. 
+func NewInternalCollectorGitLabAPIRequestsRemaining() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_gitlab_api_requests_remaining", + Help: "GitLab API requests remaining in the api limit", + }, + []string{}, + ) +} + +// NewInternalCollectorGitLabAPIRequestsLimit returns a new collector for the gcpe_gitlab_api_requests_limit metric. +func NewInternalCollectorGitLabAPIRequestsLimit() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_gitlab_api_requests_limit", + Help: "GitLab API requests available in the api limit", + }, + []string{}, + ) +} + +// NewInternalCollectorMetricsCount returns a new collector for the gcpe_metrics_count metric. +func NewInternalCollectorMetricsCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_metrics_count", + Help: "Number of GitLab pipelines metrics being exported", + }, + []string{}, + ) +} + +// NewInternalCollectorProjectsCount returns a new collector for the gcpe_projects_count metric. +func NewInternalCollectorProjectsCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_projects_count", + Help: "Number of GitLab projects being exported", + }, + []string{}, + ) +} + +// NewInternalCollectorRefsCount returns a new collector for the gcpe_refs_count metric. +func NewInternalCollectorRefsCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gcpe_refs_count", + Help: "Number of GitLab refs being exported", + }, + []string{}, + ) +} + +// NewCollectorCoverage returns a new collector for the gitlab_ci_pipeline_coverage metric. 
+func NewCollectorCoverage() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_coverage", + Help: "Coverage of the most recent pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorDurationSeconds returns a new collector for the gitlab_ci_pipeline_duration_seconds metric. +func NewCollectorDurationSeconds() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_duration_seconds", + Help: "Duration in seconds of the most recent pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorQueuedDurationSeconds returns a new collector for the gitlab_ci_pipeline_queued_duration_seconds metric. +func NewCollectorQueuedDurationSeconds() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_queued_duration_seconds", + Help: "Duration in seconds the most recent pipeline has been queued before starting", + }, + defaultLabels, + ) +} + +// NewCollectorEnvironmentBehindCommitsCount returns a new collector for the gitlab_ci_environment_behind_commits_count metric. +func NewCollectorEnvironmentBehindCommitsCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_behind_commits_count", + Help: "Number of commits the environment is behind given its last deployment", + }, + environmentLabels, + ) +} + +// NewCollectorEnvironmentBehindDurationSeconds returns a new collector for the gitlab_ci_environment_behind_duration_seconds metric. 
+func NewCollectorEnvironmentBehindDurationSeconds() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_behind_duration_seconds", + Help: "Duration in seconds the environment is behind the most recent commit given its last deployment", + }, + environmentLabels, + ) +} + +// NewCollectorEnvironmentDeploymentCount returns a new collector for the gitlab_ci_environment_deployment_count metric. +func NewCollectorEnvironmentDeploymentCount() prometheus.Collector { + return prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "gitlab_ci_environment_deployment_count", + Help: "Number of deployments for an environment", + }, + environmentLabels, + ) +} + +// NewCollectorEnvironmentDeploymentDurationSeconds returns a new collector for the gitlab_ci_environment_deployment_duration_seconds metric. +func NewCollectorEnvironmentDeploymentDurationSeconds() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_deployment_duration_seconds", + Help: "Duration in seconds of the most recent deployment of the environment", + }, + environmentLabels, + ) +} + +// NewCollectorEnvironmentDeploymentJobID returns a new collector for the gitlab_ci_environment_deployment_id metric. +func NewCollectorEnvironmentDeploymentJobID() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_deployment_job_id", + Help: "ID of the most recent deployment job of the environment", + }, + environmentLabels, + ) +} + +// NewCollectorEnvironmentDeploymentStatus returns a new collector for the gitlab_ci_environment_deployment_status metric. 
+func NewCollectorEnvironmentDeploymentStatus() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_deployment_status", + Help: "Status of the most recent deployment of the environment", + }, + append(environmentLabels, "status"), + ) +} + +// NewCollectorEnvironmentDeploymentTimestamp returns a new collector for the gitlab_ci_environment_deployment_timestamp metric. +func NewCollectorEnvironmentDeploymentTimestamp() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_deployment_timestamp", + Help: "Creation date of the most recent deployment of the environment", + }, + environmentLabels, + ) +} + +// NewCollectorEnvironmentInformation returns a new collector for the gitlab_ci_environment_information metric. +func NewCollectorEnvironmentInformation() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_environment_information", + Help: "Information about the environment", + }, + append(environmentLabels, environmentInformationLabels...), + ) +} + +// NewCollectorID returns a new collector for the gitlab_ci_pipeline_id metric. +func NewCollectorID() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_id", + Help: "ID of the most recent pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorJobArtifactSizeBytes returns a new collector for the gitlab_ci_pipeline_job_artifact_size_bytes metric. +func NewCollectorJobArtifactSizeBytes() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_job_artifact_size_bytes", + Help: "Artifact size in bytes (sum of all of them) of the most recent job", + }, + append(defaultLabels, jobLabels...), + ) +} + +// NewCollectorJobDurationSeconds returns a new collector for the gitlab_ci_pipeline_job_duration_seconds metric. 
+func NewCollectorJobDurationSeconds() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_job_duration_seconds", + Help: "Duration in seconds of the most recent job", + }, + append(defaultLabels, jobLabels...), + ) +} + +// NewCollectorJobID returns a new collector for the gitlab_ci_pipeline_job_id metric. +func NewCollectorJobID() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_job_id", + Help: "ID of the most recent job", + }, + append(defaultLabels, jobLabels...), + ) +} + +// NewCollectorJobQueuedDurationSeconds returns a new collector for the gitlab_ci_pipeline_job_queued_duration_seconds metric. +func NewCollectorJobQueuedDurationSeconds() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_job_queued_duration_seconds", + Help: "Duration in seconds the most recent job has been queued before starting", + }, + append(defaultLabels, jobLabels...), + ) +} + +// NewCollectorJobRunCount returns a new collector for the gitlab_ci_pipeline_job_run_count metric. +func NewCollectorJobRunCount() prometheus.Collector { + return prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "gitlab_ci_pipeline_job_run_count", + Help: "Number of executions of a job", + }, + append(defaultLabels, jobLabels...), + ) +} + +// NewCollectorJobStatus returns a new collector for the gitlab_ci_pipeline_job_status metric. +func NewCollectorJobStatus() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_job_status", + Help: "Status of the most recent job", + }, + append(defaultLabels, append(jobLabels, statusLabels...)...), + ) +} + +// NewCollectorJobTimestamp returns a new collector for the gitlab_ci_pipeline_job_timestamp metric. 
+func NewCollectorJobTimestamp() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_job_timestamp", + Help: "Creation date timestamp of the most recent job", + }, + append(defaultLabels, jobLabels...), + ) +} + +// NewCollectorStatus returns a new collector for the gitlab_ci_pipeline_status metric. +func NewCollectorStatus() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_status", + Help: "Status of the most recent pipeline", + }, + append(defaultLabels, "status"), + ) +} + +// NewCollectorTimestamp returns a new collector for the gitlab_ci_pipeline_timestamp metric. +func NewCollectorTimestamp() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_timestamp", + Help: "Timestamp of the last update of the most recent pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorRunCount returns a new collector for the gitlab_ci_pipeline_run_count metric. +func NewCollectorRunCount() prometheus.Collector { + return prometheus.NewCounterVec( + prometheus.CounterOpts{ + Name: "gitlab_ci_pipeline_run_count", + Help: "Number of executions of a pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestReportTotalTime returns a new collector for the gitlab_ci_pipeline_test_report_total_time metric. +func NewCollectorTestReportTotalTime() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_report_total_time", + Help: "Duration in seconds of all the tests in the most recently finished pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestReportTotalCount returns a new collector for the gitlab_ci_pipeline_test_report_total_count metric. 
+func NewCollectorTestReportTotalCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_report_total_count", + Help: "Number of total tests in the most recently finished pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestReportSuccessCount returns a new collector for the gitlab_ci_pipeline_test_report_success_count metric. +func NewCollectorTestReportSuccessCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_report_success_count", + Help: "Number of successful tests in the most recently finished pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestReportFailedCount returns a new collector for the gitlab_ci_pipeline_test_report_failed_count metric. +func NewCollectorTestReportFailedCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_report_failed_count", + Help: "Number of failed tests in the most recently finished pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestReportSkippedCount returns a new collector for the gitlab_ci_pipeline_test_report_skipped_count metric. +func NewCollectorTestReportSkippedCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_report_skipped_count", + Help: "Number of skipped tests in the most recently finished pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestReportErrorCount returns a new collector for the gitlab_ci_pipeline_test_report_error_count metric. 
+func NewCollectorTestReportErrorCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_report_error_count", + Help: "Number of errored tests in the most recently finished pipeline", + }, + defaultLabels, + ) +} + +// NewCollectorTestSuiteTotalTime returns a new collector for the gitlab_ci_pipeline_test_suite_total_time metric. +func NewCollectorTestSuiteTotalTime() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_suite_total_time", + Help: "Duration in seconds for the test suite", + }, + append(defaultLabels, testSuiteLabels...), + ) +} + +// NewCollectorTestSuiteTotalCount returns a new collector for the gitlab_ci_pipeline_test_suite_total_count metric. +func NewCollectorTestSuiteTotalCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_suite_total_count", + Help: "Number of total tests for the test suite", + }, + append(defaultLabels, testSuiteLabels...), + ) +} + +// NewCollectorTestSuiteSuccessCount returns a new collector for the gitlab_ci_pipeline_test_suite_success_count metric. +func NewCollectorTestSuiteSuccessCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_suite_success_count", + Help: "Number of successful tests for the test suite", + }, + append(defaultLabels, testSuiteLabels...), + ) +} + +// NewCollectorTestSuiteFailedCount returns a new collector for the gitlab_ci_pipeline_test_suite_failed_count metric. 
+func NewCollectorTestSuiteFailedCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_suite_failed_count", + Help: "Number of failed tests for the test suite", + }, + append(defaultLabels, testSuiteLabels...), + ) +} + +// NewCollectorTestSuiteSkippedCount returns a new collector for the gitlab_ci_pipeline_test_suite_skipped_count metric. +func NewCollectorTestSuiteSkippedCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_suite_skipped_count", + Help: "Number of skipped tests for the test suite", + }, + append(defaultLabels, testSuiteLabels...), + ) +} + +// NewCollectorTestSuiteErrorCount returns a new collector for the gitlab_ci_pipeline_test_suite_error_count metric. +func NewCollectorTestSuiteErrorCount() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_suite_error_count", + Help: "Number of errors for the test suite", + }, + append(defaultLabels, testSuiteLabels...), + ) +} + +// NewCollectorTestCaseExecutionTime returns a new collector for the gitlab_ci_pipeline_test_case_execution_time metric. +func NewCollectorTestCaseExecutionTime() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_case_execution_time", + Help: "Duration in seconds for the test case", + }, + append(defaultLabels, append(testSuiteLabels, testCaseLabels...)...), + ) +} + +// NewCollectorTestCaseStatus returns a new collector for the gitlab_ci_pipeline_test_case_status metric. 
+func NewCollectorTestCaseStatus() prometheus.Collector { + return prometheus.NewGaugeVec( + prometheus.GaugeOpts{ + Name: "gitlab_ci_pipeline_test_case_status", + Help: "Status of the test case in most recent job", + }, + append(defaultLabels, append(testSuiteLabels, append(testCaseLabels, statusLabels...)...)...), + ) +} diff --git a/pkg/exporter/collectors_test.go b/pkg/controller/collectors_test.go similarity index 67% rename from pkg/exporter/collectors_test.go rename to pkg/controller/collectors_test.go index ce101323..f7ff03c0 100644 --- a/pkg/exporter/collectors_test.go +++ b/pkg/controller/collectors_test.go @@ -1,4 +1,4 @@ -package exporter +package controller import ( "testing" @@ -8,7 +8,14 @@ import ( ) func TestNewCollectorFunctions(t *testing.T) { - for _, f := range [](func() prometheus.Collector){ + for _, f := range []func() prometheus.Collector{ + NewInternalCollectorCurrentlyQueuedTasksCount, + NewInternalCollectorEnvironmentsCount, + NewInternalCollectorExecutedTasksCount, + NewInternalCollectorGitLabAPIRequestsCount, + NewInternalCollectorMetricsCount, + NewInternalCollectorProjectsCount, + NewInternalCollectorRefsCount, NewCollectorCoverage, NewCollectorDurationSeconds, NewCollectorEnvironmentBehindCommitsCount, @@ -22,8 +29,10 @@ func TestNewCollectorFunctions(t *testing.T) { NewCollectorJobArtifactSizeBytes, NewCollectorJobDurationSeconds, NewCollectorJobID, + NewCollectorJobQueuedDurationSeconds, NewCollectorJobStatus, NewCollectorJobTimestamp, + NewCollectorQueuedDurationSeconds, NewCollectorStatus, NewCollectorTimestamp, } { @@ -32,7 +41,7 @@ func TestNewCollectorFunctions(t *testing.T) { assert.IsType(t, &prometheus.GaugeVec{}, c) } - for _, f := range [](func() prometheus.Collector){ + for _, f := range []func() prometheus.Collector{ NewCollectorJobRunCount, NewCollectorRunCount, NewCollectorEnvironmentDeploymentCount, diff --git a/pkg/controller/controller.go b/pkg/controller/controller.go new file mode 100644 index 00000000..7dcad65a 
--- /dev/null +++ b/pkg/controller/controller.go @@ -0,0 +1,204 @@ +package controller + +import ( + "context" + + "github.com/google/uuid" + "github.com/pkg/errors" + "github.com/redis/go-redis/extra/redisotel/v9" + "github.com/redis/go-redis/v9" + log "github.com/sirupsen/logrus" + "github.com/vmihailenco/taskq/v4" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace" + "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" + "go.opentelemetry.io/otel/sdk/resource" + sdktrace "go.opentelemetry.io/otel/sdk/trace" + semconv "go.opentelemetry.io/otel/semconv/v1.7.0" + "google.golang.org/grpc" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/store" +) + +const tracerName = "gitlab-ci-pipelines-exporter" + +// Controller holds the necessary clients to run the app and handle requests. +type Controller struct { + Config config.Config + Redis *redis.Client + Gitlab *gitlab.Client + Store store.Store + TaskController TaskController + + // UUID is used to identify this controller/process amongst others when + // the exporter is running in cluster mode, leveraging Redis. + UUID uuid.UUID +} + +// New creates a new controller. 
+func New(ctx context.Context, cfg config.Config, version string) (c Controller, err error) { + c.Config = cfg + c.UUID = uuid.New() + + if err = configureTracing(ctx, cfg.OpenTelemetry.GRPCEndpoint); err != nil { + return + } + + if err = c.configureRedis(ctx, cfg.Redis.URL); err != nil { + return + } + + c.TaskController = NewTaskController(ctx, c.Redis, cfg.Gitlab.MaximumJobsQueueSize) + c.registerTasks() + + c.Store = store.New(ctx, c.Redis, c.Config.Projects) + + if err = c.configureGitlab(cfg.Gitlab, version); err != nil { + return + } + + // Start the scheduler + c.Schedule(ctx, cfg.Pull, cfg.GarbageCollect) + + return +} + +func (c *Controller) registerTasks() { + for n, h := range map[schemas.TaskType]interface{}{ + schemas.TaskTypeGarbageCollectEnvironments: c.TaskHandlerGarbageCollectEnvironments, + schemas.TaskTypeGarbageCollectMetrics: c.TaskHandlerGarbageCollectMetrics, + schemas.TaskTypeGarbageCollectProjects: c.TaskHandlerGarbageCollectProjects, + schemas.TaskTypeGarbageCollectRefs: c.TaskHandlerGarbageCollectRefs, + schemas.TaskTypePullEnvironmentMetrics: c.TaskHandlerPullEnvironmentMetrics, + schemas.TaskTypePullEnvironmentsFromProject: c.TaskHandlerPullEnvironmentsFromProject, + schemas.TaskTypePullEnvironmentsFromProjects: c.TaskHandlerPullEnvironmentsFromProjects, + schemas.TaskTypePullMetrics: c.TaskHandlerPullMetrics, + schemas.TaskTypePullProject: c.TaskHandlerPullProject, + schemas.TaskTypePullProjectsFromWildcard: c.TaskHandlerPullProjectsFromWildcard, + schemas.TaskTypePullProjectsFromWildcards: c.TaskHandlerPullProjectsFromWildcards, + schemas.TaskTypePullRefMetrics: c.TaskHandlerPullRefMetrics, + schemas.TaskTypePullRefsFromProject: c.TaskHandlerPullRefsFromProject, + schemas.TaskTypePullRefsFromProjects: c.TaskHandlerPullRefsFromProjects, + } { + _, _ = c.TaskController.TaskMap.Register(string(n), &taskq.TaskConfig{ + Handler: h, + RetryLimit: 1, + }) + } +} + +func (c *Controller) unqueueTask(ctx context.Context, tt schemas.TaskType, 
uniqueID string) { + if err := c.Store.UnqueueTask(ctx, tt, uniqueID); err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "task_type": tt, + "task_unique_id": uniqueID, + }). + WithError(err). + Warn("unqueuing task") + } +} + +func configureTracing(ctx context.Context, grpcEndpoint string) error { + if len(grpcEndpoint) == 0 { + log.Debug("opentelemetry.grpc_endpoint is not configured, skipping open telemetry support") + + return nil + } + + log.WithFields(log.Fields{ + "opentelemetry_grpc_endpoint": grpcEndpoint, + }).Info("opentelemetry gRPC endpoint provided, initializing connection..") + + traceClient := otlptracegrpc.NewClient( + otlptracegrpc.WithInsecure(), + otlptracegrpc.WithEndpoint(grpcEndpoint), + otlptracegrpc.WithDialOption(grpc.WithBlock())) + + traceExp, err := otlptrace.New(ctx, traceClient) + if err != nil { + return err + } + + res, err := resource.New(ctx, + resource.WithFromEnv(), + resource.WithProcess(), + resource.WithTelemetrySDK(), + resource.WithHost(), + resource.WithAttributes( + semconv.ServiceNameKey.String("gitlab-ci-pipelines-exporter"), + ), + ) + if err != nil { + return err + } + + bsp := sdktrace.NewBatchSpanProcessor(traceExp) + tracerProvider := sdktrace.NewTracerProvider( + sdktrace.WithSampler(sdktrace.AlwaysSample()), + sdktrace.WithResource(res), + sdktrace.WithSpanProcessor(bsp), + ) + + otel.SetTracerProvider(tracerProvider) + + return nil +} + +func (c *Controller) configureGitlab(cfg config.Gitlab, version string) (err error) { + var rl ratelimit.Limiter + + if c.Redis != nil { + rl = ratelimit.NewRedisLimiter(c.Redis, cfg.MaximumRequestsPerSecond) + } else { + rl = ratelimit.NewLocalLimiter(cfg.MaximumRequestsPerSecond, cfg.BurstableRequestsPerSecond) + } + + c.Gitlab, err = gitlab.NewClient(gitlab.ClientConfig{ + URL: cfg.URL, + Token: cfg.Token, + DisableTLSVerify: !cfg.EnableTLSVerify, + UserAgentVersion: version, + RateLimiter: rl, + ReadinessURL: cfg.HealthURL, + }) + + return +} + +func (c 
*Controller) configureRedis(ctx context.Context, url string) (err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "controller:configureRedis") + defer span.End() + + if len(url) <= 0 { + log.Debug("redis url is not configured, skipping configuration & using local driver") + + return + } + + log.Info("redis url configured, initializing connection..") + + var opt *redis.Options + + if opt, err = redis.ParseURL(url); err != nil { + return + } + + c.Redis = redis.NewClient(opt) + + if err = redisotel.InstrumentTracing(c.Redis); err != nil { + return + } + + if _, err := c.Redis.Ping(ctx).Result(); err != nil { + return errors.Wrap(err, "connecting to redis") + } + + log.Info("connected to redis") + + return +} diff --git a/pkg/controller/controller_test.go b/pkg/controller/controller_test.go new file mode 100644 index 00000000..15402b00 --- /dev/null +++ b/pkg/controller/controller_test.go @@ -0,0 +1,103 @@ +package controller + +import ( + "context" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" +) + +func newMockedGitlabAPIServer() (mux *http.ServeMux, srv *httptest.Server) { + mux = http.NewServeMux() + srv = httptest.NewServer(mux) + + return +} + +func newTestController(cfg config.Config) (ctx context.Context, c Controller, mux *http.ServeMux, srv *httptest.Server) { + ctx = context.Background() + mux, srv = newMockedGitlabAPIServer() + + cfg.Gitlab.URL = srv.URL + if cfg.Gitlab.MaximumRequestsPerSecond < 1 { + cfg.Gitlab.MaximumRequestsPerSecond = 1000 + } + + if cfg.Gitlab.BurstableRequestsPerSecond < 1 { + cfg.Gitlab.BurstableRequestsPerSecond = 1 + } + + c, _ = New(context.Background(), cfg, "0.0.0-ci") + + return +} + +func TestConfigureGitlab(t *testing.T) { + c := Controller{} + assert.NoError(t, c.configureGitlab( + config.Gitlab{ + MaximumRequestsPerSecond: 5, + }, + "0.0.0", + )) + assert.NotNil(t, c.Gitlab) +} + +// func 
TestConfigureRedisClient(t *testing.T) { + +// s, err := miniredis.Run() +// if err != nil { +// panic(err) +// } +// defer s.Close() + +// c := redis.NewClient(&redis.Options{Addr: s.Addr()}) +// assert.NoError(t, ConfigureRedisClient(c)) +// assert.Equal(t, redisClient, c) + +// s.Close() +// assert.Error(t, ConfigureRedisClient(c)) +// } + +// func TestConfigureStore(t *testing.T) { +// cfg = config.Config{ +// Projects: []config.Project{ +// { +// Name: "foo/bar", +// }, +// }, +// } + +// // Test with local storage +// configureStore() +// assert.NotNil(t, store) + +// projects, err := store.Projects() +// assert.NoError(t, err) + +// expectedProjects := config.Projects{ +// "3861188962": config.Project{ +// Name: "foo/bar", +// }, +// } +// assert.Equal(t, expectedProjects, projects) + +// // Test with redis storage +// s, err := miniredis.Run() +// if err != nil { +// panic(err) +// } +// defer s.Close() + +// c := redis.NewClient(&redis.Options{Addr: s.Addr()}) +// assert.NoError(t, ConfigureRedisClient(c)) + +// configureStore() +// projects, err = store.Projects() +// assert.NoError(t, err) +// assert.Equal(t, expectedProjects, projects) +// } diff --git a/pkg/exporter/environments.go b/pkg/controller/environments.go similarity index 62% rename from pkg/exporter/environments.go rename to pkg/controller/environments.go index c99366c1..e1cd1616 100644 --- a/pkg/exporter/environments.go +++ b/pkg/controller/environments.go @@ -1,39 +1,34 @@ -package exporter +package controller import ( "context" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" log "github.com/sirupsen/logrus" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) -func pullEnvironmentsFromProject(p schemas.Project) error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() +// PullEnvironmentsFromProject .. 
+func (c *Controller) PullEnvironmentsFromProject(ctx context.Context, p schemas.Project) (err error) { + var envs schemas.Environments - envs, err := gitlabClient.GetProjectEnvironments(p.Name, p.Pull.Environments.NameRegexp()) + envs, err = c.Gitlab.GetProjectEnvironments(ctx, p) if err != nil { - return err + return } - for envID, envName := range envs { - env := schemas.Environment{ - ProjectName: p.Name, - Name: envName, - ID: envID, + for k := range envs { + var exists bool - TagsRegexp: p.Pull.Environments.TagsRegexp(), - OutputSparseStatusMetrics: p.OutputSparseStatusMetrics(), - } - - envExists, err := store.EnvironmentExists(env.Key()) + exists, err = c.Store.EnvironmentExists(ctx, k) if err != nil { - return err + return } - if !envExists { - if err = updateEnvironment(&env); err != nil { - return err + if !exists { + env := envs[k] + if err = c.UpdateEnvironment(ctx, &env); err != nil { + return } log.WithFields(log.Fields{ @@ -42,14 +37,16 @@ func pullEnvironmentsFromProject(p schemas.Project) error { "environment-name": env.Name, }).Info("discovered new environment") - go schedulePullEnvironmentMetrics(context.Background(), env) + c.ScheduleTask(ctx, schemas.TaskTypePullEnvironmentMetrics, string(env.Key()), env) } } - return nil + + return } -func updateEnvironment(env *schemas.Environment) error { - pulledEnv, err := gitlabClient.GetEnvironment(env.ProjectName, env.ID) +// UpdateEnvironment .. 
+func (c *Controller) UpdateEnvironment(ctx context.Context, env *schemas.Environment) error { + pulledEnv, err := c.Gitlab.GetEnvironment(ctx, env.ProjectName, env.ID) if err != nil { return err } @@ -58,32 +55,35 @@ func updateEnvironment(env *schemas.Environment) error { env.ExternalURL = pulledEnv.ExternalURL env.LatestDeployment = pulledEnv.LatestDeployment - return store.SetEnvironment(*env) + return c.Store.SetEnvironment(ctx, *env) } -func pullEnvironmentMetrics(env schemas.Environment) (err error) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - +// PullEnvironmentMetrics .. +func (c *Controller) PullEnvironmentMetrics(ctx context.Context, env schemas.Environment) (err error) { // At scale, the scheduled environment may be behind the actual state being stored // to avoid issues, we refresh it from the store before manipulating it - if err := store.GetEnvironment(&env); err != nil { + if err := c.Store.GetEnvironment(ctx, &env); err != nil { return err } // Save the existing deployment ID before we updated environment from the API deploymentJobID := env.LatestDeployment.JobID - if err = updateEnvironment(&env); err != nil { + + if err = c.UpdateEnvironment(ctx, &env); err != nil { return } - infoLabels := env.InformationLabelsValues() - var commitDate float64 + var ( + infoLabels = env.InformationLabelsValues() + commitDate float64 + ) + switch env.LatestDeployment.RefKind { case schemas.RefKindBranch: - infoLabels["latest_commit_short_id"], commitDate, err = gitlabClient.GetBranchLatestCommit(env.ProjectName, env.LatestDeployment.RefName) + infoLabels["latest_commit_short_id"], commitDate, err = c.Gitlab.GetBranchLatestCommit(ctx, env.ProjectName, env.LatestDeployment.RefName) case schemas.RefKindTag: - infoLabels["latest_commit_short_id"], commitDate, err = gitlabClient.GetProjectMostRecentTagCommit(env.ProjectName, env.TagsRegexp) + // TODO: Review how to manage this in a nicer fashion + infoLabels["latest_commit_short_id"], commitDate, err = 
c.Gitlab.GetProjectMostRecentTagCommit(ctx, env.ProjectName, ".*") default: infoLabels["latest_commit_short_id"] = env.LatestDeployment.CommitShortID commitDate = env.LatestDeployment.Timestamp @@ -112,27 +112,30 @@ func pullEnvironmentMetrics(env schemas.Environment) (err error) { } var commitCount int - if err = store.GetMetric(&infoMetric); err != nil { + + if err = c.Store.GetMetric(ctx, &infoMetric); err != nil { return err } if infoMetric.Labels["latest_commit_short_id"] != infoLabels["latest_commit_short_id"] || infoMetric.Labels["current_commit_short_id"] != infoLabels["current_commit_short_id"] { - commitCount, err = gitlabClient.GetCommitCountBetweenRefs(env.ProjectName, infoLabels["current_commit_short_id"], infoLabels["latest_commit_short_id"]) + commitCount, err = c.Gitlab.GetCommitCountBetweenRefs(ctx, env.ProjectName, infoLabels["current_commit_short_id"], infoLabels["latest_commit_short_id"]) if err != nil { return err } + envBehindCommitCount = float64(commitCount) } else { // TODO: Find a more efficient way - if err = store.GetMetric(&behindCommitsCountMetric); err != nil { + if err = c.Store.GetMetric(ctx, &behindCommitsCountMetric); err != nil { return err } + envBehindCommitCount = behindCommitsCountMetric.Value } } - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindEnvironmentBehindCommitsCount, Labels: env.DefaultLabelsValues(), Value: envBehindCommitCount, @@ -147,31 +150,35 @@ func pullEnvironmentMetrics(env schemas.Environment) (err error) { Labels: env.DefaultLabelsValues(), } - storeGetMetric(&envDeploymentCount) + storeGetMetric(ctx, c.Store, &envDeploymentCount) + if env.LatestDeployment.JobID > deploymentJobID { envDeploymentCount.Value++ } - storeSetMetric(envDeploymentCount) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, envDeploymentCount) + + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindEnvironmentBehindDurationSeconds, Labels: 
env.DefaultLabelsValues(), Value: envBehindDurationSeconds, }) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindEnvironmentDeploymentDurationSeconds, Labels: env.DefaultLabelsValues(), Value: env.LatestDeployment.DurationSeconds, }) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindEnvironmentDeploymentJobID, Labels: env.DefaultLabelsValues(), Value: float64(env.LatestDeployment.JobID), }) emitStatusMetric( + ctx, + c.Store, schemas.MetricKindEnvironmentDeploymentStatus, env.DefaultLabelsValues(), statusesList[:], @@ -179,13 +186,13 @@ func pullEnvironmentMetrics(env schemas.Environment) (err error) { env.OutputSparseStatusMetrics, ) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindEnvironmentDeploymentTimestamp, Labels: env.DefaultLabelsValues(), Value: env.LatestDeployment.Timestamp, }) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindEnvironmentInformation, Labels: infoLabels, Value: 1, diff --git a/pkg/exporter/environments_test.go b/pkg/controller/environments_test.go similarity index 86% rename from pkg/exporter/environments_test.go rename to pkg/controller/environments_test.go index 6fee194d..c34056f6 100644 --- a/pkg/exporter/environments_test.go +++ b/pkg/controller/environments_test.go @@ -1,19 +1,19 @@ -package exporter +package controller import ( "fmt" "net/http" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestPullEnvironmentsFromProject(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() + ctx, c, mux, srv := 
newTestController(config.Config{}) + defer srv.Close() mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/environments"), func(w http.ResponseWriter, r *http.Request) { @@ -47,11 +47,11 @@ func TestPullEnvironmentsFromProject(t *testing.T) { }`) }) - p := schemas.Project{Name: "foo"} - p.Pull.Environments.NameRegexpValue = pointy.String("^prod") - assert.NoError(t, pullEnvironmentsFromProject(p)) + p := schemas.NewProject("foo") + p.Pull.Environments.Regexp = "^prod" + assert.NoError(t, c.PullEnvironmentsFromProject(ctx, p)) - storedEnvironments, _ := store.Environments() + storedEnvironments, _ := c.Store.Environments(ctx) expectedEnvironments := schemas.Environments{ "54146361": schemas.Environment{ ProjectName: "foo", @@ -69,7 +69,6 @@ func TestPullEnvironmentsFromProject(t *testing.T) { CommitShortID: "416d8ea1", Status: "success", }, - TagsRegexp: ".*", OutputSparseStatusMetrics: true, }, } @@ -77,9 +76,8 @@ func TestPullEnvironmentsFromProject(t *testing.T) { } func TestPullEnvironmentMetricsSucceed(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() mux.HandleFunc("/api/v4/projects/foo/environments/1", func(w http.ResponseWriter, r *http.Request) { @@ -126,10 +124,10 @@ func TestPullEnvironmentMetricsSucceed(t *testing.T) { } // Metrics pull shall succeed - assert.NoError(t, pullEnvironmentMetrics(env)) + assert.NoError(t, c.PullEnvironmentMetrics(ctx, env)) // Check if all the metrics exist - metrics, _ := store.Metrics() + metrics, _ := c.Store.Metrics(ctx) labels := map[string]string{ "project": "foo", "environment": "prod", diff --git a/pkg/exporter/garbage_collector.go b/pkg/controller/garbage_collector.go similarity index 51% rename from pkg/exporter/garbage_collector.go rename to pkg/controller/garbage_collector.go index 6093161e..c4a6e3ce 100644 --- a/pkg/exporter/garbage_collector.go +++ 
b/pkg/controller/garbage_collector.go @@ -1,31 +1,36 @@ -package exporter +package controller import ( + "context" + "reflect" "regexp" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "dario.cat/mergo" log "github.com/sirupsen/logrus" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/store" ) -func garbageCollectProjects() error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() +// GarbageCollectProjects .. +func (c *Controller) GarbageCollectProjects(ctx context.Context) error { log.Info("starting 'projects' garbage collection") defer log.Info("ending 'projects' garbage collection") - storedProjects, err := store.Projects() + storedProjects, err := c.Store.Projects(ctx) if err != nil { return err } // Loop through all configured projects - for _, p := range config.Projects { + for _, cp := range c.Config.Projects { + p := schemas.Project{Project: cp} delete(storedProjects, p.Key()) } // Loop through what can be found from the wildcards - for _, w := range config.Wildcards { - foundProjects, err := gitlabClient.ListProjects(w) + for _, w := range c.Config.Wildcards { + foundProjects, err := c.Gitlab.ListProjects(ctx, w) if err != nil { return err } @@ -40,7 +45,7 @@ func garbageCollectProjects() error { }).Debug("found projects to garbage collect") for k, p := range storedProjects { - if err = store.DelProject(k); err != nil { + if err = c.Store.DelProject(ctx, k); err != nil { return err } @@ -52,72 +57,67 @@ func garbageCollectProjects() error { return nil } -func garbageCollectEnvironments() error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() +// GarbageCollectEnvironments .. 
+func (c *Controller) GarbageCollectEnvironments(ctx context.Context) error { log.Info("starting 'environments' garbage collection") defer log.Info("ending 'environments' garbage collection") - storedEnvironments, err := store.Environments() + storedEnvironments, err := c.Store.Environments(ctx) if err != nil { return err } - envProjects := make(map[string]string) - for k, env := range storedEnvironments { - p := schemas.Project{ - Name: env.ProjectName, - } + envProjects := make(map[schemas.Project]bool) + + for _, env := range storedEnvironments { + p := schemas.NewProject(env.ProjectName) - projectExists, err := store.ProjectExists(p.Key()) + projectExists, err := c.Store.ProjectExists(ctx, p.Key()) if err != nil { return err } // If the project does not exist anymore, delete the environment if !projectExists { - if err = store.DelEnvironment(k); err != nil { + if err = deleteEnv(ctx, c.Store, env, "non-existent-project"); err != nil { return err } - log.WithFields(log.Fields{ - "project-name": env.ProjectName, - "environment-name": env.Name, - "reason": "non-existent-project", - }).Info("deleted environment from the store") continue } - if err = store.GetProject(&p); err != nil { + if err = c.Store.GetProject(ctx, &p); err != nil { return err } + // If the environment is not configured to be pulled anymore, delete it + if !p.Pull.Environments.Enabled { + if err = deleteEnv(ctx, c.Store, env, "project-pull-environments-disabled"); err != nil { + return err + } + + continue + } + // Store the project information to be able to refresh its environments // from the API later on - envProjects[p.Name] = p.Pull.Environments.NameRegexp() + envProjects[p] = true // If the environment is not configured to be pulled anymore, delete it - re := regexp.MustCompile(p.Pull.Environments.NameRegexp()) + re := regexp.MustCompile(p.Pull.Environments.Regexp) if !re.MatchString(env.Name) { - if err = store.DelEnvironment(k); err != nil { + if err = deleteEnv(ctx, c.Store, env, 
"environment-not-in-regexp"); err != nil { return err } - log.WithFields(log.Fields{ - "project-name": env.ProjectName, - "environment-name": env.Name, - "reason": "environment-not-in-regexp", - }).Info("deleted environment from the store") continue } // Check if the latest configuration of the project in store matches the environment one - if env.OutputSparseStatusMetrics != p.OutputSparseStatusMetrics() || - env.TagsRegexp != p.Pull.Environments.TagsRegexp() { - env.OutputSparseStatusMetrics = p.OutputSparseStatusMetrics() - env.TagsRegexp = p.Pull.Environments.TagsRegexp() + if env.OutputSparseStatusMetrics != p.OutputSparseStatusMetrics { + env.OutputSparseStatusMetrics = p.OutputSparseStatusMetrics - if err = store.SetEnvironment(env); err != nil { + if err = c.Store.SetEnvironment(ctx, env); err != nil { return err } @@ -129,202 +129,148 @@ func garbageCollectEnvironments() error { } // Refresh the environments from the API - existingEnvs := make(map[schemas.EnvironmentKey]struct{}) - for projectName, envRegexp := range envProjects { - envs, err := gitlabClient.GetProjectEnvironments(projectName, envRegexp) + existingEnvs := make(schemas.Environments) + + for p := range envProjects { + projectEnvs, err := c.Gitlab.GetProjectEnvironments(ctx, p) if err != nil { return err } - for _, envName := range envs { - existingEnvs[schemas.Environment{ - ProjectName: projectName, - Name: envName, - }.Key()] = struct{}{} + if err = mergo.Merge(&existingEnvs, projectEnvs); err != nil { + return err } } - storedEnvironments, err = store.Environments() + storedEnvironments, err = c.Store.Environments(ctx) if err != nil { return err } for k, env := range storedEnvironments { if _, exists := existingEnvs[k]; !exists { - if err = store.DelEnvironment(k); err != nil { + if err = deleteEnv(ctx, c.Store, env, "non-existent-environment"); err != nil { return err } - - log.WithFields(log.Fields{ - "project-name": env.ProjectName, - "environment-name": env.Name, - "reason": 
"non-existent-environment", - }).Info("deleted environment from the store") } } return nil } -func garbageCollectRefs() error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() +// GarbageCollectRefs .. +func (c *Controller) GarbageCollectRefs(ctx context.Context) error { log.Info("starting 'refs' garbage collection") defer log.Info("ending 'refs' garbage collection") - storedRefs, err := store.Refs() + storedRefs, err := c.Store.Refs(ctx) if err != nil { return err } - refProjects := make(map[string]schemas.ProjectPullRefs) - for k, ref := range storedRefs { - p := schemas.Project{Name: ref.ProjectName} - projectExists, err := store.ProjectExists(p.Key()) + for _, ref := range storedRefs { + projectExists, err := c.Store.ProjectExists(ctx, ref.Project.Key()) if err != nil { return err } // If the project does not exist anymore, delete the ref if !projectExists { - if err = store.DelRef(k); err != nil { + if err = deleteRef(ctx, c.Store, ref, "non-existent-project"); err != nil { return err } - log.WithFields(log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "reason": "non-existent-project", - }).Info("deleted ref from the store") continue } - if err = store.GetProject(&p); err != nil { - return err - } + // If the ref is not configured to be pulled anymore, delete the ref + var re *regexp.Regexp - // Store the project information to be able to refresh all refs - // from the API later on - refProjects[p.Name] = p.Pull.Refs + if re, err = schemas.GetRefRegexp(ref.Project.Pull.Refs, ref.Kind); err != nil { + if err = deleteRef(ctx, c.Store, ref, "invalid-ref-kind"); err != nil { + return err + } + } - // If the ref is not configured to be pulled anymore, delete the ref - re := regexp.MustCompile(p.Pull.Refs.Regexp()) if !re.MatchString(ref.Name) { - if err = store.DelRef(k); err != nil { + if err = deleteRef(ctx, c.Store, ref, "ref-not-matching-regexp"); err != nil { return err } - - log.WithFields(log.Fields{ - "project-name": 
ref.ProjectName, - "ref": ref.Name, - "reason": "ref-not-in-regexp", - }).Info("deleted ref from the store") - continue } // Check if the latest configuration of the project in store matches the ref one - if ref.OutputSparseStatusMetrics != p.OutputSparseStatusMetrics() || - ref.PullPipelineJobsEnabled != p.Pull.Pipeline.Jobs.Enabled() || - ref.PullPipelineVariablesEnabled != p.Pull.Pipeline.Variables.Enabled() || - ref.PullPipelineVariablesRegexp != p.Pull.Pipeline.Variables.Regexp() { - ref.OutputSparseStatusMetrics = p.OutputSparseStatusMetrics() - ref.PullPipelineJobsEnabled = p.Pull.Pipeline.Jobs.Enabled() - ref.PullPipelineVariablesEnabled = p.Pull.Pipeline.Variables.Enabled() - ref.PullPipelineVariablesRegexp = p.Pull.Pipeline.Variables.Regexp() - if err = store.SetRef(ref); err != nil { + p := ref.Project + + if err = c.Store.GetProject(ctx, &p); err != nil { + return err + } + + if !reflect.DeepEqual(ref.Project, p) { + ref.Project = p + + if err = c.Store.SetRef(ctx, ref); err != nil { return err } + log.WithFields(log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "ref": ref.Name, }).Info("updated ref, associated project configuration was not in sync") } } // Refresh the refs from the API - existingRefs := make(map[schemas.RefKey]struct{}) - for projectName, projectPullRefs := range refProjects { - branches, err := gitlabClient.GetProjectBranches(projectName, projectPullRefs.Regexp(), projectPullRefs.MaxAgeSeconds()) - if err != nil { - return err - } + projects, err := c.Store.Projects(ctx) + if err != nil { + return err + } - for _, branch := range branches { - existingRefs[schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: projectName, - Name: branch, - }.Key()] = struct{}{} - } + expectedRefs := make(map[schemas.RefKey]bool) - tags, err := gitlabClient.GetProjectTags(projectName, projectPullRefs.Regexp(), projectPullRefs.MaxAgeSeconds()) + for _, p := range projects { + refs, err := c.GetRefs(ctx, p) if err != 
nil { return err } - for _, tag := range tags { - existingRefs[schemas.Ref{ - Kind: schemas.RefKindTag, - ProjectName: projectName, - Name: tag, - }.Key()] = struct{}{} - } - - if projectPullRefs.From.MergeRequests.Enabled() { - mergeRequests, err := gitlabClient.GetProjectMergeRequestsPipelines(projectName, projectPullRefs.From.MergeRequests.Depth(), projectPullRefs.MaxAgeSeconds()) - if err != nil { - return err - } - - for _, mr := range mergeRequests { - existingRefs[schemas.Ref{ - Kind: schemas.RefKindMergeRequest, - ProjectName: projectName, - Name: mr, - }.Key()] = struct{}{} - } + for _, ref := range refs { + expectedRefs[ref.Key()] = true } } - storedRefs, err = store.Refs() + // Refresh the stored refs as we may have already removed some + storedRefs, err = c.Store.Refs(ctx) if err != nil { return err } for k, ref := range storedRefs { - if _, exists := existingRefs[k]; !exists { - if err = store.DelRef(k); err != nil { + if _, expected := expectedRefs[k]; !expected { + if err = deleteRef(ctx, c.Store, ref, "not-expected"); err != nil { return err } - - log.WithFields(log.Fields{ - "project-name": ref.ProjectName, - "ref-name": ref.Name, - "reason": "non-existent-ref", - }).Info("deleted ref from the store") } } return nil } -func garbageCollectMetrics() error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() +// GarbageCollectMetrics .. 
+func (c *Controller) GarbageCollectMetrics(ctx context.Context) error { log.Info("starting 'metrics' garbage collection") defer log.Info("ending 'metrics' garbage collection") - storedEnvironments, err := store.Environments() + storedEnvironments, err := c.Store.Environments(ctx) if err != nil { return err } - storedRefs, err := store.Refs() + storedRefs, err := c.Store.Refs(ctx) if err != nil { return err } - storedMetrics, err := store.Metrics() + storedMetrics, err := c.Store.Metrics(ctx) if err != nil { return err } @@ -337,7 +283,7 @@ func garbageCollectMetrics() error { metricLabelEnvironment, metricLabelEnvironmentExists := m.Labels["environment"] if !metricLabelProjectExists || (!metricLabelRefExists && !metricLabelEnvironmentExists) { - if err = store.DelMetric(k); err != nil { + if err = c.Store.DelMetric(ctx, k); err != nil { return err } @@ -349,17 +295,17 @@ func garbageCollectMetrics() error { } if metricLabelRefExists && !metricLabelEnvironmentExists { - refKey := schemas.Ref{ - Kind: schemas.RefKind(m.Labels["kind"]), - ProjectName: metricLabelProject, - Name: metricLabelRef, - }.Key() + refKey := schemas.NewRef( + schemas.NewProject(metricLabelProject), + schemas.RefKind(m.Labels["kind"]), + metricLabelRef, + ).Key() ref, refExists := storedRefs[refKey] // If the ref does not exist anymore, delete the metric if !refExists { - if err = store.DelMetric(k); err != nil { + if err = c.Store.DelMetric(ctx, k); err != nil { return err } @@ -368,6 +314,7 @@ func garbageCollectMetrics() error { "metric-labels": m.Labels, "reason": "non-existent-ref", }).Info("deleted metric from the store") + continue } @@ -379,9 +326,8 @@ func garbageCollectMetrics() error { schemas.MetricKindJobRunCount, schemas.MetricKindJobStatus, schemas.MetricKindJobTimestamp: - - if !ref.PullPipelineJobsEnabled { - if err = store.DelMetric(k); err != nil { + if !ref.Project.Pull.Pipeline.Jobs.Enabled { + if err = c.Store.DelMetric(ctx, k); err != nil { return err } @@ -390,6 +336,7 
@@ func garbageCollectMetrics() error { "metric-labels": m.Labels, "reason": "jobs-metrics-disabled-on-ref", }).Info("deleted metric from the store") + continue } @@ -400,9 +347,8 @@ func garbageCollectMetrics() error { switch m.Kind { case schemas.MetricKindJobStatus, schemas.MetricKindStatus: - - if ref.OutputSparseStatusMetrics && m.Value != 1 { - if err = store.DelMetric(k); err != nil { + if ref.Project.OutputSparseStatusMetrics && m.Value != 1 { + if err = c.Store.DelMetric(ctx, k); err != nil { return err } @@ -411,9 +357,9 @@ func garbageCollectMetrics() error { "metric-labels": m.Labels, "reason": "output-sparse-metrics-enabled-on-ref", }).Info("deleted metric from the store") + continue } - default: } } @@ -428,7 +374,7 @@ func garbageCollectMetrics() error { // If the ref does not exist anymore, delete the metric if !envExists { - if err = store.DelMetric(k); err != nil { + if err = c.Store.DelMetric(ctx, k); err != nil { return err } @@ -437,6 +383,7 @@ func garbageCollectMetrics() error { "metric-labels": m.Labels, "reason": "non-existent-environment", }).Info("deleted metric from the store") + continue } @@ -444,7 +391,7 @@ func garbageCollectMetrics() error { switch m.Kind { case schemas.MetricKindEnvironmentDeploymentStatus: if env.OutputSparseStatusMetrics && m.Value != 1 { - if err = store.DelMetric(k); err != nil { + if err = c.Store.DelMetric(ctx, k); err != nil { return err } @@ -453,6 +400,7 @@ func garbageCollectMetrics() error { "metric-labels": m.Labels, "reason": "output-sparse-metrics-enabled-on-environment", }).Info("deleted metric from the store") + continue } } @@ -461,3 +409,32 @@ func garbageCollectMetrics() error { return nil } + +func deleteEnv(ctx context.Context, s store.Store, env schemas.Environment, reason string) (err error) { + if err = s.DelEnvironment(ctx, env.Key()); err != nil { + return + } + + log.WithFields(log.Fields{ + "project-name": env.ProjectName, + "environment-name": env.Name, + "reason": reason, + 
}).Info("deleted environment from the store") + + return +} + +func deleteRef(ctx context.Context, s store.Store, ref schemas.Ref, reason string) (err error) { + if err = s.DelRef(ctx, ref.Key()); err != nil { + return + } + + log.WithFields(log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "ref-kind": ref.Kind, + "reason": reason, + }).Info("deleted ref from the store") + + return +} diff --git a/pkg/controller/garbage_collector_test.go b/pkg/controller/garbage_collector_test.go new file mode 100644 index 00000000..7a4be0ae --- /dev/null +++ b/pkg/controller/garbage_collector_test.go @@ -0,0 +1,166 @@ +package controller + +import ( + "context" + "fmt" + "net/http" + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func TestGarbageCollectProjects(t *testing.T) { + p1 := schemas.NewProject("cfg/p1") + p2 := schemas.NewProject("cfg/p2") + p3 := schemas.NewProject("wc/p3") + p4 := schemas.NewProject("wc/p4") + + ctx, c, mux, srv := newTestController(config.Config{ + Projects: []config.Project{p1.Project}, + Wildcards: config.Wildcards{ + config.Wildcard{ + Owner: config.WildcardOwner{ + Kind: "group", + Name: "wc", + }, + }, + }, + }) + defer srv.Close() + + mux.HandleFunc("/api/v4/groups/wc/projects", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"id":1, "path_with_namespace": "wc/p3", "jobs_enabled": true}]`) + }) + + c.Store.SetProject(ctx, p1) + c.Store.SetProject(ctx, p2) + c.Store.SetProject(ctx, p3) + c.Store.SetProject(ctx, p4) + + assert.NoError(t, c.GarbageCollectProjects(context.Background())) + storedProjects, err := c.Store.Projects(ctx) + assert.NoError(t, err) + + expectedProjects := schemas.Projects{ + p1.Key(): p1, + p3.Key(): p3, + } + assert.Equal(t, expectedProjects, storedProjects) +} + +func 
TestGarbageCollectEnvironments(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/p2/environments", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name": "main"}]`) + }) + + p2 := schemas.NewProject("p2") + p2.Pull.Environments.Enabled = true + p2.Pull.Environments.Regexp = "^main$" + + envp1main := schemas.Environment{ProjectName: "p1", Name: "main"} + envp2dev := schemas.Environment{ProjectName: "p2", Name: "dev"} + envp2main := schemas.Environment{ProjectName: "p2", Name: "main"} + + c.Store.SetProject(ctx, p2) + c.Store.SetEnvironment(ctx, envp1main) + c.Store.SetEnvironment(ctx, envp2dev) + c.Store.SetEnvironment(ctx, envp2main) + + assert.NoError(t, c.GarbageCollectEnvironments(context.Background())) + storedEnvironments, err := c.Store.Environments(ctx) + assert.NoError(t, err) + + expectedEnvironments := schemas.Environments{ + envp2main.Key(): schemas.Environment{ + ProjectName: "p2", + Name: "main", + OutputSparseStatusMetrics: true, + }, + } + assert.Equal(t, expectedEnvironments, storedEnvironments) +} + +func TestGarbageCollectRefs(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/p2/repository/branches", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name": "main"}]`) + }) + + mux.HandleFunc("/api/v4/projects/p2/repository/tags", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name": "main"}]`) + }) + + pr1dev := schemas.NewRef(schemas.NewProject("p1"), schemas.RefKindBranch, "dev") + pr1main := schemas.NewRef(schemas.NewProject("p1"), schemas.RefKindBranch, "main") + + p2 := schemas.NewProject("p2") + p2.Pull.Environments.Regexp = "^main$" + + pr2dev := schemas.NewRef(p2, schemas.RefKindBranch, "dev") + pr2main := schemas.NewRef(p2, schemas.RefKindBranch, "main") + + c.Store.SetProject(ctx, p2) + c.Store.SetRef(ctx, pr1dev) + 
c.Store.SetRef(ctx, pr1main) + c.Store.SetRef(ctx, pr2dev) + c.Store.SetRef(ctx, pr2main) + + assert.NoError(t, c.GarbageCollectRefs(context.Background())) + storedRefs, err := c.Store.Refs(ctx) + assert.NoError(t, err) + + newPR2main := schemas.NewRef(p2, schemas.RefKindBranch, "main") + expectedRefs := schemas.Refs{ + newPR2main.Key(): newPR2main, + } + assert.Equal(t, expectedRefs, storedRefs) +} + +func TestGarbageCollectMetrics(t *testing.T) { + ctx, c, _, srv := newTestController(config.Config{}) + srv.Close() + + p1 := schemas.NewProject("p1") + p1.Pull.Pipeline.Jobs.Enabled = true + + ref1 := schemas.NewRef(p1, schemas.RefKindBranch, "foo") + + ref1m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"project": "p1", "ref": "foo", "kind": "branch"}} + ref1m2 := schemas.Metric{Kind: schemas.MetricKindStatus, Labels: prometheus.Labels{"project": "p1", "ref": "foo", "kind": "branch"}} + ref1m3 := schemas.Metric{Kind: schemas.MetricKindJobDurationSeconds, Labels: prometheus.Labels{"project": "p1", "ref": "foo", "kind": "branch"}} + + ref2m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"project": "p2", "ref": "bar", "kind": "branch"}} + ref3m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"project": "foo", "kind": "branch"}} + ref4m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"ref": "bar", "kind": "branch"}} + + c.Store.SetRef(ctx, ref1) + c.Store.SetMetric(ctx, ref1m1) + c.Store.SetMetric(ctx, ref1m2) + c.Store.SetMetric(ctx, ref1m3) + c.Store.SetMetric(ctx, ref2m1) + c.Store.SetMetric(ctx, ref3m1) + c.Store.SetMetric(ctx, ref4m1) + + assert.NoError(t, c.GarbageCollectMetrics(context.Background())) + storedMetrics, err := c.Store.Metrics(ctx) + assert.NoError(t, err) + + expectedMetrics := schemas.Metrics{ + ref1m1.Key(): ref1m1, + ref1m3.Key(): ref1m3, + } + assert.Equal(t, expectedMetrics, storedMetrics) +} diff --git 
a/pkg/controller/handlers.go b/pkg/controller/handlers.go new file mode 100644 index 00000000..5fa2f429 --- /dev/null +++ b/pkg/controller/handlers.go @@ -0,0 +1,145 @@ +package controller + +import ( + "context" + "fmt" + "io" + "net/http" + "reflect" + + "github.com/heptiolabs/healthcheck" + "github.com/prometheus/client_golang/prometheus/promhttp" + log "github.com/sirupsen/logrus" + "github.com/xanzy/go-gitlab" + "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" + "go.opentelemetry.io/otel/trace" +) + +// HealthCheckHandler .. +func (c *Controller) HealthCheckHandler(ctx context.Context) (h healthcheck.Handler) { + h = healthcheck.NewHandler() + if c.Config.Gitlab.EnableHealthCheck { + h.AddReadinessCheck("gitlab-reachable", c.Gitlab.ReadinessCheck(ctx)) + } else { + log.WithContext(ctx). + Warn("GitLab health check has been disabled. Readiness checks won't be operated.") + } + + return +} + +// MetricsHandler .. +func (c *Controller) MetricsHandler(w http.ResponseWriter, r *http.Request) { + ctx := r.Context() + span := trace.SpanFromContext(ctx) + + defer span.End() + + registry := NewRegistry(ctx) + + metrics, err := c.Store.Metrics(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + if err := registry.ExportInternalMetrics( + ctx, + c.Gitlab, + c.Store, + ); err != nil { + log.WithContext(ctx). + WithError(err). + Warn() + } + + registry.ExportMetrics(metrics) + + otelhttp.NewHandler( + promhttp.HandlerFor(registry, promhttp.HandlerOpts{ + Registry: registry, + EnableOpenMetrics: c.Config.Server.Metrics.EnableOpenmetricsEncoding, + }), + "/metrics", + ).ServeHTTP(w, r) +} + +// WebhookHandler .. 
+func (c *Controller) WebhookHandler(w http.ResponseWriter, r *http.Request) { + span := trace.SpanFromContext(r.Context()) + defer span.End() + + // We create a new background context instead of relying on the request one which has a short cancellation TTL + ctx := trace.ContextWithSpan(context.Background(), span) + + logger := log. + WithContext(ctx). + WithFields(log.Fields{ + "ip-address": r.RemoteAddr, + "user-agent": r.UserAgent(), + }) + + logger.Debug("webhook request") + + if r.Header.Get("X-Gitlab-Token") != c.Config.Server.Webhook.SecretToken { + logger.Debug("invalid token provided for a webhook request") + w.WriteHeader(http.StatusForbidden) + fmt.Fprint(w, "{\"error\": \"invalid token\"}") + + return + } + + if r.Body == http.NoBody { + logger. + WithError(fmt.Errorf("nil body")). + Warn("unable to read body of a received webhook") + + w.WriteHeader(http.StatusBadRequest) + + return + } + + payload, err := io.ReadAll(r.Body) + if err != nil { + logger. + WithError(err). + Warn("unable to read body of a received webhook") + + w.WriteHeader(http.StatusBadRequest) + + return + } + + event, err := gitlab.ParseHook(gitlab.HookEventType(r), payload) + if err != nil { + logger. + WithError(err). + Warn("unable to parse body of a received webhook") + + w.WriteHeader(http.StatusBadRequest) + + return + } + + switch event := event.(type) { + case *gitlab.PipelineEvent: + go c.processPipelineEvent(ctx, *event) + case *gitlab.JobEvent: + go c.processJobEvent(ctx, *event) + case *gitlab.DeploymentEvent: + go c.processDeploymentEvent(ctx, *event) + case *gitlab.PushEvent: + go c.processPushEvent(ctx, *event) + case *gitlab.TagEvent: + go c.processTagEvent(ctx, *event) + case *gitlab.MergeEvent: + go c.processMergeEvent(ctx, *event) + default: + logger. + WithField("event-type", reflect.TypeOf(event).String()). 
+ Warn("received a non supported event type as a webhook") + + w.WriteHeader(http.StatusUnprocessableEntity) + } +} diff --git a/pkg/exporter/webhooks_test.go b/pkg/controller/handlers_test.go similarity index 58% rename from pkg/exporter/webhooks_test.go rename to pkg/controller/handlers_test.go index 40f8df59..23c8c549 100644 --- a/pkg/exporter/webhooks_test.go +++ b/pkg/controller/handlers_test.go @@ -1,4 +1,4 @@ -package exporter +package controller import ( "io/ioutil" @@ -7,19 +7,27 @@ import ( "strings" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" ) func TestWebhookHandler(t *testing.T) { - resetGlobalValues() - - config.Server.Webhook.SecretToken = "secret" - req := httptest.NewRequest("POST", "/webhook", nil) + _, c, _, srv := newTestController(config.Config{ + Server: config.Server{ + Webhook: config.ServerWebhook{ + Enabled: true, + SecretToken: "secret", + }, + }, + }) + srv.Close() + + req := httptest.NewRequest(http.MethodPost, "/webhook", nil) // Test without auth token, should return a 403 w := httptest.NewRecorder() - WebhookHandler(w, req) + c.WebhookHandler(w, req) assert.Equal(t, http.StatusForbidden, w.Result().StatusCode) // Provide correct authentication header @@ -27,7 +35,7 @@ func TestWebhookHandler(t *testing.T) { // Test with empty body, should return a 400 w = httptest.NewRecorder() - WebhookHandler(w, req) + c.WebhookHandler(w, req) assert.Equal(t, http.StatusBadRequest, w.Result().StatusCode) // Provide an invalid body @@ -35,7 +43,7 @@ func TestWebhookHandler(t *testing.T) { // Test with invalid body, should return a 400 w = httptest.NewRecorder() - WebhookHandler(w, req) + c.WebhookHandler(w, req) assert.Equal(t, http.StatusBadRequest, w.Result().StatusCode) // Provide an invalid event type @@ -44,7 +52,7 @@ func TestWebhookHandler(t *testing.T) { // Test with invalid event type, should return a 
422 w = httptest.NewRecorder() - WebhookHandler(w, req) + c.WebhookHandler(w, req) assert.Equal(t, http.StatusUnprocessableEntity, w.Result().StatusCode) // Provide an valid event type: pipeline @@ -53,7 +61,7 @@ func TestWebhookHandler(t *testing.T) { // Test with pipeline event type, should return a 200 w = httptest.NewRecorder() - WebhookHandler(w, req) + c.WebhookHandler(w, req) assert.Equal(t, http.StatusOK, w.Result().StatusCode) // Provide an valid event type: deployment @@ -62,51 +70,6 @@ func TestWebhookHandler(t *testing.T) { // Test with deployment event type, should return a 200 w = httptest.NewRecorder() - WebhookHandler(w, req) + c.WebhookHandler(w, req) assert.Equal(t, http.StatusOK, w.Result().StatusCode) } - -func TestTriggerRefMetricsPull(_ *testing.T) { - resetGlobalValues() - - ref1 := schemas.Ref{ - ProjectName: "group/foo", - Name: "main", - } - - p2 := schemas.Project{Name: "group/bar"} - ref2 := schemas.Ref{ - ProjectName: "group/bar", - Name: "main", - } - - store.SetRef(ref1) - store.SetProject(p2) - - // TODO: Assert results somehow - triggerRefMetricsPull(ref1) - triggerRefMetricsPull(ref2) -} - -func TestTriggerEnvironmentMetricsPull(_ *testing.T) { - resetGlobalValues() - - p1 := schemas.Project{Name: "foo/bar"} - env1 := schemas.Environment{ - ProjectName: "foo/bar", - Name: "dev", - } - - env2 := schemas.Environment{ - ProjectName: "foo/baz", - Name: "prod", - } - - store.SetProject(p1) - store.SetEnvironment(env1) - store.SetEnvironment(env2) - - // TODO: Assert results somehow - triggerEnvironmentMetricsPull(env1) - triggerEnvironmentMetricsPull(env2) -} diff --git a/pkg/exporter/jobs.go b/pkg/controller/jobs.go similarity index 52% rename from pkg/exporter/jobs.go rename to pkg/controller/jobs.go index 9bfafad3..36cf5ca2 100644 --- a/pkg/exporter/jobs.go +++ b/pkg/controller/jobs.go @@ -1,55 +1,51 @@ -package exporter +package controller import ( + "context" "reflect" "regexp" - 
"github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" log "github.com/sirupsen/logrus" -) -func pullRefPipelineJobsMetrics(ref schemas.Ref) error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) - jobs, err := gitlabClient.ListRefPipelineJobs(ref) +// PullRefPipelineJobsMetrics .. +func (c *Controller) PullRefPipelineJobsMetrics(ctx context.Context, ref schemas.Ref) error { + jobs, err := c.Gitlab.ListRefPipelineJobs(ctx, ref) if err != nil { return err } for _, job := range jobs { - processJobMetrics(ref, job) + c.ProcessJobMetrics(ctx, ref, job) } return nil } -func pullRefMostRecentJobsMetrics(ref schemas.Ref) error { - if !ref.PullPipelineJobsEnabled { +// PullRefMostRecentJobsMetrics .. +func (c *Controller) PullRefMostRecentJobsMetrics(ctx context.Context, ref schemas.Ref) error { + if !ref.Project.Pull.Pipeline.Jobs.Enabled { return nil } - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - jobs, err := gitlabClient.ListRefMostRecentJobs(ref) + jobs, err := c.Gitlab.ListRefMostRecentJobs(ctx, ref) if err != nil { return err } for _, job := range jobs { - processJobMetrics(ref, job) + c.ProcessJobMetrics(ctx, ref, job) } return nil } -func processJobMetrics(ref schemas.Ref, job schemas.Job) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - +// ProcessJobMetrics .. 
+func (c *Controller) ProcessJobMetrics(ctx context.Context, ref schemas.Ref, job schemas.Job) { projectRefLogFields := log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "job-name": job.Name, "job-id": job.ID, } @@ -57,15 +53,20 @@ func processJobMetrics(ref schemas.Ref, job schemas.Job) { labels := ref.DefaultLabelsValues() labels["stage"] = job.Stage labels["job_name"] = job.Name + labels["tag_list"] = job.TagList + labels["failure_reason"] = job.FailureReason - if ref.PullPipelineJobsRunnerDescriptionEnabled { - re, err := regexp.Compile(ref.PullPipelineJobsRunnerDescriptionAggregationRegexp) + if ref.Project.Pull.Pipeline.Jobs.RunnerDescription.Enabled { + re, err := regexp.Compile(ref.Project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp) if err != nil { - log.WithFields(projectRefLogFields).WithField("error", err.Error()).Error("invalid job runner description aggregation regexp") + log.WithContext(ctx). + WithFields(projectRefLogFields). + WithError(err). + Error("invalid job runner description aggregation regexp") } if re.MatchString(job.Runner.Description) { - labels["runner_description"] = ref.PullPipelineJobsRunnerDescriptionAggregationRegexp + labels["runner_description"] = ref.Project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp } else { labels["runner_description"] = job.Runner.Description } @@ -75,8 +76,12 @@ func processJobMetrics(ref schemas.Ref, job schemas.Job) { } // Refresh ref state from the store - if err := store.GetRef(&ref); err != nil { - log.WithFields(projectRefLogFields).WithField("error", err.Error()).Error("getting ref from the store") + if err := c.Store.GetRef(ctx, &ref); err != nil { + log.WithContext(ctx). + WithFields(projectRefLogFields). + WithError(err). 
+ Error("getting ref from the store") + return } @@ -91,34 +96,44 @@ func processJobMetrics(ref schemas.Ref, job schemas.Job) { if ref.LatestJobs == nil { ref.LatestJobs = make(schemas.Jobs) } + ref.LatestJobs[job.Name] = job - if err := store.SetRef(ref); err != nil { - log.WithFields( - projectRefLogFields, - ).WithField("error", err.Error()).Error("writing ref in the store") + + if err := c.Store.SetRef(ctx, ref); err != nil { + log.WithContext(ctx). + WithFields(projectRefLogFields). + WithError(err). + Error("writing ref in the store") + return } - log.WithFields(projectRefLogFields).Debug("processing job metrics") + log.WithFields(projectRefLogFields).Trace("processing job metrics") - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindJobID, Labels: labels, Value: float64(job.ID), }) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindJobTimestamp, Labels: labels, Value: job.Timestamp, }) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindJobDurationSeconds, Labels: labels, Value: job.DurationSeconds, }) + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindJobQueuedDurationSeconds, + Labels: labels, + Value: job.QueuedDurationSeconds, + }) + jobRunCount := schemas.Metric{ Kind: schemas.MetricKindJobRunCount, Labels: labels, @@ -127,11 +142,13 @@ func processJobMetrics(ref schemas.Ref, job schemas.Job) { // If the metric does not exist yet, start with 0 instead of 1 // this could cause some false positives in prometheus // when restarting the exporter otherwise - jobRunCountExists, err := store.MetricExists(jobRunCount.Key()) + jobRunCountExists, err := c.Store.MetricExists(ctx, jobRunCount.Key()) if err != nil { - log.WithFields( - projectRefLogFields, - ).WithField("error", err.Error()).Error("checking if metric exists in the store") + log.WithContext(ctx). 
+ WithFields(projectRefLogFields). + WithError(err). + Error("checking if metric exists in the store") + return } @@ -141,24 +158,28 @@ func processJobMetrics(ref schemas.Ref, job schemas.Job) { jobTriggeredRegexp := regexp.MustCompile("^(skipped|manual|scheduled)$") lastJobTriggered := !jobTriggeredRegexp.MatchString(lastJob.Status) jobTriggered := !jobTriggeredRegexp.MatchString(job.Status) + if jobRunCountExists && ((lastJob.ID != job.ID && jobTriggered) || (lastJob.ID == job.ID && jobTriggered && !lastJobTriggered)) { - storeGetMetric(&jobRunCount) + storeGetMetric(ctx, c.Store, &jobRunCount) + jobRunCount.Value++ } - storeSetMetric(jobRunCount) + storeSetMetric(ctx, c.Store, jobRunCount) - storeSetMetric(schemas.Metric{ + storeSetMetric(ctx, c.Store, schemas.Metric{ Kind: schemas.MetricKindJobArtifactSizeBytes, Labels: labels, Value: job.ArtifactSize, }) emitStatusMetric( + ctx, + c.Store, schemas.MetricKindJobStatus, labels, statusesList[:], job.Status, - ref.OutputSparseStatusMetrics, + ref.Project.OutputSparseStatusMetrics, ) } diff --git a/pkg/exporter/jobs_test.go b/pkg/controller/jobs_test.go similarity index 60% rename from pkg/exporter/jobs_test.go rename to pkg/controller/jobs_test.go index 005d5ad7..545a8eca 100644 --- a/pkg/exporter/jobs_test.go +++ b/pkg/controller/jobs_test.go @@ -1,41 +1,40 @@ -package exporter +package controller import ( "fmt" "net/http" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestPullRefPipelineJobsMetrics(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() mux.HandleFunc("/api/v4/projects/foo/pipelines/1/jobs", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, 
`[{"id":1,"created_at":"2016-08-11T11:28:34.085Z"},{"id":2,"created_at":"2016-08-11T11:28:34.085Z"}]`) + fmt.Fprint(w, `[{"id":1,"created_at":"2016-08-11T11:28:34.085Z","started_at":"2016-08-11T11:28:56.085Z"},{"id":2,"created_at":"2016-08-11T11:28:34.085Z","started_at":"2016-08-11T11:28:58.085Z"}]`) }) - ref := schemas.Ref{ - ProjectName: "foo", - Name: "bar", - LatestPipeline: schemas.Pipeline{ - ID: 1, - }, - } + p := schemas.NewProject("foo") + p.Pull.Pipeline.Jobs.FromChildPipelines.Enabled = false - assert.NoError(t, pullRefPipelineJobsMetrics(ref)) - server.Close() - assert.Error(t, pullRefPipelineJobsMetrics(ref)) + ref := schemas.NewRef(p, schemas.RefKindBranch, "bar") + ref.LatestPipeline.ID = 1 + + // TODO: assert the results? + assert.NoError(t, c.PullRefPipelineJobsMetrics(ctx, ref)) + srv.Close() + assert.Error(t, c.PullRefPipelineJobsMetrics(ctx, ref)) } func TestPullRefMostRecentJobsMetrics(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() mux.HandleFunc("/api/v4/projects/foo/jobs", func(w http.ResponseWriter, r *http.Request) { @@ -43,8 +42,8 @@ func TestPullRefMostRecentJobsMetrics(t *testing.T) { }) ref := schemas.Ref{ - ProjectName: "foo", - Name: "bar", + Project: schemas.NewProject("foo"), + Name: "bar", LatestJobs: schemas.Jobs{ "bar": { ID: 1, @@ -53,17 +52,18 @@ func TestPullRefMostRecentJobsMetrics(t *testing.T) { } // Test with FetchPipelineJobMetrics disabled - assert.NoError(t, pullRefMostRecentJobsMetrics(ref)) + assert.NoError(t, c.PullRefMostRecentJobsMetrics(ctx, ref)) // Enable FetchPipelineJobMetrics - ref.PullPipelineJobsEnabled = true - assert.NoError(t, pullRefMostRecentJobsMetrics(ref)) - server.Close() - assert.Error(t, pullRefMostRecentJobsMetrics(ref)) + ref.Project.Pull.Pipeline.Jobs.Enabled = true + assert.NoError(t, c.PullRefMostRecentJobsMetrics(ctx, ref)) + srv.Close() + assert.Error(t, 
c.PullRefMostRecentJobsMetrics(ctx, ref)) } func TestProcessJobMetrics(t *testing.T) { - resetGlobalValues() + ctx, c, _, srv := newTestController(config.Config{}) + srv.Close() oldJob := schemas.Job{ ID: 1, @@ -78,56 +78,54 @@ func TestProcessJobMetrics(t *testing.T) { DurationSeconds: 15, Status: "failed", Stage: "🚀", + TagList: "", ArtifactSize: 150, Runner: schemas.Runner{ Description: "foo-123-bar", }, } - ref := schemas.Ref{ - ProjectName: "foo/bar", - Topics: "first,second", - Kind: schemas.RefKindBranch, - Name: "foo", - LatestPipeline: schemas.Pipeline{ - ID: 1, - Variables: "none", - }, - LatestJobs: schemas.Jobs{ - "foo": oldJob, - }, - OutputSparseStatusMetrics: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineJobsRunnerDescriptionAggregationRegexp: "foo-(.*)-bar", + p := schemas.NewProject("foo") + p.Topics = "first,second" + p.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp = `foo-(.*)-bar` + + ref := schemas.NewRef(p, schemas.RefKindBranch, "foo") + ref.LatestPipeline.ID = 1 + ref.LatestPipeline.Variables = "none" + ref.LatestJobs = schemas.Jobs{ + "foo": oldJob, } - store.SetRef(ref) + c.Store.SetRef(ctx, ref) // If we run it against the same job, nothing should change in the store - processJobMetrics(ref, oldJob) - refs, _ := store.Refs() + c.ProcessJobMetrics(ctx, ref, oldJob) + refs, _ := c.Store.Refs(ctx) assert.Equal(t, schemas.Jobs{ "foo": oldJob, }, refs[ref.Key()].LatestJobs) // Update the ref - processJobMetrics(ref, newJob) - refs, _ = store.Refs() + c.ProcessJobMetrics(ctx, ref, newJob) + refs, _ = c.Store.Refs(ctx) assert.Equal(t, schemas.Jobs{ "foo": newJob, }, refs[ref.Key()].LatestJobs) // Check if all the metrics exist - metrics, _ := store.Metrics() + metrics, _ := c.Store.Metrics(ctx) labels := map[string]string{ - "project": ref.ProjectName, - "topics": ref.Topics, + "project": ref.Project.Name, + "topics": ref.Project.Topics, "ref": ref.Name, "kind": string(ref.Kind), "variables": 
ref.LatestPipeline.Variables, + "source": ref.LatestPipeline.Source, "stage": newJob.Stage, + "tag_list": newJob.TagList, + "failure_reason": newJob.FailureReason, "job_name": newJob.Name, - "runner_description": ref.PullPipelineJobsRunnerDescriptionAggregationRegexp, + "runner_description": ref.Project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp, } lastJobRunID := schemas.Metric{ diff --git a/pkg/controller/metadata.go b/pkg/controller/metadata.go new file mode 100644 index 00000000..f2485c2f --- /dev/null +++ b/pkg/controller/metadata.go @@ -0,0 +1,24 @@ +package controller + +import ( + "context" + + goGitlab "github.com/xanzy/go-gitlab" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" +) + +func (c *Controller) GetGitLabMetadata(ctx context.Context) error { + options := []goGitlab.RequestOptionFunc{goGitlab.WithContext(ctx)} + + metadata, _, err := c.Gitlab.Metadata.GetMetadata(options...) + if err != nil { + return err + } + + if metadata.Version != "" { + c.Gitlab.UpdateVersion(gitlab.NewGitLabVersion(metadata.Version)) + } + + return nil +} diff --git a/pkg/controller/metadata_test.go b/pkg/controller/metadata_test.go new file mode 100644 index 00000000..5fe830f0 --- /dev/null +++ b/pkg/controller/metadata_test.go @@ -0,0 +1,61 @@ +package controller + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" +) + +func TestGetGitLabMetadataSuccess(t *testing.T) { + tests := []struct { + name string + data string + expectedVersion gitlab.GitLabVersion + }{ + { + name: "successful parse", + data: ` +{ +"version":"16.7.0-pre", +"revision":"3fe364fe754", +"kas":{ + "enabled":true, + "externalUrl":"wss://kas.gitlab.com", + "version":"v16.7.0-rc2" +}, +"enterprise":true +} +`, + expectedVersion: gitlab.NewGitLabVersion("v16.7.0-pre"), + }, + { + name: "unsuccessful parse", + 
data: ` +{ +"revision":"3fe364fe754" +} +`, + expectedVersion: gitlab.NewGitLabVersion(""), + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/metadata", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, tc.data) + }) + + assert.NoError(t, c.GetGitLabMetadata(ctx)) + assert.Equal(t, tc.expectedVersion, c.Gitlab.Version()) + }) + } +} diff --git a/pkg/controller/metrics.go b/pkg/controller/metrics.go new file mode 100644 index 00000000..a28ae040 --- /dev/null +++ b/pkg/controller/metrics.go @@ -0,0 +1,239 @@ +package controller + +import ( + "context" + "fmt" + "reflect" + + "github.com/prometheus/client_golang/prometheus" + log "github.com/sirupsen/logrus" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/store" +) + +// Registry wraps a pointer of prometheus.Registry. +type Registry struct { + *prometheus.Registry + + InternalCollectors struct { + CurrentlyQueuedTasksCount prometheus.Collector + EnvironmentsCount prometheus.Collector + ExecutedTasksCount prometheus.Collector + GitLabAPIRequestsCount prometheus.Collector + GitlabAPIRequestsRemaining prometheus.Collector + GitlabAPIRequestsLimit prometheus.Collector + MetricsCount prometheus.Collector + ProjectsCount prometheus.Collector + RefsCount prometheus.Collector + } + + Collectors RegistryCollectors +} + +// RegistryCollectors .. +type RegistryCollectors map[schemas.MetricKind]prometheus.Collector + +// NewRegistry initialize a new registry. 
+func NewRegistry(ctx context.Context) *Registry { + r := &Registry{ + Registry: prometheus.NewRegistry(), + Collectors: RegistryCollectors{ + schemas.MetricKindCoverage: NewCollectorCoverage(), + schemas.MetricKindDurationSeconds: NewCollectorDurationSeconds(), + schemas.MetricKindEnvironmentBehindCommitsCount: NewCollectorEnvironmentBehindCommitsCount(), + schemas.MetricKindEnvironmentBehindDurationSeconds: NewCollectorEnvironmentBehindDurationSeconds(), + schemas.MetricKindEnvironmentDeploymentCount: NewCollectorEnvironmentDeploymentCount(), + schemas.MetricKindEnvironmentDeploymentDurationSeconds: NewCollectorEnvironmentDeploymentDurationSeconds(), + schemas.MetricKindEnvironmentDeploymentJobID: NewCollectorEnvironmentDeploymentJobID(), + schemas.MetricKindEnvironmentDeploymentStatus: NewCollectorEnvironmentDeploymentStatus(), + schemas.MetricKindEnvironmentDeploymentTimestamp: NewCollectorEnvironmentDeploymentTimestamp(), + schemas.MetricKindEnvironmentInformation: NewCollectorEnvironmentInformation(), + schemas.MetricKindID: NewCollectorID(), + schemas.MetricKindJobArtifactSizeBytes: NewCollectorJobArtifactSizeBytes(), + schemas.MetricKindJobDurationSeconds: NewCollectorJobDurationSeconds(), + schemas.MetricKindJobID: NewCollectorJobID(), + schemas.MetricKindJobQueuedDurationSeconds: NewCollectorJobQueuedDurationSeconds(), + schemas.MetricKindJobRunCount: NewCollectorJobRunCount(), + schemas.MetricKindJobStatus: NewCollectorJobStatus(), + schemas.MetricKindJobTimestamp: NewCollectorJobTimestamp(), + schemas.MetricKindQueuedDurationSeconds: NewCollectorQueuedDurationSeconds(), + schemas.MetricKindRunCount: NewCollectorRunCount(), + schemas.MetricKindStatus: NewCollectorStatus(), + schemas.MetricKindTimestamp: NewCollectorTimestamp(), + schemas.MetricKindTestReportTotalTime: NewCollectorTestReportTotalTime(), + schemas.MetricKindTestReportTotalCount: NewCollectorTestReportTotalCount(), + schemas.MetricKindTestReportSuccessCount: 
NewCollectorTestReportSuccessCount(), + schemas.MetricKindTestReportFailedCount: NewCollectorTestReportFailedCount(), + schemas.MetricKindTestReportSkippedCount: NewCollectorTestReportSkippedCount(), + schemas.MetricKindTestReportErrorCount: NewCollectorTestReportErrorCount(), + schemas.MetricKindTestSuiteTotalTime: NewCollectorTestSuiteTotalTime(), + schemas.MetricKindTestSuiteTotalCount: NewCollectorTestSuiteTotalCount(), + schemas.MetricKindTestSuiteSuccessCount: NewCollectorTestSuiteSuccessCount(), + schemas.MetricKindTestSuiteFailedCount: NewCollectorTestSuiteFailedCount(), + schemas.MetricKindTestSuiteSkippedCount: NewCollectorTestSuiteSkippedCount(), + schemas.MetricKindTestSuiteErrorCount: NewCollectorTestSuiteErrorCount(), + schemas.MetricKindTestCaseExecutionTime: NewCollectorTestCaseExecutionTime(), + schemas.MetricKindTestCaseStatus: NewCollectorTestCaseStatus(), + }, + } + + r.RegisterInternalCollectors() + + if err := r.RegisterCollectors(); err != nil { + log.WithContext(ctx). + Fatal(err) + } + + return r +} + +// RegisterInternalCollectors declare our internal collectors to the registry. 
+func (r *Registry) RegisterInternalCollectors() { + r.InternalCollectors.CurrentlyQueuedTasksCount = NewInternalCollectorCurrentlyQueuedTasksCount() + r.InternalCollectors.EnvironmentsCount = NewInternalCollectorEnvironmentsCount() + r.InternalCollectors.ExecutedTasksCount = NewInternalCollectorExecutedTasksCount() + r.InternalCollectors.GitLabAPIRequestsCount = NewInternalCollectorGitLabAPIRequestsCount() + r.InternalCollectors.GitlabAPIRequestsRemaining = NewInternalCollectorGitLabAPIRequestsRemaining() + r.InternalCollectors.GitlabAPIRequestsLimit = NewInternalCollectorGitLabAPIRequestsLimit() + r.InternalCollectors.MetricsCount = NewInternalCollectorMetricsCount() + r.InternalCollectors.ProjectsCount = NewInternalCollectorProjectsCount() + r.InternalCollectors.RefsCount = NewInternalCollectorRefsCount() + + _ = r.Register(r.InternalCollectors.CurrentlyQueuedTasksCount) + _ = r.Register(r.InternalCollectors.EnvironmentsCount) + _ = r.Register(r.InternalCollectors.ExecutedTasksCount) + _ = r.Register(r.InternalCollectors.GitLabAPIRequestsCount) + _ = r.Register(r.InternalCollectors.GitlabAPIRequestsRemaining) + _ = r.Register(r.InternalCollectors.GitlabAPIRequestsLimit) + _ = r.Register(r.InternalCollectors.MetricsCount) + _ = r.Register(r.InternalCollectors.ProjectsCount) + _ = r.Register(r.InternalCollectors.RefsCount) +} + +// ExportInternalMetrics .. 
+func (r *Registry) ExportInternalMetrics( + ctx context.Context, + g *gitlab.Client, + s store.Store, +) (err error) { + var ( + currentlyQueuedTasks uint64 + environmentsCount int64 + executedTasksCount uint64 + metricsCount int64 + projectsCount int64 + refsCount int64 + ) + + currentlyQueuedTasks, err = s.CurrentlyQueuedTasksCount(ctx) + if err != nil { + return + } + + executedTasksCount, err = s.ExecutedTasksCount(ctx) + if err != nil { + return + } + + projectsCount, err = s.ProjectsCount(ctx) + if err != nil { + return + } + + environmentsCount, err = s.EnvironmentsCount(ctx) + if err != nil { + return + } + + refsCount, err = s.RefsCount(ctx) + if err != nil { + return + } + + metricsCount, err = s.MetricsCount(ctx) + if err != nil { + return + } + + r.InternalCollectors.CurrentlyQueuedTasksCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(currentlyQueuedTasks)) + r.InternalCollectors.EnvironmentsCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(environmentsCount)) + r.InternalCollectors.ExecutedTasksCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(executedTasksCount)) + r.InternalCollectors.GitLabAPIRequestsCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(g.RequestsCounter.Load())) + r.InternalCollectors.GitlabAPIRequestsRemaining.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(g.RequestsRemaining)) + r.InternalCollectors.GitlabAPIRequestsLimit.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(g.RequestsLimit)) + r.InternalCollectors.MetricsCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(metricsCount)) + r.InternalCollectors.ProjectsCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(projectsCount)) + r.InternalCollectors.RefsCount.(*prometheus.GaugeVec).With(prometheus.Labels{}).Set(float64(refsCount)) + + return +} + +// RegisterCollectors add all our metrics to the registry. 
+func (r *Registry) RegisterCollectors() error { + for _, c := range r.Collectors { + if err := r.Register(c); err != nil { + return fmt.Errorf("could not add provided collector '%v' to the Prometheus registry: %v", c, err) + } + } + + return nil +} + +// GetCollector .. +func (r *Registry) GetCollector(kind schemas.MetricKind) prometheus.Collector { + return r.Collectors[kind] +} + +// ExportMetrics .. +func (r *Registry) ExportMetrics(metrics schemas.Metrics) { + for _, m := range metrics { + switch c := r.GetCollector(m.Kind).(type) { + case *prometheus.GaugeVec: + c.With(m.Labels).Set(m.Value) + case *prometheus.CounterVec: + c.With(m.Labels).Add(m.Value) + default: + log.Errorf("unsupported collector type : %v", reflect.TypeOf(c)) + } + } +} + +func emitStatusMetric(ctx context.Context, s store.Store, metricKind schemas.MetricKind, labelValues map[string]string, statuses []string, status string, sparseMetrics bool) { + // Moved into separate function to reduce cyclomatic complexity + // List of available statuses from the API spec + // ref: https://docs.gitlab.com/ee/api/jobs.html#list-pipeline-jobs + for _, currentStatus := range statuses { + var ( + value float64 + statusLabels = make(map[string]string) + ) + + for k, v := range labelValues { + statusLabels[k] = v + } + + statusLabels["status"] = currentStatus + + statusMetric := schemas.Metric{ + Kind: metricKind, + Labels: statusLabels, + Value: value, + } + + if currentStatus == status { + statusMetric.Value = 1 + } else { + if sparseMetrics { + storeDelMetric(ctx, s, statusMetric) + + continue + } + + statusMetric.Value = 0 + } + + storeSetMetric(ctx, s, statusMetric) + } +} diff --git a/pkg/exporter/metrics_test.go b/pkg/controller/metrics_test.go similarity index 67% rename from pkg/exporter/metrics_test.go rename to pkg/controller/metrics_test.go index 5058543e..1e686534 100644 --- a/pkg/exporter/metrics_test.go +++ b/pkg/controller/metrics_test.go @@ -1,67 +1,77 @@ -package exporter +package 
controller import ( + "context" "net/http" "net/http/httptest" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestNewRegistry(t *testing.T) { - r := NewRegistry() + r := NewRegistry(context.Background()) assert.NotNil(t, r.Registry) assert.NotNil(t, r.Collectors) } -// introduce a test to check the /metrics endpoint body +// introduce a test to check the /metrics endpoint body. func TestMetricsHandler(t *testing.T) { - resetGlobalValues() + _, c, _, srv := newTestController(config.Config{}) + srv.Close() w := httptest.NewRecorder() r := httptest.NewRequest(http.MethodGet, "/", nil) - MetricsHandler(w, r) - assert.Equal(t, http.StatusOK, w.Result().StatusCode) + c.MetricsHandler(w, r) + // TODO: Find a way to see if expected metrics are present + assert.Equal(t, http.StatusOK, w.Result().StatusCode) } func TestRegistryGetCollector(t *testing.T) { - r := NewRegistry() + r := NewRegistry(context.Background()) assert.Equal(t, r.Collectors[schemas.MetricKindCoverage], r.GetCollector(schemas.MetricKindCoverage)) assert.Nil(t, r.GetCollector(150)) } -func TestExportMetrics(t *testing.T) { - resetGlobalValues() +func TestExportMetrics(_ *testing.T) { + r := NewRegistry(context.Background()) - r := NewRegistry() - - store.SetMetric(schemas.Metric{ + m1 := schemas.Metric{ Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{ "project": "foo", "topics": "alpha", "ref": "bar", "kind": "branch", + "source": "schedule", "variables": "beta", }, Value: float64(107.7), - }) + } - store.SetMetric(schemas.Metric{ + m2 := schemas.Metric{ Kind: schemas.MetricKindRunCount, Labels: prometheus.Labels{ "project": "foo", "topics": "alpha", "ref": "bar", "kind": "branch", + "source": "schedule", "variables": "beta", }, Value: 
float64(10), - }) + } + + metrics := schemas.Metrics{ + m1.Key(): m1, + m2.Key(): m2, + } - assert.NoError(t, r.ExportMetrics()) // TODO: Assert that we have the correct metrics being rendered by the exporter + r.ExportMetrics(metrics) } diff --git a/pkg/controller/pipelines.go b/pkg/controller/pipelines.go new file mode 100644 index 00000000..81734343 --- /dev/null +++ b/pkg/controller/pipelines.go @@ -0,0 +1,335 @@ +package controller + +import ( + "context" + "fmt" + "reflect" + + log "github.com/sirupsen/logrus" + goGitlab "github.com/xanzy/go-gitlab" + "golang.org/x/exp/slices" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +// PullRefMetrics .. +func (c *Controller) PullRefMetrics(ctx context.Context, ref schemas.Ref) error { + finishedStatusesList := []string{ + "success", + "failed", + "skipped", + "cancelled", + } + + // At scale, the scheduled ref may be behind the actual state being stored + // to avoid issues, we refresh it from the store before manipulating it + if err := c.Store.GetRef(ctx, &ref); err != nil { + return err + } + + logFields := log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "ref-kind": ref.Kind, + } + + // We need a different syntax if the ref is a merge-request + var refName string + if ref.Kind == schemas.RefKindMergeRequest { + refName = fmt.Sprintf("refs/merge-requests/%s/head", ref.Name) + } else { + refName = ref.Name + } + + pipelines, _, err := c.Gitlab.GetProjectPipelines(ctx, ref.Project.Name, &goGitlab.ListProjectPipelinesOptions{ + // We only need the most recent pipeline + ListOptions: goGitlab.ListOptions{ + PerPage: 1, + Page: 1, + }, + Ref: &refName, + }) + if err != nil { + return fmt.Errorf("error fetching project pipelines for %s: %v", ref.Project.Name, err) + } + + if len(pipelines) == 0 { + log.WithFields(logFields).Debug("could not find any pipeline for the ref") + + return nil + } + + pipeline, err := c.Gitlab.GetRefPipeline(ctx, ref, pipelines[0].ID) + if err != 
nil { + return err + } + + if ref.LatestPipeline.ID == 0 || !reflect.DeepEqual(pipeline, ref.LatestPipeline) { + formerPipeline := ref.LatestPipeline + ref.LatestPipeline = pipeline + + // fetch pipeline variables + if ref.Project.Pull.Pipeline.Variables.Enabled { + ref.LatestPipeline.Variables, err = c.Gitlab.GetRefPipelineVariablesAsConcatenatedString(ctx, ref) + if err != nil { + return err + } + } + + // Update the ref in the store + if err = c.Store.SetRef(ctx, ref); err != nil { + return err + } + + labels := ref.DefaultLabelsValues() + + // If the metric does not exist yet, start with 0 instead of 1 + // this could cause some false positives in prometheus + // when restarting the exporter otherwise + runCount := schemas.Metric{ + Kind: schemas.MetricKindRunCount, + Labels: labels, + } + + storeGetMetric(ctx, c.Store, &runCount) + + if formerPipeline.ID != 0 && formerPipeline.ID != ref.LatestPipeline.ID { + runCount.Value++ + } + + storeSetMetric(ctx, c.Store, runCount) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: labels, + Value: pipeline.Coverage, + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindID, + Labels: labels, + Value: float64(pipeline.ID), + }) + + emitStatusMetric( + ctx, + c.Store, + schemas.MetricKindStatus, + labels, + statusesList[:], + pipeline.Status, + ref.Project.OutputSparseStatusMetrics, + ) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindDurationSeconds, + Labels: labels, + Value: pipeline.DurationSeconds, + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindQueuedDurationSeconds, + Labels: labels, + Value: pipeline.QueuedDurationSeconds, + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTimestamp, + Labels: labels, + Value: pipeline.Timestamp, + }) + + if ref.Project.Pull.Pipeline.Jobs.Enabled { + if err := c.PullRefPipelineJobsMetrics(ctx, ref); err != nil { + return err + 
} + } + } else { + if err := c.PullRefMostRecentJobsMetrics(ctx, ref); err != nil { + return err + } + } + + // fetch pipeline test report + if ref.Project.Pull.Pipeline.TestReports.Enabled && slices.Contains(finishedStatusesList, ref.LatestPipeline.Status) { + ref.LatestPipeline.TestReport, err = c.Gitlab.GetRefPipelineTestReport(ctx, ref) + if err != nil { + return err + } + + c.ProcessTestReportMetrics(ctx, ref, ref.LatestPipeline.TestReport) + + for _, ts := range ref.LatestPipeline.TestReport.TestSuites { + c.ProcessTestSuiteMetrics(ctx, ref, ts) + // fetch pipeline test cases + if ref.Project.Pull.Pipeline.TestReports.TestCases.Enabled { + for _, tc := range ts.TestCases { + c.ProcessTestCaseMetrics(ctx, ref, ts, tc) + } + } + } + } + + return nil +} + +// ProcessTestReportMetrics .. +func (c *Controller) ProcessTestReportMetrics(ctx context.Context, ref schemas.Ref, tr schemas.TestReport) { + testReportLogFields := log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + } + + labels := ref.DefaultLabelsValues() + + // Refresh ref state from the store + if err := c.Store.GetRef(ctx, &ref); err != nil { + log.WithContext(ctx). + WithFields(testReportLogFields). + WithError(err). 
+ Error("getting ref from the store") + + return + } + + log.WithFields(testReportLogFields).Trace("processing test report metrics") + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestReportErrorCount, + Labels: labels, + Value: float64(tr.ErrorCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestReportFailedCount, + Labels: labels, + Value: float64(tr.FailedCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestReportSkippedCount, + Labels: labels, + Value: float64(tr.SkippedCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestReportSuccessCount, + Labels: labels, + Value: float64(tr.SuccessCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestReportTotalCount, + Labels: labels, + Value: float64(tr.TotalCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestReportTotalTime, + Labels: labels, + Value: float64(tr.TotalTime), + }) +} + +// ProcessTestSuiteMetrics .. +func (c *Controller) ProcessTestSuiteMetrics(ctx context.Context, ref schemas.Ref, ts schemas.TestSuite) { + testSuiteLogFields := log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "test-suite-name": ts.Name, + } + + labels := ref.DefaultLabelsValues() + labels["test_suite_name"] = ts.Name + + // Refresh ref state from the store + if err := c.Store.GetRef(ctx, &ref); err != nil { + log.WithContext(ctx). + WithFields(testSuiteLogFields). + WithError(err). 
+ Error("getting ref from the store") + + return + } + + log.WithFields(testSuiteLogFields).Trace("processing test suite metrics") + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestSuiteErrorCount, + Labels: labels, + Value: float64(ts.ErrorCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestSuiteFailedCount, + Labels: labels, + Value: float64(ts.FailedCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestSuiteSkippedCount, + Labels: labels, + Value: float64(ts.SkippedCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestSuiteSuccessCount, + Labels: labels, + Value: float64(ts.SuccessCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestSuiteTotalCount, + Labels: labels, + Value: float64(ts.TotalCount), + }) + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestSuiteTotalTime, + Labels: labels, + Value: ts.TotalTime, + }) +} + +func (c *Controller) ProcessTestCaseMetrics(ctx context.Context, ref schemas.Ref, ts schemas.TestSuite, tc schemas.TestCase) { + testCaseLogFields := log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "test-suite-name": ts.Name, + "test-case-name": tc.Name, + "test-case-status": tc.Status, + } + + labels := ref.DefaultLabelsValues() + labels["test_suite_name"] = ts.Name + labels["test_case_name"] = tc.Name + labels["test_case_classname"] = tc.Classname + + // Get the existing ref from the store + if err := c.Store.GetRef(ctx, &ref); err != nil { + log.WithContext(ctx). + WithFields(testCaseLogFields). + WithError(err). 
+ Error("getting ref from the store") + + return + } + + log.WithFields(testCaseLogFields).Trace("processing test case metrics") + + storeSetMetric(ctx, c.Store, schemas.Metric{ + Kind: schemas.MetricKindTestCaseExecutionTime, + Labels: labels, + Value: tc.ExecutionTime, + }) + + emitStatusMetric( + ctx, + c.Store, + schemas.MetricKindTestCaseStatus, + labels, + statusesList[:], + tc.Status, + ref.Project.OutputSparseStatusMetrics, + ) +} diff --git a/pkg/controller/pipelines_test.go b/pkg/controller/pipelines_test.go new file mode 100644 index 00000000..a2d644c6 --- /dev/null +++ b/pkg/controller/pipelines_test.go @@ -0,0 +1,294 @@ +package controller + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func TestPullRefMetricsSucceed(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "bar", r.URL.Query().Get("ref")) + fmt.Fprint(w, `[{"id":1}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `{"id":1,"created_at":"2016-08-11T11:27:00.085Z", "started_at":"2016-08-11T11:28:00.085Z", + "duration":300,"queued_duration":60,"status":"running","coverage":"30.2","source":"schedule"}`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/variables", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[{"key":"foo","value":"bar"}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, 
"test_suites": [{"name": "Secure", "total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": null, "stack_trace": null}]}]}`) + }) + + // Metrics pull shall succeed + p := schemas.NewProject("foo") + p.Pull.Pipeline.Variables.Enabled = true + p.Pull.Pipeline.TestReports.Enabled = true + p.Pull.Pipeline.TestReports.TestCases.Enabled = true + + assert.NoError(t, c.PullRefMetrics( + ctx, + schemas.NewRef( + p, + schemas.RefKindBranch, + "bar", + ))) + + // Check if all the metrics exist + metrics, _ := c.Store.Metrics(ctx) + labels := map[string]string{ + "kind": string(schemas.RefKindBranch), + "project": "foo", + "ref": "bar", + "topics": "", + "variables": "foo:bar", + "source": "schedule", + } + + runCount := schemas.Metric{ + Kind: schemas.MetricKindRunCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, runCount, metrics[runCount.Key()]) + + coverage := schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: labels, + Value: 30.2, + } + assert.Equal(t, coverage, metrics[coverage.Key()]) + + runID := schemas.Metric{ + Kind: schemas.MetricKindID, + Labels: labels, + Value: 1, + } + assert.Equal(t, runID, metrics[runID.Key()]) + + queued := schemas.Metric{ + Kind: schemas.MetricKindQueuedDurationSeconds, + Labels: labels, + Value: 60, + } + assert.Equal(t, queued, metrics[queued.Key()]) + + labels["status"] = "running" + status := schemas.Metric{ + Kind: schemas.MetricKindStatus, + Labels: labels, + Value: 1, + } + assert.Equal(t, status, metrics[status.Key()]) +} + +func TestPullRefTestReportMetrics(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "bar", 
r.URL.Query().Get("ref")) + fmt.Fprint(w, `[{"id":1}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `{"id":1,"created_at":"2016-08-11T11:27:00.085Z", "started_at":"2016-08-11T11:28:00.085Z", + "duration":300,"queued_duration":60,"status":"success","coverage":"30.2","source":"schedule"}`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/variables", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[{"key":"foo","value":"bar"}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": null, "stack_trace": null}]}]}`) + }) + + // Metrics pull shall succeed + p := schemas.NewProject("foo") + p.Pull.Pipeline.Variables.Enabled = true + p.Pull.Pipeline.TestReports.Enabled = true + p.Pull.Pipeline.TestReports.TestCases.Enabled = true + + assert.NoError(t, c.PullRefMetrics( + ctx, + schemas.NewRef( + p, + schemas.RefKindBranch, + "bar", + ))) + + // Check if all the metrics exist + metrics, _ := c.Store.Metrics(ctx) + labels := map[string]string{ + "kind": string(schemas.RefKindBranch), + "project": "foo", + "ref": "bar", + "topics": "", + "variables": "foo:bar", + "source": "schedule", + } + + trTotalTime := schemas.Metric{ + Kind: schemas.MetricKindTestReportTotalTime, + Labels: labels, + Value: 5, + } + assert.Equal(t, trTotalTime, metrics[trTotalTime.Key()]) + + trTotalCount := 
schemas.Metric{ + Kind: schemas.MetricKindTestReportTotalCount, + Labels: labels, + Value: 1, + } + assert.Equal(t, trTotalCount, metrics[trTotalCount.Key()]) + + trSuccessCount := schemas.Metric{ + Kind: schemas.MetricKindTestReportSuccessCount, + Labels: labels, + Value: 1, + } + assert.Equal(t, trSuccessCount, metrics[trSuccessCount.Key()]) + + trFailedCount := schemas.Metric{ + Kind: schemas.MetricKindTestReportFailedCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, trFailedCount, metrics[trFailedCount.Key()]) + + trSkippedCount := schemas.Metric{ + Kind: schemas.MetricKindTestReportSkippedCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, trSkippedCount, metrics[trSkippedCount.Key()]) + + trErrorCount := schemas.Metric{ + Kind: schemas.MetricKindTestReportErrorCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, trErrorCount, metrics[trErrorCount.Key()]) + + labels["test_suite_name"] = "Secure" + + tsTotalTime := schemas.Metric{ + Kind: schemas.MetricKindTestSuiteTotalTime, + Labels: labels, + Value: 5, + } + assert.Equal(t, tsTotalTime, metrics[tsTotalTime.Key()]) + + tsTotalCount := schemas.Metric{ + Kind: schemas.MetricKindTestSuiteTotalCount, + Labels: labels, + Value: 1, + } + assert.Equal(t, tsTotalCount, metrics[tsTotalCount.Key()]) + + tsSuccessCount := schemas.Metric{ + Kind: schemas.MetricKindTestSuiteSuccessCount, + Labels: labels, + Value: 1, + } + assert.Equal(t, tsSuccessCount, metrics[tsSuccessCount.Key()]) + + tsFailedCount := schemas.Metric{ + Kind: schemas.MetricKindTestSuiteFailedCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, tsFailedCount, metrics[tsFailedCount.Key()]) + + tsSkippedCount := schemas.Metric{ + Kind: schemas.MetricKindTestSuiteSkippedCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, tsSkippedCount, metrics[tsSkippedCount.Key()]) + + tsErrorCount := schemas.Metric{ + Kind: schemas.MetricKindTestSuiteErrorCount, + Labels: labels, + Value: 0, + } + assert.Equal(t, tsErrorCount, 
metrics[tsErrorCount.Key()]) + + labels["test_case_name"] = "Security Reports can create an auto-remediation MR" + labels["test_case_classname"] = "vulnerability_management_spec" + + tcExecutionTime := schemas.Metric{ + Kind: schemas.MetricKindTestCaseExecutionTime, + Labels: labels, + Value: 5, + } + assert.Equal(t, tcExecutionTime, metrics[tcExecutionTime.Key()]) + + labels["status"] = "success" + tcStatus := schemas.Metric{ + Kind: schemas.MetricKindTestCaseStatus, + Labels: labels, + Value: 1, + } + assert.Equal(t, tcStatus, metrics[tcStatus.Key()]) +} + +func TestPullRefMetricsMergeRequestPipeline(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "refs/merge-requests/1234/head", r.URL.Query().Get("ref")) + fmt.Fprint(w, `[{"id":1}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `{"id":1,"updated_at":"2016-08-11T11:28:34.085Z","duration":300,"status":"running","coverage":"30.2","source":"schedule"}`) + }) + + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/pipelines/1/variables"), + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[{"key":"foo","value":"bar"}]`) + }) + + // Metrics pull shall succeed + p := schemas.NewProject("foo") + p.Pull.Pipeline.Variables.Enabled = true + + assert.NoError(t, c.PullRefMetrics( + ctx, + schemas.NewRef( + p, + schemas.RefKindMergeRequest, + "1234", + ))) +} diff --git a/pkg/controller/projects.go b/pkg/controller/projects.go new file mode 100644 index 00000000..ff14fed3 --- /dev/null +++ b/pkg/controller/projects.go @@ -0,0 +1,80 @@ +package controller + +import ( + "context" + + log "github.com/sirupsen/logrus" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + 
"github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +// PullProject .. +func (c *Controller) PullProject(ctx context.Context, name string, pull config.ProjectPull) error { + gp, err := c.Gitlab.GetProject(ctx, name) + if err != nil { + return err + } + + p := schemas.NewProject(gp.PathWithNamespace) + p.Pull = pull + + projectExists, err := c.Store.ProjectExists(ctx, p.Key()) + if err != nil { + return err + } + + if !projectExists { + log.WithFields(log.Fields{ + "project-name": p.Name, + }).Info("discovered new project") + + if err := c.Store.SetProject(ctx, p); err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + c.ScheduleTask(ctx, schemas.TaskTypePullRefsFromProject, string(p.Key()), p) + c.ScheduleTask(ctx, schemas.TaskTypePullEnvironmentsFromProject, string(p.Key()), p) + } + + return nil +} + +// PullProjectsFromWildcard .. +func (c *Controller) PullProjectsFromWildcard(ctx context.Context, w config.Wildcard) error { + foundProjects, err := c.Gitlab.ListProjects(ctx, w) + if err != nil { + return err + } + + for _, p := range foundProjects { + projectExists, err := c.Store.ProjectExists(ctx, p.Key()) + if err != nil { + return err + } + + if !projectExists { + log.WithFields(log.Fields{ + "wildcard-search": w.Search, + "wildcard-owner-kind": w.Owner.Kind, + "wildcard-owner-name": w.Owner.Name, + "wildcard-owner-include-subgroups": w.Owner.IncludeSubgroups, + "wildcard-archived": w.Archived, + "project-name": p.Name, + }).Info("discovered new project") + + if err := c.Store.SetProject(ctx, p); err != nil { + log.WithContext(ctx). + WithError(err). 
+ Error() + } + + c.ScheduleTask(ctx, schemas.TaskTypePullRefsFromProject, string(p.Key()), p) + c.ScheduleTask(ctx, schemas.TaskTypePullEnvironmentsFromProject, string(p.Key()), p) + } + } + + return nil +} diff --git a/pkg/controller/projects_test.go b/pkg/controller/projects_test.go new file mode 100644 index 00000000..52522ac4 --- /dev/null +++ b/pkg/controller/projects_test.go @@ -0,0 +1,33 @@ +package controller + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func TestPullProjectsFromWildcard(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"id":2,"path_with_namespace":"bar","jobs_enabled":true}]`) + }) + + w := config.NewWildcard() + assert.NoError(t, c.PullProjectsFromWildcard(ctx, w)) + + projects, _ := c.Store.Projects(ctx) + p1 := schemas.NewProject("bar") + + expectedProjects := schemas.Projects{ + p1.Key(): p1, + } + assert.Equal(t, expectedProjects, projects) +} diff --git a/pkg/controller/refs.go b/pkg/controller/refs.go new file mode 100644 index 00000000..9b57d04b --- /dev/null +++ b/pkg/controller/refs.go @@ -0,0 +1,107 @@ +package controller + +import ( + "context" + + "dario.cat/mergo" + log "github.com/sirupsen/logrus" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +// GetRefs .. 
+func (c *Controller) GetRefs(ctx context.Context, p schemas.Project) ( + refs schemas.Refs, + err error, +) { + var pulledRefs schemas.Refs + + refs = make(schemas.Refs) + + if p.Pull.Refs.Branches.Enabled { + // If one of these parameter is set, we will need to fetch the branches from the + // pipelines API instead of the branches one + if !p.Pull.Refs.Branches.ExcludeDeleted || + p.Pull.Refs.Branches.MostRecent > 0 || + p.Pull.Refs.Branches.MaxAgeSeconds > 0 { + if pulledRefs, err = c.Gitlab.GetRefsFromPipelines(ctx, p, schemas.RefKindBranch); err != nil { + return + } + } else { + if pulledRefs, err = c.Gitlab.GetProjectBranches(ctx, p); err != nil { + return + } + } + + if err = mergo.Merge(&refs, pulledRefs); err != nil { + return + } + } + + if p.Pull.Refs.Tags.Enabled { + // If one of these parameter is set, we will need to fetch the tags from the + // pipelines API instead of the tags one + if !p.Pull.Refs.Tags.ExcludeDeleted || + p.Pull.Refs.Tags.MostRecent > 0 || + p.Pull.Refs.Tags.MaxAgeSeconds > 0 { + if pulledRefs, err = c.Gitlab.GetRefsFromPipelines(ctx, p, schemas.RefKindTag); err != nil { + return + } + } else { + if pulledRefs, err = c.Gitlab.GetProjectTags(ctx, p); err != nil { + return + } + } + + if err = mergo.Merge(&refs, pulledRefs); err != nil { + return + } + } + + if p.Pull.Refs.MergeRequests.Enabled { + if pulledRefs, err = c.Gitlab.GetRefsFromPipelines( + ctx, + p, + schemas.RefKindMergeRequest, + ); err != nil { + return + } + + if err = mergo.Merge(&refs, pulledRefs); err != nil { + return + } + } + + return +} + +// PullRefsFromProject .. 
+func (c *Controller) PullRefsFromProject(ctx context.Context, p schemas.Project) error { + refs, err := c.GetRefs(ctx, p) + if err != nil { + return err + } + + for _, ref := range refs { + refExists, err := c.Store.RefExists(ctx, ref.Key()) + if err != nil { + return err + } + + if !refExists { + log.WithFields(log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "ref-kind": ref.Kind, + }).Info("discovered new ref") + + if err = c.Store.SetRef(ctx, ref); err != nil { + return err + } + + c.ScheduleTask(ctx, schemas.TaskTypePullRefMetrics, string(ref.Key()), ref) + } + } + + return nil +} diff --git a/pkg/controller/refs_test.go b/pkg/controller/refs_test.go new file mode 100644 index 00000000..34879306 --- /dev/null +++ b/pkg/controller/refs_test.go @@ -0,0 +1,81 @@ +package controller + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func TestGetRefs(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/foo/repository/branches", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name":"dev"},{"name":"main"}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/repository/tags", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name":"0.0.1"},{"name":"v0.0.2"}]`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"ref":"refs/merge-requests/1234/head"}]`) + }) + + p := schemas.NewProject("foo") + p.Pull.Refs.Branches.Regexp = `^m` + p.Pull.Refs.Tags.Regexp = `^v` + p.Pull.Refs.MergeRequests.Enabled = true + + foundRefs, err := c.GetRefs(ctx, p) + assert.NoError(t, err) + + ref1 := schemas.NewRef(p, schemas.RefKindBranch, "main") + ref2 := schemas.NewRef(p, schemas.RefKindTag, "v0.0.2") + ref3 := 
schemas.NewRef(p, schemas.RefKindMergeRequest, "1234") + expectedRefs := schemas.Refs{ + ref1.Key(): ref1, + ref2.Key(): ref2, + ref3.Key(): ref3, + } + assert.Equal(t, expectedRefs, foundRefs) +} + +func TestPullRefsFromProject(t *testing.T) { + ctx, c, mux, srv := newTestController(config.Config{}) + defer srv.Close() + + mux.HandleFunc("/api/v4/projects/foo", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `{"name":"foo"}`) + }) + + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/branches"), + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name":"main"},{"name":"nope"}]`) + }) + + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/tags"), + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[]`) + }) + + p1 := schemas.NewProject("foo") + assert.NoError(t, c.PullRefsFromProject(ctx, p1)) + + ref1 := schemas.NewRef(p1, schemas.RefKindBranch, "main") + expectedRefs := schemas.Refs{ + ref1.Key(): ref1, + } + + projectsRefs, _ := c.Store.Refs(ctx) + assert.Equal(t, expectedRefs, projectsRefs) +} diff --git a/pkg/controller/scheduler.go b/pkg/controller/scheduler.go new file mode 100644 index 00000000..ac6a0be7 --- /dev/null +++ b/pkg/controller/scheduler.go @@ -0,0 +1,482 @@ +package controller + +import ( + "context" + "strconv" + "time" + + "github.com/redis/go-redis/v9" + log "github.com/sirupsen/logrus" + "github.com/vmihailenco/taskq/memqueue/v4" + "github.com/vmihailenco/taskq/redisq/v4" + "github.com/vmihailenco/taskq/v4" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/store" +) + +// TaskController holds task related clients. 
+type TaskController struct { + Factory taskq.Factory + Queue taskq.Queue + TaskMap *taskq.TaskMap + TaskSchedulingMonitoring map[schemas.TaskType]*monitor.TaskSchedulingStatus +} + +// NewTaskController initializes and returns a new TaskController object. +func NewTaskController(ctx context.Context, r *redis.Client, maximumJobsQueueSize int) (t TaskController) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "controller:NewTaskController") + defer span.End() + + t.TaskMap = &taskq.TaskMap{} + + queueOptions := &taskq.QueueConfig{ + Name: "default", + PauseErrorsThreshold: 3, + Handler: t.TaskMap, + BufferSize: maximumJobsQueueSize, + } + + if r != nil { + t.Factory = redisq.NewFactory() + queueOptions.Redis = r + } else { + t.Factory = memqueue.NewFactory() + } + + t.Queue = t.Factory.RegisterQueue(queueOptions) + + // Purge the queue when we start + // I am only partially convinced this will not cause issues in HA fashion + if err := t.Queue.Purge(ctx); err != nil { + log.WithContext(ctx). + WithError(err). + Error("purging the pulling queue") + } + + if r != nil { + if err := t.Factory.StartConsumers(context.TODO()); err != nil { + log.WithContext(ctx). + WithError(err). + Fatal("starting consuming the task queue") + } + } + + t.TaskSchedulingMonitoring = make(map[schemas.TaskType]*monitor.TaskSchedulingStatus) + + return +} + +// TaskHandlerPullProject .. +func (c *Controller) TaskHandlerPullProject(ctx context.Context, name string, pull config.ProjectPull) error { + defer c.unqueueTask(ctx, schemas.TaskTypePullProject, name) + + return c.PullProject(ctx, name, pull) +} + +// TaskHandlerPullProjectsFromWildcard .. +func (c *Controller) TaskHandlerPullProjectsFromWildcard(ctx context.Context, id string, w config.Wildcard) error { + defer c.unqueueTask(ctx, schemas.TaskTypePullProjectsFromWildcard, id) + + return c.PullProjectsFromWildcard(ctx, w) +} + +// TaskHandlerPullEnvironmentsFromProject .. 
+func (c *Controller) TaskHandlerPullEnvironmentsFromProject(ctx context.Context, p schemas.Project) { + defer c.unqueueTask(ctx, schemas.TaskTypePullEnvironmentsFromProject, string(p.Key())) + + // On errors, we do not want to retry these tasks + if p.Pull.Environments.Enabled { + if err := c.PullEnvironmentsFromProject(ctx, p); err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": p.Name, + }). + WithError(err). + Warn("pulling environments from project") + } + } +} + +// TaskHandlerPullEnvironmentMetrics .. +func (c *Controller) TaskHandlerPullEnvironmentMetrics(ctx context.Context, env schemas.Environment) { + defer c.unqueueTask(ctx, schemas.TaskTypePullEnvironmentMetrics, string(env.Key())) + + // On errors, we do not want to retry these tasks + if err := c.PullEnvironmentMetrics(ctx, env); err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": env.ProjectName, + "environment-name": env.Name, + "environment-id": env.ID, + }). + WithError(err). + Warn("pulling environment metrics") + } +} + +// TaskHandlerPullRefsFromProject .. +func (c *Controller) TaskHandlerPullRefsFromProject(ctx context.Context, p schemas.Project) { + defer c.unqueueTask(ctx, schemas.TaskTypePullRefsFromProject, string(p.Key())) + + // On errors, we do not want to retry these tasks + if err := c.PullRefsFromProject(ctx, p); err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": p.Name, + }). + WithError(err). + Warn("pulling refs from project") + } +} + +// TaskHandlerPullRefMetrics .. +func (c *Controller) TaskHandlerPullRefMetrics(ctx context.Context, ref schemas.Ref) { + defer c.unqueueTask(ctx, schemas.TaskTypePullRefMetrics, string(ref.Key())) + + // On errors, we do not want to retry these tasks + if err := c.PullRefMetrics(ctx, ref); err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + }). + WithError(err). 
+ Warn("pulling ref metrics") + } +} + +// TaskHandlerPullProjectsFromWildcards .. +func (c *Controller) TaskHandlerPullProjectsFromWildcards(ctx context.Context) { + defer c.unqueueTask(ctx, schemas.TaskTypePullProjectsFromWildcards, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypePullProjectsFromWildcards) + + log.WithFields( + log.Fields{ + "wildcards-count": len(c.Config.Wildcards), + }, + ).Info("scheduling projects from wildcards pull") + + for id, w := range c.Config.Wildcards { + c.ScheduleTask(ctx, schemas.TaskTypePullProjectsFromWildcard, strconv.Itoa(id), strconv.Itoa(id), w) + } +} + +// TaskHandlerPullEnvironmentsFromProjects .. +func (c *Controller) TaskHandlerPullEnvironmentsFromProjects(ctx context.Context) { + defer c.unqueueTask(ctx, schemas.TaskTypePullEnvironmentsFromProjects, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypePullEnvironmentsFromProjects) + + projectsCount, err := c.Store.ProjectsCount(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + log.WithFields( + log.Fields{ + "projects-count": projectsCount, + }, + ).Info("scheduling environments from projects pull") + + projects, err := c.Store.Projects(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + for _, p := range projects { + c.ScheduleTask(ctx, schemas.TaskTypePullEnvironmentsFromProject, string(p.Key()), p) + } +} + +// TaskHandlerPullRefsFromProjects .. +func (c *Controller) TaskHandlerPullRefsFromProjects(ctx context.Context) { + defer c.unqueueTask(ctx, schemas.TaskTypePullRefsFromProjects, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypePullRefsFromProjects) + + projectsCount, err := c.Store.ProjectsCount(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). 
+ Error() + } + + log.WithFields( + log.Fields{ + "projects-count": projectsCount, + }, + ).Info("scheduling refs from projects pull") + + projects, err := c.Store.Projects(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + for _, p := range projects { + c.ScheduleTask(ctx, schemas.TaskTypePullRefsFromProject, string(p.Key()), p) + } +} + +// TaskHandlerPullMetrics .. +func (c *Controller) TaskHandlerPullMetrics(ctx context.Context) { + defer c.unqueueTask(ctx, schemas.TaskTypePullMetrics, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypePullMetrics) + + refsCount, err := c.Store.RefsCount(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + envsCount, err := c.Store.EnvironmentsCount(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + log.WithFields( + log.Fields{ + "environments-count": envsCount, + "refs-count": refsCount, + }, + ).Info("scheduling metrics pull") + + // ENVIRONMENTS + envs, err := c.Store.Environments(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + for _, env := range envs { + c.ScheduleTask(ctx, schemas.TaskTypePullEnvironmentMetrics, string(env.Key()), env) + } + + // REFS + refs, err := c.Store.Refs(ctx) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error() + } + + for _, ref := range refs { + c.ScheduleTask(ctx, schemas.TaskTypePullRefMetrics, string(ref.Key()), ref) + } +} + +// TaskHandlerGarbageCollectProjects .. +func (c *Controller) TaskHandlerGarbageCollectProjects(ctx context.Context) error { + defer c.unqueueTask(ctx, schemas.TaskTypeGarbageCollectProjects, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypeGarbageCollectProjects) + + return c.GarbageCollectProjects(ctx) +} + +// TaskHandlerGarbageCollectEnvironments .. 
+func (c *Controller) TaskHandlerGarbageCollectEnvironments(ctx context.Context) error { + defer c.unqueueTask(ctx, schemas.TaskTypeGarbageCollectEnvironments, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypeGarbageCollectEnvironments) + + return c.GarbageCollectEnvironments(ctx) +} + +// TaskHandlerGarbageCollectRefs .. +func (c *Controller) TaskHandlerGarbageCollectRefs(ctx context.Context) error { + defer c.unqueueTask(ctx, schemas.TaskTypeGarbageCollectRefs, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypeGarbageCollectRefs) + + return c.GarbageCollectRefs(ctx) +} + +// TaskHandlerGarbageCollectMetrics .. +func (c *Controller) TaskHandlerGarbageCollectMetrics(ctx context.Context) error { + defer c.unqueueTask(ctx, schemas.TaskTypeGarbageCollectMetrics, "_") + defer c.TaskController.monitorLastTaskScheduling(schemas.TaskTypeGarbageCollectMetrics) + + return c.GarbageCollectMetrics(ctx) +} + +// Schedule .. +func (c *Controller) Schedule(ctx context.Context, pull config.Pull, gc config.GarbageCollect) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "controller:Schedule") + defer span.End() + + go func() { + c.GetGitLabMetadata(ctx) + }() + + for tt, cfg := range map[schemas.TaskType]config.SchedulerConfig{ + schemas.TaskTypePullProjectsFromWildcards: config.SchedulerConfig(pull.ProjectsFromWildcards), + schemas.TaskTypePullEnvironmentsFromProjects: config.SchedulerConfig(pull.EnvironmentsFromProjects), + schemas.TaskTypePullRefsFromProjects: config.SchedulerConfig(pull.RefsFromProjects), + schemas.TaskTypePullMetrics: config.SchedulerConfig(pull.Metrics), + schemas.TaskTypeGarbageCollectProjects: config.SchedulerConfig(gc.Projects), + schemas.TaskTypeGarbageCollectEnvironments: config.SchedulerConfig(gc.Environments), + schemas.TaskTypeGarbageCollectRefs: config.SchedulerConfig(gc.Refs), + schemas.TaskTypeGarbageCollectMetrics: config.SchedulerConfig(gc.Metrics), + } { + if cfg.OnInit { + c.ScheduleTask(ctx, tt, 
"_") + } + + if cfg.Scheduled { + c.ScheduleTaskWithTicker(ctx, tt, cfg.IntervalSeconds) + } + + if c.Redis != nil { + c.ScheduleRedisSetKeepalive(ctx) + } + } +} + +// ScheduleRedisSetKeepalive will ensure that whilst the process is running, +// a key is periodically updated within Redis to let other instances know this +// one is alive and processing tasks. +func (c *Controller) ScheduleRedisSetKeepalive(ctx context.Context) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "controller:ScheduleRedisSetKeepalive") + defer span.End() + + go func(ctx context.Context) { + ticker := time.NewTicker(time.Duration(5) * time.Second) + + for { + select { + case <-ctx.Done(): + log.Info("stopped redis keepalive") + + return + case <-ticker.C: + if _, err := c.Store.(*store.Redis).SetKeepalive(ctx, c.UUID.String(), time.Duration(10)*time.Second); err != nil { + log.WithContext(ctx). + WithError(err). + Fatal("setting keepalive") + } + } + } + }(ctx) +} + +// ScheduleTask .. +func (c *Controller) ScheduleTask(ctx context.Context, tt schemas.TaskType, uniqueID string, args ...interface{}) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "controller:ScheduleTask") + defer span.End() + + span.SetAttributes(attribute.String("task_type", string(tt))) + span.SetAttributes(attribute.String("task_unique_id", uniqueID)) + + logFields := log.Fields{ + "task_type": tt, + "task_unique_id": uniqueID, + } + task := c.TaskController.TaskMap.Get(string(tt)) + msg := task.NewJob(args...) + + qlen, err := c.TaskController.Queue.Len(ctx) + if err != nil { + log.WithContext(ctx). + WithFields(logFields). + Warn("unable to read task queue length, skipping scheduling of task..") + + return + } + + if qlen >= c.TaskController.Queue.Options().BufferSize { + log.WithContext(ctx). + WithFields(logFields). 
+ Warn("queue buffer size exhausted, skipping scheduling of task..") + + return + } + + queued, err := c.Store.QueueTask(ctx, tt, uniqueID, c.UUID.String()) + if err != nil { + log.WithContext(ctx). + WithFields(logFields). + Warn("unable to declare the queueing, skipping scheduling of task..") + + return + } + + if !queued { + log.WithFields(logFields). + Debug("task already queued, skipping scheduling of task..") + + return + } + + go func(job *taskq.Job) { + if err := c.TaskController.Queue.AddJob(ctx, job); err != nil { + log.WithContext(ctx). + WithError(err). + Warn("scheduling task") + } + }(msg) +} + +// ScheduleTaskWithTicker .. +func (c *Controller) ScheduleTaskWithTicker(ctx context.Context, tt schemas.TaskType, intervalSeconds int) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "controller:ScheduleTaskWithTicker") + defer span.End() + span.SetAttributes(attribute.String("task_type", string(tt))) + span.SetAttributes(attribute.Int("interval_seconds", intervalSeconds)) + + if intervalSeconds <= 0 { + log.WithContext(ctx). + WithField("task", tt). 
+ Warn("task scheduling misconfigured, currently disabled") + + return + } + + log.WithFields(log.Fields{ + "task": tt, + "interval_seconds": intervalSeconds, + }).Debug("task scheduled") + + c.TaskController.monitorNextTaskScheduling(tt, intervalSeconds) + + go func(ctx context.Context) { + ticker := time.NewTicker(time.Duration(intervalSeconds) * time.Second) + + for { + select { + case <-ctx.Done(): + log.WithField("task", tt).Info("scheduling of task stopped") + + return + case <-ticker.C: + c.ScheduleTask(ctx, tt, "_") + c.TaskController.monitorNextTaskScheduling(tt, intervalSeconds) + } + } + }(ctx) +} + +func (tc *TaskController) monitorNextTaskScheduling(tt schemas.TaskType, duration int) { + if _, ok := tc.TaskSchedulingMonitoring[tt]; !ok { + tc.TaskSchedulingMonitoring[tt] = &monitor.TaskSchedulingStatus{} + } + + tc.TaskSchedulingMonitoring[tt].Next = time.Now().Add(time.Duration(duration) * time.Second) +} + +func (tc *TaskController) monitorLastTaskScheduling(tt schemas.TaskType) { + if _, ok := tc.TaskSchedulingMonitoring[tt]; !ok { + tc.TaskSchedulingMonitoring[tt] = &monitor.TaskSchedulingStatus{} + } + + tc.TaskSchedulingMonitoring[tt].Last = time.Now() +} diff --git a/pkg/controller/scheduler_test.go b/pkg/controller/scheduler_test.go new file mode 100644 index 00000000..ac3a8274 --- /dev/null +++ b/pkg/controller/scheduler_test.go @@ -0,0 +1,3 @@ +package controller + +// TODO diff --git a/pkg/controller/store.go b/pkg/controller/store.go new file mode 100644 index 00000000..398681f4 --- /dev/null +++ b/pkg/controller/store.go @@ -0,0 +1,44 @@ +package controller + +import ( + "context" + + log "github.com/sirupsen/logrus" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/store" +) + +func metricLogFields(m schemas.Metric) log.Fields { + return log.Fields{ + "metric-kind": m.Kind, + "metric-labels": m.Labels, + } +} + +func storeGetMetric(ctx context.Context, s 
store.Store, m *schemas.Metric) { + if err := s.GetMetric(ctx, m); err != nil { + log.WithContext(ctx). + WithFields(metricLogFields(*m)). + WithError(err). + Errorf("reading metric from the store") + } +} + +func storeSetMetric(ctx context.Context, s store.Store, m schemas.Metric) { + if err := s.SetMetric(ctx, m); err != nil { + log.WithContext(ctx). + WithFields(metricLogFields(m)). + WithError(err). + Errorf("writing metric from the store") + } +} + +func storeDelMetric(ctx context.Context, s store.Store, m schemas.Metric) { + if err := s.DelMetric(ctx, m.Key()); err != nil { + log.WithContext(ctx). + WithFields(metricLogFields(m)). + WithError(err). + Errorf("deleting metric from the store") + } +} diff --git a/pkg/exporter/store_test.go b/pkg/controller/store_test.go similarity index 81% rename from pkg/exporter/store_test.go rename to pkg/controller/store_test.go index 0c6f43e9..a1f00255 100644 --- a/pkg/exporter/store_test.go +++ b/pkg/controller/store_test.go @@ -1,12 +1,13 @@ -package exporter +package controller import ( "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/prometheus/client_golang/prometheus" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestMetricLogFields(t *testing.T) { @@ -24,9 +25,5 @@ func TestMetricLogFields(t *testing.T) { } func TestStoreGetSetDelMetric(_ *testing.T) { - resetGlobalValues() - - storeGetMetric(&schemas.Metric{}) - storeSetMetric(schemas.Metric{}) - storeDelMetric(schemas.Metric{}) + // TODO: implement correctly } diff --git a/pkg/controller/webhooks.go b/pkg/controller/webhooks.go new file mode 100644 index 00000000..3faf81ec --- /dev/null +++ b/pkg/controller/webhooks.go @@ -0,0 +1,434 @@ +package controller + +import ( + "context" + "fmt" + "regexp" + "strconv" + "strings" + + log "github.com/sirupsen/logrus" + goGitlab "github.com/xanzy/go-gitlab" + + 
"github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func (c *Controller) processPipelineEvent(ctx context.Context, e goGitlab.PipelineEvent) { + var ( + refKind schemas.RefKind + refName = e.ObjectAttributes.Ref + ) + + // TODO: Perhaps it would be nice to match upon the regexp to validate + // that it is actually a merge request ref + if e.MergeRequest.IID != 0 { + refKind = schemas.RefKindMergeRequest + refName = strconv.Itoa(e.MergeRequest.IID) + } else if e.ObjectAttributes.Tag { + refKind = schemas.RefKindTag + } else { + refKind = schemas.RefKindBranch + } + + c.triggerRefMetricsPull(ctx, schemas.NewRef( + schemas.NewProject(e.Project.PathWithNamespace), + refKind, + refName, + )) +} + +func (c *Controller) processJobEvent(ctx context.Context, e goGitlab.JobEvent) { + var ( + refKind schemas.RefKind + refName = e.Ref + ) + + if e.Tag { + refKind = schemas.RefKindTag + } else { + refKind = schemas.RefKindBranch + } + + project, _, err := c.Gitlab.Projects.GetProject(e.ProjectID, nil) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error("reading project from GitLab") + + return + } + + c.triggerRefMetricsPull(ctx, schemas.NewRef( + schemas.NewProject(project.PathWithNamespace), + refKind, + refName, + )) +} + +func (c *Controller) processPushEvent(ctx context.Context, e goGitlab.PushEvent) { + if e.CheckoutSHA == "" { + var ( + refKind = schemas.RefKindBranch + refName string + ) + + // branch refs in push events have "refs/heads/" prefix + if branch, found := strings.CutPrefix(e.Ref, "refs/heads/"); found { + refName = branch + } else { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": e.Project.Name, + "ref": e.Ref, + }). 
+ Error("extracting branch name from ref") + + return + } + + _ = deleteRef(ctx, c.Store, schemas.NewRef( + schemas.NewProject(e.Project.PathWithNamespace), + refKind, + refName, + ), "received branch deletion push event from webhook") + } +} + +func (c *Controller) processTagEvent(ctx context.Context, e goGitlab.TagEvent) { + if e.CheckoutSHA == "" { + var ( + refKind = schemas.RefKindTag + refName string + ) + + // tags refs in tag events have "refs/tags/" prefix + if tag, found := strings.CutPrefix(e.Ref, "refs/tags/"); found { + refName = tag + } else { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": e.Project.Name, + "ref": e.Ref, + }). + Error("extracting tag name from ref") + + return + } + + _ = deleteRef(ctx, c.Store, schemas.NewRef( + schemas.NewProject(e.Project.PathWithNamespace), + refKind, + refName, + ), "received tag deletion tag event from webhook") + } +} + +func (c *Controller) processMergeEvent(ctx context.Context, e goGitlab.MergeEvent) { + ref := schemas.NewRef( + schemas.NewProject(e.Project.PathWithNamespace), + schemas.RefKindMergeRequest, + strconv.Itoa(e.ObjectAttributes.IID), + ) + + switch e.ObjectAttributes.Action { + case "close": + _ = deleteRef(ctx, c.Store, ref, "received merge request close event from webhook") + case "merge": + _ = deleteRef(ctx, c.Store, ref, "received merge request merge event from webhook") + default: + log. + WithField("merge-request-event-type", e.ObjectAttributes.Action). + Debug("received a non supported merge-request event type as a webhook") + } +} + +func (c *Controller) triggerRefMetricsPull(ctx context.Context, ref schemas.Ref) { + logFields := log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "ref-kind": ref.Kind, + } + + refExists, err := c.Store.RefExists(ctx, ref.Key()) + if err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). 
+ Error("reading ref from the store") + + return + } + + // Let's try to see if the project is configured to export this ref + if !refExists { + p := schemas.NewProject(ref.Project.Name) + + projectExists, err := c.Store.ProjectExists(ctx, p.Key()) + if err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). + Error("reading project from the store") + + return + } + + // Perhaps the project is discoverable through a wildcard + if !projectExists && len(c.Config.Wildcards) > 0 { + for _, w := range c.Config.Wildcards { + // If in all our wildcards we have one which can potentially match the project ref + // received, we trigger a pull of the project + matches, err := isRefMatchingWilcard(w, ref) + if err != nil { + log.WithContext(ctx). + WithError(err). + Warn("checking if the ref matches the wildcard config") + + continue + } + + if matches { + c.ScheduleTask(context.TODO(), schemas.TaskTypePullProject, ref.Project.Name, ref.Project.Name, w.Pull) + log.WithFields(logFields).Info("project ref not currently exported but its configuration matches a wildcard, triggering a pull of the project") + } else { + log.WithFields(logFields).Debug("project ref not matching wildcard, skipping..") + } + } + + log.WithFields(logFields).Info("done looking up for wildcards matching the project ref") + + return + } + + if projectExists { + // If the project exists, we check that the ref matches it's configuration + if err := c.Store.GetProject(ctx, &p); err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). + Error("reading project from the store") + + return + } + + matches, err := isRefMatchingProjectPullRefs(p.Pull.Refs, ref) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error("checking if the ref matches the project config") + + return + } + + if matches { + ref.Project = p + + if err = c.Store.SetRef(ctx, ref); err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). 
+ Error("writing ref in the store") + + return + } + + goto schedulePull + } + } + + log.WithFields(logFields).Info("ref not configured in the exporter, ignoring pipeline webhook") + + return + } + +schedulePull: + log.WithFields(logFields).Info("received a pipeline webhook from GitLab for a ref, triggering metrics pull") + // TODO: When all the metrics will be sent over the webhook, we might be able to avoid redoing a pull + // eg: 'coverage' is not in the pipeline payload yet, neither is 'artifacts' in the job one + c.ScheduleTask(context.TODO(), schemas.TaskTypePullRefMetrics, string(ref.Key()), ref) +} + +func (c *Controller) processDeploymentEvent(ctx context.Context, e goGitlab.DeploymentEvent) { + c.triggerEnvironmentMetricsPull( + ctx, + schemas.Environment{ + ProjectName: e.Project.PathWithNamespace, + Name: e.Environment, + }, + ) +} + +func (c *Controller) triggerEnvironmentMetricsPull(ctx context.Context, env schemas.Environment) { + logFields := log.Fields{ + "project-name": env.ProjectName, + "environment-name": env.Name, + } + + envExists, err := c.Store.EnvironmentExists(ctx, env.Key()) + if err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). + Error("reading environment from the store") + + return + } + + if !envExists { + p := schemas.NewProject(env.ProjectName) + + projectExists, err := c.Store.ProjectExists(ctx, p.Key()) + if err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). + Error("reading project from the store") + + return + } + + // Perhaps the project is discoverable through a wildcard + if !projectExists && len(c.Config.Wildcards) > 0 { + for _, w := range c.Config.Wildcards { + // If in all our wildcards we have one which can potentially match the env + // received, we trigger a pull of the project + matches, err := isEnvMatchingWilcard(w, env) + if err != nil { + log.WithContext(ctx). + WithError(err). 
+ Warn("checking if the env matches the wildcard config") + + continue + } + + if matches { + c.ScheduleTask(context.TODO(), schemas.TaskTypePullProject, env.ProjectName, env.ProjectName, w.Pull) + log.WithFields(logFields).Info("project environment not currently exported but its configuration matches a wildcard, triggering a pull of the project") + } else { + log.WithFields(logFields).Debug("project ref not matching wildcard, skipping..") + } + } + + log.WithFields(logFields).Info("done looking up for wildcards matching the project ref") + + return + } + + if projectExists { + if err := c.Store.GetProject(ctx, &p); err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). + Error("reading project from the store") + } + + matches, err := isEnvMatchingProjectPullEnvironments(p.Pull.Environments, env) + if err != nil { + log.WithContext(ctx). + WithError(err). + Error("checking if the env matches the project config") + + return + } + + if matches { + // As we do not get the environment ID within the deployment event, we need to query it back.. + if err = c.UpdateEnvironment(ctx, &env); err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). + Error("updating event from GitLab API") + + return + } + + goto schedulePull + } + } + + log.WithFields(logFields). + Info("environment not configured in the exporter, ignoring deployment webhook") + + return + } + + // Need to refresh the env from the store in order to get at least it's ID + if env.ID == 0 { + if err = c.Store.GetEnvironment(ctx, &env); err != nil { + log.WithContext(ctx). + WithFields(logFields). + WithError(err). 
+ Error("reading environment from the store") + } + } + +schedulePull: + log.WithFields(logFields).Info("received a deployment webhook from GitLab for an environment, triggering metrics pull") + c.ScheduleTask(ctx, schemas.TaskTypePullEnvironmentMetrics, string(env.Key()), env) +} + +func isRefMatchingProjectPullRefs(pprs config.ProjectPullRefs, ref schemas.Ref) (matches bool, err error) { + // We check if the ref kind is enabled + switch ref.Kind { + case schemas.RefKindBranch: + if !pprs.Branches.Enabled { + return + } + case schemas.RefKindTag: + if !pprs.Tags.Enabled { + return + } + case schemas.RefKindMergeRequest: + if !pprs.MergeRequests.Enabled { + return + } + default: + return false, fmt.Errorf("invalid ref kind %v", ref.Kind) + } + + // Then we check if it matches the regexp + var re *regexp.Regexp + + if re, err = schemas.GetRefRegexp(pprs, ref.Kind); err != nil { + return + } + + return re.MatchString(ref.Name), nil +} + +func isEnvMatchingProjectPullEnvironments(ppe config.ProjectPullEnvironments, env schemas.Environment) (matches bool, err error) { + // We check if the environments pulling is enabled + if !ppe.Enabled { + return + } + + // Then we check if it matches the regexp + var re *regexp.Regexp + + if re, err = regexp.Compile(ppe.Regexp); err != nil { + return + } + + return re.MatchString(env.Name), nil +} + +func isRefMatchingWilcard(w config.Wildcard, ref schemas.Ref) (matches bool, err error) { + // Then we check if the owner matches the ref or is global + if w.Owner.Kind != "" && !strings.Contains(ref.Project.Name, w.Owner.Name) { + return + } + + // Then we check if the ref matches the project pull parameters + return isRefMatchingProjectPullRefs(w.Pull.Refs, ref) +} + +func isEnvMatchingWilcard(w config.Wildcard, env schemas.Environment) (matches bool, err error) { + // Then we check if the owner matches the ref or is global + if w.Owner.Kind != "" && !strings.Contains(env.ProjectName, w.Owner.Name) { + return + } + + // Then we check 
if the ref matches the project pull parameters + return isEnvMatchingProjectPullEnvironments(w.Pull.Environments, env) +} diff --git a/pkg/controller/webhooks_test.go b/pkg/controller/webhooks_test.go new file mode 100644 index 00000000..5e47e5c1 --- /dev/null +++ b/pkg/controller/webhooks_test.go @@ -0,0 +1,57 @@ +package controller + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func TestTriggerRefMetricsPull(t *testing.T) { + ctx, c, _, srv := newTestController(config.Config{}) + srv.Close() + + ref1 := schemas.Ref{ + Project: schemas.NewProject("group/foo"), + Name: "main", + } + + p2 := schemas.NewProject("group/bar") + ref2 := schemas.Ref{ + Project: p2, + Name: "main", + } + + assert.NoError(t, c.Store.SetRef(ctx, ref1)) + assert.NoError(t, c.Store.SetProject(ctx, p2)) + + // TODO: Assert results somehow + c.triggerRefMetricsPull(ctx, ref1) + c.triggerRefMetricsPull(ctx, ref2) +} + +func TestTriggerEnvironmentMetricsPull(t *testing.T) { + ctx, c, _, srv := newTestController(config.Config{}) + srv.Close() + + p1 := schemas.NewProject("foo/bar") + env1 := schemas.Environment{ + ProjectName: p1.Name, + Name: "dev", + } + + env2 := schemas.Environment{ + ProjectName: "foo/baz", + Name: "prod", + } + + assert.NoError(t, c.Store.SetProject(ctx, p1)) + assert.NoError(t, c.Store.SetEnvironment(ctx, env1)) + assert.NoError(t, c.Store.SetEnvironment(ctx, env2)) + + // TODO: Assert results somehow + c.triggerEnvironmentMetricsPull(ctx, env1) + c.triggerEnvironmentMetricsPull(ctx, env2) +} diff --git a/pkg/exporter/collectors.go b/pkg/exporter/collectors.go deleted file mode 100644 index 0f6b129d..00000000 --- a/pkg/exporter/collectors.go +++ /dev/null @@ -1,232 +0,0 @@ -package exporter - -import "github.com/prometheus/client_golang/prometheus" - -var ( - defaultLabels = []string{"project", "topics", "kind", 
"ref", "variables"} - jobLabels = []string{"stage", "job_name", "runner_description"} - statusLabels = []string{"status"} - environmentLabels = []string{"project", "environment"} - environmentInformationLabels = []string{"environment_id", "external_url", "kind", "ref", "latest_commit_short_id", "current_commit_short_id", "available", "username"} - statusesList = [...]string{"created", "waiting_for_resource", "preparing", "pending", "running", "success", "failed", "canceled", "skipped", "manual", "scheduled"} -) - -// NewCollectorCoverage returns a new collector for the gitlab_ci_pipeline_coverage metric -func NewCollectorCoverage() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_coverage", - Help: "Coverage of the most recent pipeline", - }, - defaultLabels, - ) -} - -// NewCollectorDurationSeconds returns a new collector for the gitlab_ci_pipeline_duration_seconds metric -func NewCollectorDurationSeconds() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_duration_seconds", - Help: "Duration in seconds of the most recent pipeline", - }, - defaultLabels, - ) -} - -// NewCollectorEnvironmentBehindCommitsCount returns a new collector for the gitlab_ci_environment_behind_commits_count metric -func NewCollectorEnvironmentBehindCommitsCount() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_behind_commits_count", - Help: "Number of commits the environment is behind given its last deployment", - }, - environmentLabels, - ) -} - -// NewCollectorEnvironmentBehindDurationSeconds returns a new collector for the gitlab_ci_environment_behind_duration_seconds metric -func NewCollectorEnvironmentBehindDurationSeconds() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_behind_duration_seconds", - Help: "Duration in seconds the environment is 
behind the most recent commit given its last deployment", - }, - environmentLabels, - ) -} - -// NewCollectorEnvironmentDeploymentCount returns a new collector for the gitlab_ci_environment_deployment_count metric -func NewCollectorEnvironmentDeploymentCount() prometheus.Collector { - return prometheus.NewCounterVec( - prometheus.CounterOpts{ - Name: "gitlab_ci_environment_deployment_count", - Help: "Number of deployments for an environment", - }, - environmentLabels, - ) -} - -// NewCollectorEnvironmentDeploymentDurationSeconds returns a new collector for the gitlab_ci_environment_deployment_duration_seconds metric -func NewCollectorEnvironmentDeploymentDurationSeconds() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_deployment_duration_seconds", - Help: "Duration in seconds of the most recent deployment of the environment", - }, - environmentLabels, - ) -} - -// NewCollectorEnvironmentDeploymentJobID returns a new collector for the gitlab_ci_environment_deployment_id metric -func NewCollectorEnvironmentDeploymentJobID() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_deployment_job_id", - Help: "ID of the most recent deployment job of the environment", - }, - environmentLabels, - ) -} - -// NewCollectorEnvironmentDeploymentStatus returns a new collector for the gitlab_ci_environment_deployment_status metric -func NewCollectorEnvironmentDeploymentStatus() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_deployment_status", - Help: "Status of the most recent deployment of the environment", - }, - append(environmentLabels, "status"), - ) -} - -// NewCollectorEnvironmentDeploymentTimestamp returns a new collector for the gitlab_ci_environment_deployment_timestamp metric -func NewCollectorEnvironmentDeploymentTimestamp() prometheus.Collector { - return prometheus.NewGaugeVec( - 
prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_deployment_timestamp", - Help: "Creation date of the most recent deployment of the environment", - }, - environmentLabels, - ) -} - -// NewCollectorEnvironmentInformation returns a new collector for the gitlab_ci_environment_information metric -func NewCollectorEnvironmentInformation() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_environment_information", - Help: "Information about the environment", - }, - append(environmentLabels, environmentInformationLabels...), - ) -} - -// NewCollectorID returns a new collector for the gitlab_ci_pipeline_id metric -func NewCollectorID() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_id", - Help: "ID of the most recent pipeline", - }, - defaultLabels, - ) -} - -// NewCollectorJobArtifactSizeBytes returns a new collector for the gitlab_ci_pipeline_job_artifact_size_bytes metric -func NewCollectorJobArtifactSizeBytes() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_job_artifact_size_bytes", - Help: "Artifact size in bytes (sum of all of them) of the most recent job", - }, - append(defaultLabels, jobLabels...), - ) -} - -// NewCollectorJobDurationSeconds returns a new collector for the gitlab_ci_pipeline_job_duration_seconds metric -func NewCollectorJobDurationSeconds() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_job_duration_seconds", - Help: "Duration in seconds of the most recent job", - }, - append(defaultLabels, jobLabels...), - ) -} - -// NewCollectorJobID returns a new collector for the gitlab_ci_pipeline_job_id metric -func NewCollectorJobID() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_job_id", - Help: "ID of the most recent job", - }, - append(defaultLabels, 
jobLabels...), - ) -} - -// NewCollectorJobRunCount returns a new collector for the gitlab_ci_pipeline_job_run_count metric -func NewCollectorJobRunCount() prometheus.Collector { - return prometheus.NewCounterVec( - prometheus.CounterOpts{ - Name: "gitlab_ci_pipeline_job_run_count", - Help: "Number of executions of a job", - }, - append(defaultLabels, jobLabels...), - ) -} - -// NewCollectorJobStatus returns a new collector for the gitlab_ci_pipeline_job_status metric -func NewCollectorJobStatus() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_job_status", - Help: "Status of the most recent job", - }, - append(defaultLabels, append(jobLabels, statusLabels...)...), - ) -} - -// NewCollectorJobTimestamp returns a new collector for the gitlab_ci_pipeline_job_timestamp metric -func NewCollectorJobTimestamp() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_job_timestamp", - Help: "Creation date timestamp of the the most recent job", - }, - append(defaultLabels, jobLabels...), - ) -} - -// NewCollectorStatus returns a new collector for the gitlab_ci_pipeline_status metric -func NewCollectorStatus() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_status", - Help: "Status of the most recent pipeline", - }, - append(defaultLabels, "status"), - ) -} - -// NewCollectorTimestamp returns a new collector for the gitlab_ci_pipeline_timestamp metric -func NewCollectorTimestamp() prometheus.Collector { - return prometheus.NewGaugeVec( - prometheus.GaugeOpts{ - Name: "gitlab_ci_pipeline_timestamp", - Help: "Timestamp of the last update of the most recent pipeline", - }, - defaultLabels, - ) -} - -// NewCollectorRunCount returns a new collector for the gitlab_ci_pipeline_run_count metric -func NewCollectorRunCount() prometheus.Collector { - return prometheus.NewCounterVec( - prometheus.CounterOpts{ - Name: 
"gitlab_ci_pipeline_run_count", - Help: "Number of executions of a pipeline", - }, - defaultLabels, - ) -} diff --git a/pkg/exporter/exporter.go b/pkg/exporter/exporter.go deleted file mode 100644 index 195fd1b4..00000000 --- a/pkg/exporter/exporter.go +++ /dev/null @@ -1,245 +0,0 @@ -package exporter - -import ( - "context" - "net/http" - "net/http/pprof" - "os" - "os/signal" - "sync" - "syscall" - "time" - - "github.com/go-redis/redis/v8" - "github.com/heptiolabs/healthcheck" - "github.com/pkg/errors" - log "github.com/sirupsen/logrus" - "github.com/vmihailenco/taskq/v3" - "github.com/vmihailenco/taskq/v3/memqueue" - "github.com/vmihailenco/taskq/v3/redisq" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/storage" -) - -var ( - config schemas.Config - gitlabClient *gitlab.Client - redisClient *redis.Client - taskFactory taskq.Factory - pullingQueue taskq.Queue - store storage.Storage - cfgUpdateLock sync.RWMutex -) - -// Configure .. -func Configure(cfg schemas.Config, userAgentVersion string) error { - cfgUpdateLock.Lock() - config = cfg - cfgUpdateLock.Unlock() - - configurePullingQueue() - configureStore() - return configureGitlabClient(userAgentVersion) -} - -// ConfigureGitlabClient .. -func configureGitlabClient(userAgentVersion string) (err error) { - cfgUpdateLock.Lock() - defer cfgUpdateLock.Unlock() - - gitlabClient, err = gitlab.NewClient(gitlab.ClientConfig{ - URL: config.Gitlab.URL, - Token: config.Gitlab.Token, - DisableTLSVerify: !config.Gitlab.EnableTLSVerify, - UserAgentVersion: userAgentVersion, - RateLimiter: newRateLimiter(), - ReadinessURL: config.Gitlab.HealthURL, - }) - return -} - -// ConfigureRedisClient .. 
-func ConfigureRedisClient(c *redis.Client) error { - cfgUpdateLock.Lock() - defer cfgUpdateLock.Unlock() - - redisClient = c - if _, err := redisClient.Ping(context.Background()).Result(); err != nil { - return errors.Wrap(err, "connecting to redis") - } - return nil -} - -// ConfigurePullingQueue .. -func configurePullingQueue() { - cfgUpdateLock.Lock() - defer cfgUpdateLock.Unlock() - - pullingQueueOptions := &taskq.QueueOptions{ - Name: "pull", - PauseErrorsThreshold: 0, - - // Disable system resources checks - MinSystemResources: taskq.SystemResources{ - Load1PerCPU: -1, - MemoryFreeMB: 0, - MemoryFreePercentage: 0, - }, - } - - if redisClient != nil { - taskFactory = redisq.NewFactory() - pullingQueueOptions.Redis = redisClient - } else { - taskFactory = memqueue.NewFactory() - } - - pullingQueue = taskFactory.RegisterQueue(pullingQueueOptions) - - // Purge the queue when we start - // I am only partially convinced this will not cause issues in HA fashion - if err := pullingQueue.Purge(); err != nil { - log.WithField("error", err.Error()).Error("purging the pulling queue") - } -} - -// ConfigureStore .. 
-func configureStore() { - cfgUpdateLock.Lock() - defer cfgUpdateLock.Unlock() - - if redisClient != nil { - store = storage.NewRedisStorage(redisClient) - } else { - store = storage.NewLocalStorage() - } - - // Load all the configured projects in the store - for _, p := range config.Projects { - exists, err := store.ProjectExists(p.Key()) - if err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Error("reading project from the store") - } - - if !exists { - if err = store.SetProject(p); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Error("writing project in the store") - } - - if config.Pull.RefsFromProjects.OnInit { - go schedulePullRefsFromProject(context.Background(), p) - go schedulePullRefsFromPipeline(context.Background(), p) - } - - if config.Pull.EnvironmentsFromProjects.OnInit { - go schedulePullEnvironmentsFromProject(context.Background(), p) - } - } - } -} - -func newRateLimiter() ratelimit.Limiter { - if redisClient != nil { - return ratelimit.NewRedisLimiter(context.Background(), redisClient, config.Pull.MaximumGitLabAPIRequestsPerSecond) - } - return ratelimit.NewLocalLimiter(config.Pull.MaximumGitLabAPIRequestsPerSecond) -} - -func processPullingQueue(ctx context.Context) { - if redisClient != nil { - if err := taskFactory.StartConsumers(ctx); err != nil { - log.Fatal(err) - } - } -} - -func healthCheckHandler() (h healthcheck.Handler) { - h = healthcheck.NewHandler() - if config.Gitlab.EnableHealthCheck { - h.AddReadinessCheck("gitlab-reachable", gitlabClient.ReadinessCheck()) - } else { - log.Warn("GitLab health check has been disabled. 
Readiness checks won't be operated.") - } - - return -} - -// Run executes the http servers supporting the exporter -func Run() { - // Graceful shutdowns - onShutdown := make(chan os.Signal, 1) - signal.Notify(onShutdown, syscall.SIGINT, syscall.SIGTERM, syscall.SIGABRT) - - schedulingContext, stopOrchestratePulling := context.WithCancel(context.Background()) - schedule(schedulingContext) - processPullingQueue(schedulingContext) - - // HTTP server - mux := http.NewServeMux() - srv := &http.Server{ - Addr: config.Server.ListenAddress, - Handler: mux, - } - - // health endpoints - health := healthCheckHandler() - mux.HandleFunc("/health/live", health.LiveEndpoint) - mux.HandleFunc("/health/ready", health.ReadyEndpoint) - - // metrics endpoint - if config.Server.Metrics.Enabled { - mux.HandleFunc("/metrics", MetricsHandler) - } - - // pprof/debug endpoints - if config.Server.EnablePprof { - mux.HandleFunc("/debug/pprof/", pprof.Index) - mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline) - mux.HandleFunc("/debug/pprof/profile", pprof.Profile) - mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol) - mux.HandleFunc("/debug/pprof/trace", pprof.Trace) - } - - // webhook endpoints - if config.Server.Webhook.Enabled { - mux.HandleFunc("/webhook", WebhookHandler) - } - - go func() { - if err := srv.ListenAndServe(); err != nil && err != http.ErrServerClosed { - log.Fatal(err) - } - }() - - log.WithFields( - log.Fields{ - "listen-address": config.Server.ListenAddress, - "pprof-endpoint-enabled": config.Server.EnablePprof, - "metrics-endpoint-enabled": config.Server.Metrics.Enabled, - "webhook-endpoint-enabled": config.Server.Webhook.Enabled, - "openmetrics-encoding-enabled": config.Server.Metrics.EnableOpenmetricsEncoding, - }, - ).Info("http server started") - - <-onShutdown - log.Info("received signal, attempting to gracefully exit..") - stopOrchestratePulling() - - httpServerContext, forceHTTPServerShutdown := context.WithTimeout(context.Background(), 5*time.Second) - 
defer forceHTTPServerShutdown() - - if err := srv.Shutdown(httpServerContext); err != nil { - log.Fatalf("metrics server shutdown failed: %+v", err) - } - - log.Info("stopped!") -} diff --git a/pkg/exporter/exporter_test.go b/pkg/exporter/exporter_test.go deleted file mode 100644 index 7233c655..00000000 --- a/pkg/exporter/exporter_test.go +++ /dev/null @@ -1,149 +0,0 @@ -package exporter - -import ( - "context" - "net/http" - "net/http/httptest" - "testing" - - "github.com/alicebob/miniredis" - "github.com/go-redis/redis/v8" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/storage" - "github.com/stretchr/testify/assert" - goGitlab "github.com/xanzy/go-gitlab" -) - -func resetGlobalValues() { - cfgUpdateLock.Lock() - defer cfgUpdateLock.Unlock() - - config = schemas.Config{} - gitlabClient = nil - redisClient = nil - taskFactory = nil - pullingQueue = nil - store = storage.NewLocalStorage() -} - -func configureMockedGitlabClient() (*http.ServeMux, *httptest.Server) { - cfgUpdateLock.Lock() - defer cfgUpdateLock.Unlock() - - mux := http.NewServeMux() - server := httptest.NewServer(mux) - - opts := []goGitlab.ClientOptionFunc{ - goGitlab.WithBaseURL(server.URL), - goGitlab.WithoutRetries(), - } - - gc, _ := goGitlab.NewClient("", opts...) 
- - gitlabClient = &gitlab.Client{ - Client: gc, - RateLimiter: ratelimit.NewLocalLimiter(100), - } - - return mux, server -} - -func TestConfigure(t *testing.T) { - resetGlobalValues() - - cfg := schemas.Config{ - Gitlab: schemas.GitlabConfig{ - URL: "http://foo.bar", - }, - Pull: schemas.PullConfig{ - MaximumGitLabAPIRequestsPerSecond: 1, - }, - } - - assert.NoError(t, Configure(cfg, "")) - assert.Equal(t, cfg, config) -} - -func TestConfigureGitlabClient(t *testing.T) { - resetGlobalValues() - - config.Pull.MaximumGitLabAPIRequestsPerSecond = 1 - configureGitlabClient("yolo") - assert.NotNil(t, gitlabClient) -} - -func TestConfigureRedisClient(t *testing.T) { - resetGlobalValues() - - s, err := miniredis.Run() - if err != nil { - panic(err) - } - defer s.Close() - - c := redis.NewClient(&redis.Options{Addr: s.Addr()}) - assert.NoError(t, ConfigureRedisClient(c)) - assert.Equal(t, redisClient, c) - - s.Close() - assert.Error(t, ConfigureRedisClient(c)) -} - -// TODO: Sort out why this creates loads of race issues across -func TestConfigurePullingQueue(t *testing.T) { - resetGlobalValues() - - // TODO: Test with redis client, miniredis does not seem to support it yet - configurePullingQueue() - assert.Equal(t, "pull", pullingQueue.Options().Name) -} - -func TestConfigureStore(t *testing.T) { - resetGlobalValues() - - config = schemas.Config{ - Projects: []schemas.Project{ - { - Name: "foo/bar", - }, - }, - } - - // Test with local storage - configureStore() - assert.NotNil(t, store) - - projects, err := store.Projects() - assert.NoError(t, err) - - expectedProjects := schemas.Projects{ - "3861188962": schemas.Project{ - Name: "foo/bar", - }, - } - assert.Equal(t, expectedProjects, projects) - - // Test with redis storage - s, err := miniredis.Run() - if err != nil { - panic(err) - } - defer s.Close() - - c := redis.NewClient(&redis.Options{Addr: s.Addr()}) - assert.NoError(t, ConfigureRedisClient(c)) - - configureStore() - projects, err = store.Projects() - 
assert.NoError(t, err) - assert.Equal(t, expectedProjects, projects) -} - -func TestProcessPullingQueue(_ *testing.T) { - resetGlobalValues() - - // TODO: Test with redis client, miniredis does not seem to support it yet - processPullingQueue(context.TODO()) -} diff --git a/pkg/exporter/garbage_collector_test.go b/pkg/exporter/garbage_collector_test.go deleted file mode 100644 index 84a18ca5..00000000 --- a/pkg/exporter/garbage_collector_test.go +++ /dev/null @@ -1,191 +0,0 @@ -package exporter - -import ( - "fmt" - "net/http" - "testing" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" - "github.com/prometheus/client_golang/prometheus" - "github.com/stretchr/testify/assert" -) - -func TestGarbageCollectProjects(t *testing.T) { - resetGlobalValues() - - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/groups/wc/projects", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"id":1, "path_with_namespace": "wc/p3", "jobs_enabled": true}]`) - }) - - p1 := schemas.Project{Name: "cfg/p1"} - p2 := schemas.Project{Name: "cfg/p2"} - p3 := schemas.Project{Name: "wc/p3"} - p4 := schemas.Project{Name: "wc/p4"} - - store.SetProject(p1) - store.SetProject(p2) - store.SetProject(p3) - store.SetProject(p4) - - config = schemas.Config{ - Projects: []schemas.Project{p1}, - Wildcards: schemas.Wildcards{ - schemas.Wildcard{ - Owner: schemas.WildcardOwner{ - Kind: "group", - Name: "wc", - }, - }, - }, - } - - assert.NoError(t, garbageCollectProjects()) - storedProjects, err := store.Projects() - assert.NoError(t, err) - - expectedProjects := schemas.Projects{ - p1.Key(): p1, - p3.Key(): p3, - } - assert.Equal(t, expectedProjects, storedProjects) -} - -func TestGarbageCollectEnvironments(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects/p2/environments", - func(w 
http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"name": "main"}]`) - }) - - p2 := schemas.Project{ - Name: "p2", - ProjectParameters: schemas.ProjectParameters{ - Pull: schemas.ProjectPull{ - Environments: schemas.ProjectPullEnvironments{ - NameRegexpValue: pointy.String("^main$"), - }, - }, - }, - } - envp1main := schemas.Environment{ProjectName: "p1", Name: "main"} - envp2dev := schemas.Environment{ProjectName: "p2", Name: "dev"} - envp2main := schemas.Environment{ProjectName: "p2", Name: "main"} - - store.SetProject(p2) - store.SetEnvironment(envp1main) - store.SetEnvironment(envp2dev) - store.SetEnvironment(envp2main) - - assert.NoError(t, garbageCollectEnvironments()) - storedEnvironments, err := store.Environments() - assert.NoError(t, err) - - expectedEnvironments := schemas.Environments{ - envp2main.Key(): schemas.Environment{ - ProjectName: "p2", - Name: "main", - TagsRegexp: ".*", - OutputSparseStatusMetrics: true, - }, - } - assert.Equal(t, expectedEnvironments, storedEnvironments) -} - -func TestGarbageCollectRefs(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects/p2/repository/branches", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"name": "main"}]`) - }) - - mux.HandleFunc("/api/v4/projects/p2/repository/tags", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"name": "main"}]`) - }) - - pr1dev := schemas.Ref{Kind: schemas.RefKindBranch, ProjectName: "p1", Name: "dev"} - pr1main := schemas.Ref{Kind: schemas.RefKindBranch, ProjectName: "p1", Name: "main"} - - p2 := schemas.Project{ - Name: "p2", - ProjectParameters: schemas.ProjectParameters{ - Pull: schemas.ProjectPull{ - Refs: schemas.ProjectPullRefs{ - RegexpValue: pointy.String("^main$"), - }, - }, - }, - } - pr2dev := schemas.Ref{Kind: schemas.RefKindBranch, ProjectName: "p2", Name: "dev"} - pr2main := schemas.Ref{Kind: schemas.RefKindBranch, 
ProjectName: "p2", Name: "main"} - - store.SetProject(p2) - store.SetRef(pr1dev) - store.SetRef(pr1main) - store.SetRef(pr2dev) - store.SetRef(pr2main) - - assert.NoError(t, garbageCollectRefs()) - storedRefs, err := store.Refs() - assert.NoError(t, err) - - newPR2main := schemas.Ref{Kind: schemas.RefKindBranch, ProjectName: "p2", Name: "main"} - expectedRefs := schemas.Refs{ - newPR2main.Key(): schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "p2", - Name: "main", - OutputSparseStatusMetrics: true, - PullPipelineVariablesRegexp: ".*", - }, - } - assert.Equal(t, expectedRefs, storedRefs) -} - -func TestGarbageCollectMetrics(t *testing.T) { - resetGlobalValues() - - ref1 := schemas.Ref{ - ProjectName: "p1", - Name: "foo", - OutputSparseStatusMetrics: true, - PullPipelineJobsEnabled: true, - } - - ref1m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"project": "p1", "ref": "foo"}} - ref1m2 := schemas.Metric{Kind: schemas.MetricKindStatus, Labels: prometheus.Labels{"project": "p1", "ref": "foo"}} - ref1m3 := schemas.Metric{Kind: schemas.MetricKindJobDurationSeconds, Labels: prometheus.Labels{"project": "p1", "ref": "foo"}} - - ref2m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"project": "p2", "ref": "bar"}} - ref3m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"project": "foo"}} - ref4m1 := schemas.Metric{Kind: schemas.MetricKindCoverage, Labels: prometheus.Labels{"ref": "bar"}} - - store.SetRef(ref1) - store.SetMetric(ref1m1) - store.SetMetric(ref1m2) - store.SetMetric(ref1m3) - store.SetMetric(ref2m1) - store.SetMetric(ref3m1) - store.SetMetric(ref4m1) - - assert.NoError(t, garbageCollectMetrics()) - storedMetrics, err := store.Metrics() - assert.NoError(t, err) - - expectedMetrics := schemas.Metrics{ - ref1m1.Key(): ref1m1, - ref1m3.Key(): ref1m3, - } - assert.Equal(t, expectedMetrics, storedMetrics) -} diff --git a/pkg/exporter/metrics.go 
b/pkg/exporter/metrics.go deleted file mode 100644 index c5182ad3..00000000 --- a/pkg/exporter/metrics.go +++ /dev/null @@ -1,142 +0,0 @@ -package exporter - -import ( - "fmt" - "net/http" - "reflect" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promhttp" - log "github.com/sirupsen/logrus" -) - -// Registry wraps a pointer of prometheus.Registry -type Registry struct { - *prometheus.Registry - - Collectors RegistryCollectors -} - -// RegistryCollectors .. -type RegistryCollectors map[schemas.MetricKind]prometheus.Collector - -// NewRegistry initialize a new registry -func NewRegistry() *Registry { - r := &Registry{ - Registry: prometheus.NewRegistry(), - Collectors: RegistryCollectors{ - schemas.MetricKindCoverage: NewCollectorCoverage(), - schemas.MetricKindDurationSeconds: NewCollectorDurationSeconds(), - schemas.MetricKindEnvironmentBehindCommitsCount: NewCollectorEnvironmentBehindCommitsCount(), - schemas.MetricKindEnvironmentBehindDurationSeconds: NewCollectorEnvironmentBehindDurationSeconds(), - schemas.MetricKindEnvironmentDeploymentCount: NewCollectorEnvironmentDeploymentCount(), - schemas.MetricKindEnvironmentDeploymentDurationSeconds: NewCollectorEnvironmentDeploymentDurationSeconds(), - schemas.MetricKindEnvironmentDeploymentJobID: NewCollectorEnvironmentDeploymentJobID(), - schemas.MetricKindEnvironmentDeploymentStatus: NewCollectorEnvironmentDeploymentStatus(), - schemas.MetricKindEnvironmentDeploymentTimestamp: NewCollectorEnvironmentDeploymentTimestamp(), - schemas.MetricKindEnvironmentInformation: NewCollectorEnvironmentInformation(), - schemas.MetricKindID: NewCollectorID(), - schemas.MetricKindJobArtifactSizeBytes: NewCollectorJobArtifactSizeBytes(), - schemas.MetricKindJobDurationSeconds: NewCollectorJobDurationSeconds(), - schemas.MetricKindJobID: NewCollectorJobID(), - schemas.MetricKindJobRunCount: 
NewCollectorJobRunCount(), - schemas.MetricKindJobStatus: NewCollectorJobStatus(), - schemas.MetricKindJobTimestamp: NewCollectorJobTimestamp(), - schemas.MetricKindRunCount: NewCollectorRunCount(), - schemas.MetricKindStatus: NewCollectorStatus(), - schemas.MetricKindTimestamp: NewCollectorTimestamp(), - }, - } - - if err := r.RegisterCollectors(); err != nil { - log.Fatal(err) - } - - return r -} - -// RegisterCollectors add all our metrics to the registry -func (r *Registry) RegisterCollectors() error { - for _, c := range r.Collectors { - if err := r.Register(c); err != nil { - return fmt.Errorf("could not add provided collector '%v' to the Prometheus registry: %v", c, err) - } - } - return nil -} - -// MetricsHandler returns an http handler containing with the desired configuration -func MetricsHandler(w http.ResponseWriter, r *http.Request) { - registry := NewRegistry() - - if err := registry.ExportMetrics(); err != nil { - log.Error(err.Error()) - } - - promhttp.HandlerFor(registry, promhttp.HandlerOpts{ - Registry: registry, - EnableOpenMetrics: config.Server.Metrics.EnableOpenmetricsEncoding, - }).ServeHTTP(w, r) -} - -// GetCollector .. -func (r *Registry) GetCollector(kind schemas.MetricKind) prometheus.Collector { - return r.Collectors[kind] -} - -// ExportMetrics .. 
-func (r *Registry) ExportMetrics() error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - metrics, err := store.Metrics() - if err != nil { - return err - } - - for _, m := range metrics { - switch c := r.GetCollector(m.Kind).(type) { - case *prometheus.GaugeVec: - c.With(m.Labels).Set(m.Value) - case *prometheus.CounterVec: - c.With(m.Labels).Add(m.Value) - default: - log.Errorf("unsupported collector type : %v", reflect.TypeOf(c)) - } - } - - return nil -} - -func emitStatusMetric(metricKind schemas.MetricKind, labelValues map[string]string, statuses []string, status string, sparseMetrics bool) { - // Moved into separate function to reduce cyclomatic complexity - // List of available statuses from the API spec - // ref: https://docs.gitlab.com/ee/api/jobs.html#list-pipeline-jobs - for _, s := range statuses { - var value float64 - statusLabels := make(map[string]string) - for k, v := range labelValues { - statusLabels[k] = v - } - statusLabels["status"] = s - - statusMetric := schemas.Metric{ - Kind: metricKind, - Labels: statusLabels, - Value: value, - } - - if s == status { - statusMetric.Value = 1 - } else { - if sparseMetrics { - storeDelMetric(statusMetric) - continue - } - statusMetric.Value = 0 - } - - storeSetMetric(statusMetric) - } -} diff --git a/pkg/exporter/pipelines.go b/pkg/exporter/pipelines.go deleted file mode 100644 index db84ce99..00000000 --- a/pkg/exporter/pipelines.go +++ /dev/null @@ -1,138 +0,0 @@ -package exporter - -import ( - "fmt" - "reflect" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - log "github.com/sirupsen/logrus" - goGitlab "github.com/xanzy/go-gitlab" -) - -func pullRefMetrics(ref schemas.Ref) error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - // At scale, the scheduled ref may be behind the actual state being stored - // to avoid issues, we refresh it from the store before manipulating it - if err := store.GetRef(&ref); err != nil { - return err - } - - logFields := 
log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "ref-kind": ref.Kind, - } - - // TODO: Figure out if we want to have a similar approach for RefKindTag with - // an additional configuration parameter perhaps - if ref.Kind == schemas.RefKindMergeRequest && ref.LatestPipeline.ID != 0 { - switch ref.LatestPipeline.Status { - case "success", "failed", "canceled", "skipped": - // The pipeline will not evolve, lets not bother querying the API - log.WithFields(logFields).WithField("most-recent-pipeline-id", ref.LatestPipeline.ID).Debug("skipping finished merge-request pipeline") - return nil - } - } - - pipelines, err := gitlabClient.GetProjectPipelines(ref.ProjectName, &goGitlab.ListProjectPipelinesOptions{ - // We only need the most recent pipeline - ListOptions: goGitlab.ListOptions{ - PerPage: 1, - Page: 1, - }, - Ref: goGitlab.String(ref.Name), - }) - if err != nil { - return fmt.Errorf("error fetching project pipelines for %s: %v", ref.ProjectName, err) - } - - if len(pipelines) == 0 { - log.WithFields(logFields).Debug("could not find any pipeline for the ref") - return nil - } - - pipeline, err := gitlabClient.GetRefPipeline(ref, pipelines[0].ID) - if err != nil { - return err - } - - if ref.LatestPipeline.ID == 0 || !reflect.DeepEqual(pipeline, ref.LatestPipeline) { - formerPipeline := ref.LatestPipeline - ref.LatestPipeline = pipeline - - // fetch pipeline variables - if ref.PullPipelineVariablesEnabled { - ref.LatestPipeline.Variables, err = gitlabClient.GetRefPipelineVariablesAsConcatenatedString(ref) - if err != nil { - return err - } - } - - // Update the ref in the store - if err = store.SetRef(ref); err != nil { - return err - } - - // If the metric does not exist yet, start with 0 instead of 1 - // this could cause some false positives in prometheus - // when restarting the exporter otherwise - runCount := schemas.Metric{ - Kind: schemas.MetricKindRunCount, - Labels: ref.DefaultLabelsValues(), - } - storeGetMetric(&runCount) - if 
formerPipeline.ID != 0 { - runCount.Value++ - } - storeSetMetric(runCount) - - storeSetMetric(schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: ref.DefaultLabelsValues(), - Value: pipeline.Coverage, - }) - - storeSetMetric(schemas.Metric{ - Kind: schemas.MetricKindID, - Labels: ref.DefaultLabelsValues(), - Value: float64(pipeline.ID), - }) - - emitStatusMetric( - schemas.MetricKindStatus, - ref.DefaultLabelsValues(), - statusesList[:], - pipeline.Status, - ref.OutputSparseStatusMetrics, - ) - - storeSetMetric(schemas.Metric{ - Kind: schemas.MetricKindDurationSeconds, - Labels: ref.DefaultLabelsValues(), - Value: pipeline.DurationSeconds, - }) - - storeSetMetric(schemas.Metric{ - Kind: schemas.MetricKindTimestamp, - Labels: ref.DefaultLabelsValues(), - Value: pipeline.Timestamp, - }) - - if ref.PullPipelineJobsEnabled { - if err := pullRefPipelineJobsMetrics(ref); err != nil { - return err - } - } - return nil - } - - if ref.PullPipelineJobsEnabled { - if err := pullRefMostRecentJobsMetrics(ref); err != nil { - return err - } - } - - return nil -} diff --git a/pkg/exporter/pipelines_test.go b/pkg/exporter/pipelines_test.go deleted file mode 100644 index aceae77a..00000000 --- a/pkg/exporter/pipelines_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package exporter - -import ( - "fmt" - "net/http" - "testing" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/stretchr/testify/assert" -) - -func TestPullRefMetricsSucceed(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects/foo/pipelines", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"id":1}]`) - }) - - mux.HandleFunc("/api/v4/projects/foo/pipelines/1", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `{"id":1,"updated_at":"2016-08-11T11:28:34.085Z","duration":300,"status":"running","coverage":"30.2"}`) - }) - - 
mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/pipelines/1/variables"), - func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "GET", r.Method) - fmt.Fprint(w, `[{"key":"foo","value":"bar"}]`) - }) - - // Metrics pull shall succeed - assert.NoError(t, pullRefMetrics(schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo", - Name: "bar", - PullPipelineVariablesEnabled: true, - })) - - // Check if all the metrics exist - metrics, _ := store.Metrics() - labels := map[string]string{ - "kind": string(schemas.RefKindBranch), - "project": "foo", - "ref": "bar", - "topics": "", - "variables": "foo:bar", - } - - runCount := schemas.Metric{ - Kind: schemas.MetricKindRunCount, - Labels: labels, - Value: 0, - } - assert.Equal(t, runCount, metrics[runCount.Key()]) - - coverage := schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: labels, - Value: 30.2, - } - assert.Equal(t, coverage, metrics[coverage.Key()]) - - runID := schemas.Metric{ - Kind: schemas.MetricKindID, - Labels: labels, - Value: 1, - } - assert.Equal(t, runID, metrics[runID.Key()]) - - labels["status"] = "running" - status := schemas.Metric{ - Kind: schemas.MetricKindStatus, - Labels: labels, - Value: 1, - } - assert.Equal(t, status, metrics[status.Key()]) -} - -func TestPullRefMetricsMergeRequestPipeline(t *testing.T) { - resetGlobalValues() - ref := schemas.Ref{ - Kind: schemas.RefKindMergeRequest, - LatestPipeline: schemas.Pipeline{ - ID: 1, - Status: "success", - }, - } - - assert.NoError(t, pullRefMetrics(ref)) -} diff --git a/pkg/exporter/projects.go b/pkg/exporter/projects.go deleted file mode 100644 index 9ff4bfc5..00000000 --- a/pkg/exporter/projects.go +++ /dev/null @@ -1,46 +0,0 @@ -package exporter - -import ( - "context" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - log "github.com/sirupsen/logrus" -) - -func pullProjectsFromWildcard(w schemas.Wildcard) error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - foundProjects, err := 
gitlabClient.ListProjects(w) - if err != nil { - return err - } - - for _, p := range foundProjects { - projectExists, err := store.ProjectExists(p.Key()) - if err != nil { - return err - } - - if !projectExists { - log.WithFields(log.Fields{ - "wildcard-search": w.Search, - "wildcard-owner-kind": w.Owner.Kind, - "wildcard-owner-name": w.Owner.Name, - "wildcard-owner-include-subgroups": w.Owner.IncludeSubgroups, - "wildcard-archived": w.Archived, - "project-name": p.Name, - }).Info("discovered new project") - - if err := store.SetProject(p); err != nil { - log.Errorf(err.Error()) - } - - go schedulePullRefsFromProject(context.Background(), p) - go schedulePullRefsFromPipeline(context.Background(), p) - go schedulePullEnvironmentsFromProject(context.Background(), p) - } - } - - return nil -} diff --git a/pkg/exporter/projects_test.go b/pkg/exporter/projects_test.go deleted file mode 100644 index 52b55c6e..00000000 --- a/pkg/exporter/projects_test.go +++ /dev/null @@ -1,32 +0,0 @@ -package exporter - -import ( - "fmt" - "net/http" - "testing" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/stretchr/testify/assert" -) - -func TestPullProjectsFromWildcard(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"id":1,"path_with_namespace":"foo","jobs_enabled":false},{"id":2,"path_with_namespace":"bar","jobs_enabled":true}]`) - }) - - w := schemas.Wildcard{} - assert.NoError(t, pullProjectsFromWildcard(w)) - - projects, _ := store.Projects() - expectedProjects := schemas.Projects{ - "1996459178": schemas.Project{ - Name: "bar", - }, - } - assert.Equal(t, expectedProjects, projects) -} diff --git a/pkg/exporter/refs.go b/pkg/exporter/refs.go deleted file mode 100644 index fd4b7edc..00000000 --- a/pkg/exporter/refs.go +++ /dev/null @@ -1,153 +0,0 @@ -package exporter - -import ( - 
"context" - "strings" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - log "github.com/sirupsen/logrus" -) - -func getRefs( - projectName string, - filterRegexp string, - maxAgeSeconds uint, - fetchMergeRequestsPipelinesRefs bool, - fetchMergeRequestsPipelinesRefsInitLimit int) (map[string]schemas.RefKind, error) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - branches, err := gitlabClient.GetProjectBranches(projectName, filterRegexp, maxAgeSeconds) - if err != nil { - return nil, err - } - - tags, err := gitlabClient.GetProjectTags(projectName, filterRegexp, maxAgeSeconds) - if err != nil { - return nil, err - } - - mergeRequests := []string{} - if fetchMergeRequestsPipelinesRefs { - mergeRequests, err = gitlabClient.GetProjectMergeRequestsPipelines(projectName, fetchMergeRequestsPipelinesRefsInitLimit, maxAgeSeconds) - if err != nil { - return nil, err - } - } - - foundRefs := map[string]schemas.RefKind{} - for kind, refs := range map[schemas.RefKind][]string{ - schemas.RefKindBranch: branches, - schemas.RefKindTag: tags, - schemas.RefKindMergeRequest: mergeRequests, - } { - for _, ref := range refs { - if _, ok := foundRefs[ref]; ok { - log.Warn("found duplicate ref for project") - continue - } - foundRefs[ref] = kind - } - } - return foundRefs, nil -} - -func pullRefsFromProject(p schemas.Project) error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - gp, err := gitlabClient.GetProject(p.Name) - if err != nil { - return err - } - - refs, err := getRefs( - p.Name, - p.Pull.Refs.Regexp(), - p.Pull.Refs.MaxAgeSeconds(), - p.Pull.Refs.From.MergeRequests.Enabled(), - p.Pull.Refs.From.MergeRequests.Depth(), - ) - if err != nil { - return err - } - - for ref, kind := range refs { - ref := schemas.NewRef( - kind, - p.Name, - ref, - strings.Join(gp.TagList, ","), - p.OutputSparseStatusMetrics(), - p.Pull.Pipeline.Jobs.Enabled(), - p.Pull.Pipeline.Jobs.FromChildPipelines.Enabled(), - 
p.Pull.Pipeline.Jobs.RunnerDescription.Enabled(), - p.Pull.Pipeline.Variables.Enabled(), - p.Pull.Pipeline.Variables.Regexp(), - p.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp(), - ) - - refExists, err := store.RefExists(ref.Key()) - if err != nil { - return err - } - - if !refExists { - log.WithFields(log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "ref-kind": ref.Kind, - }).Info("discovered new ref") - - if err = store.SetRef(ref); err != nil { - return err - } - - go schedulePullRefMetrics(context.Background(), ref) - } - } - return nil -} - -func pullRefsFromPipelines(p schemas.Project) error { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - log.WithFields(log.Fields{ - "init-operation": true, - "project-name": p.Name, - }).Debug("fetching project") - - gp, err := gitlabClient.GetProject(p.Name) - if err != nil { - return err - } - - refs, err := gitlabClient.GetRefsFromPipelines(p, strings.Join(gp.TagList, ",")) - if err != nil { - return err - } - - // Immediately trigger a pull of the ref - for _, ref := range refs { - refExists, err := store.RefExists(ref.Key()) - if err != nil { - return err - } - - if !refExists { - log.WithFields(log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "ref-kind": ref.Kind, - }).Info("discovered new ref from pipelines") - - if err = store.SetRef(ref); err != nil { - return err - } - - go schedulePullRefMetrics(context.Background(), ref) - } - } - return nil -} diff --git a/pkg/exporter/refs_test.go b/pkg/exporter/refs_test.go deleted file mode 100644 index 86a4b450..00000000 --- a/pkg/exporter/refs_test.go +++ /dev/null @@ -1,131 +0,0 @@ -package exporter - -import ( - "fmt" - "net/http" - "testing" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/stretchr/testify/assert" -) - -func TestGetRefs(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - 
mux.HandleFunc("/api/v4/projects/foo/repository/branches", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"name":"keep/dev"},{"name":"keep/main"}]`) - }) - - mux.HandleFunc("/api/v4/projects/foo/repository/tags", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"name":"keep/dev"},{"name":"keep/0.0.2"}]`) - }) - - mux.HandleFunc("/api/v4/projects/foo/pipelines", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"id":1,"ref":"refs/merge-requests/foo"}]`) - }) - - foundRefs, err := getRefs("foo", "^keep", 0, true, 10) - assert.NoError(t, err) - - assert.Equal(t, foundRefs["keep/0.0.2"], schemas.RefKindTag) - assert.Equal(t, foundRefs["keep/main"], schemas.RefKindBranch) - assert.Equal(t, foundRefs["refs/merge-requests/foo"], schemas.RefKindMergeRequest) - assert.Contains(t, []schemas.RefKind{schemas.RefKindTag, schemas.RefKindBranch}, foundRefs["keep/dev"]) -} - -func TestPullRefsFromProject(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects/foo", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `{"name":"foo"}`) - }) - - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/branches"), - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"name":"main"},{"name":"nope"}]`) - }) - - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/tags"), - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[]`) - }) - - assert.NoError(t, pullRefsFromProject(schemas.Project{Name: "foo"})) - - projectsRefs, _ := store.Refs() - expectedRefs := schemas.Refs{ - "99908380": schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo", - Name: "main", - LatestJobs: make(schemas.Jobs), - OutputSparseStatusMetrics: true, - PullPipelineJobsFromChildPipelinesEnabled: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineVariablesRegexp: ".*", - 
PullPipelineJobsRunnerDescriptionAggregationRegexp: "shared-runners-manager-(\\d*)\\.gitlab\\.com", - }, - } - assert.Equal(t, expectedRefs, projectsRefs) -} - -func TestPullRefsFromPipelines(t *testing.T) { - resetGlobalValues() - mux, server := configureMockedGitlabClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects/foo", - func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `{"name":"foo"}`) - }) - - mux.HandleFunc("/api/v4/projects/foo/pipelines", - func(w http.ResponseWriter, r *http.Request) { - if scope, ok := r.URL.Query()["scope"]; ok && len(scope) == 1 && scope[0] == "branches" { - fmt.Fprint(w, `[{"id":1,"ref":"main"}]`) - return - } - - if scope, ok := r.URL.Query()["scope"]; ok && len(scope) == 1 && scope[0] == "tags" { - fmt.Fprint(w, `[{"id":2,"ref":"master"}]`) - return - } - }) - - assert.NoError(t, pullRefsFromPipelines(schemas.Project{Name: "foo"})) - - projectsRefs, _ := store.Refs() - expectedRefs := schemas.Refs{ - "964648533": schemas.Ref{ - Kind: schemas.RefKindTag, - ProjectName: "foo", - Name: "master", - LatestJobs: make(schemas.Jobs), - OutputSparseStatusMetrics: true, - PullPipelineJobsFromChildPipelinesEnabled: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineVariablesRegexp: ".*", - PullPipelineJobsRunnerDescriptionAggregationRegexp: "shared-runners-manager-(\\d*)\\.gitlab\\.com", - }, - "99908380": schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo", - Name: "main", - LatestJobs: make(schemas.Jobs), - OutputSparseStatusMetrics: true, - PullPipelineJobsFromChildPipelinesEnabled: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineVariablesRegexp: ".*", - PullPipelineJobsRunnerDescriptionAggregationRegexp: "shared-runners-manager-(\\d*)\\.gitlab\\.com", - }, - } - assert.Equal(t, expectedRefs, projectsRefs) -} diff --git a/pkg/exporter/scheduler.go b/pkg/exporter/scheduler.go deleted file mode 100644 index 329e9173..00000000 --- a/pkg/exporter/scheduler.go 
+++ /dev/null @@ -1,515 +0,0 @@ -package exporter - -import ( - "context" - "time" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - log "github.com/sirupsen/logrus" - - "github.com/vmihailenco/taskq/v3" -) - -var ( - pullProjectsFromWildcardTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "getProjectsFromWildcardTask", - Handler: func(ctx context.Context, w schemas.Wildcard) error { - return pullProjectsFromWildcard(w) - }, - }) - pullEnvironmentsFromProjectTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "pullEnvironmentsFromProjectTask", - Handler: func(p schemas.Project) (err error) { - // On errors, we do not want to retry these tasks - if err := pullEnvironmentsFromProject(p); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Warn("pulling environments from project") - } - return - }, - }) - pullEnvironmentMetricsTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "pullEnvironmentMetricsTask", - Handler: func(env schemas.Environment) (err error) { - // On errors, we do not want to retry these tasks - if err := pullEnvironmentMetrics(env); err != nil { - log.WithFields(log.Fields{ - "project-name": env.ProjectName, - "environment-name": env.Name, - "environment-id": env.ID, - "error": err.Error(), - }).Warn("pulling environment metrics") - } - return - }, - }) - pullRefsFromProjectTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "pullRefsFromProjectTask", - Handler: func(p schemas.Project) (err error) { - // On errors, we do not want to retry these tasks - if err := pullRefsFromProject(p); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Warn("pulling refs from project") - } - return - }, - }) - pullRefsFromPipelinesTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "getRefsFromPipelinesTask", - Handler: func(p schemas.Project) (err error) { - // On errors, we do not want to retry these tasks - if err := 
pullRefsFromPipelines(p); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Warn("pulling projects refs from pipelines") - } - return - }, - }) - pullRefMetricsTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "pullRefMetricsTask", - Handler: func(ref schemas.Ref) (err error) { - // On errors, we do not want to retry these tasks - if err := pullRefMetrics(ref); err != nil { - log.WithFields(log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "error": err.Error(), - }).Warn("pulling ref metrics") - } - return - }, - }) - garbageCollectProjectsTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "garbageCollectProjectsTask", - Handler: func() error { - return garbageCollectProjects() - }, - }) - garbageCollectEnvironmentsTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "garbageCollectEnvironmentsTask", - Handler: func() error { - return garbageCollectEnvironments() - }, - }) - garbageCollectRefsTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "garbageCollectRefsTask", - Handler: func() error { - return garbageCollectRefs() - }, - }) - garbageCollectMetricsTask = taskq.RegisterTask(&taskq.TaskOptions{ - Name: "garbageCollectMetricsTask", - Handler: func() error { - return garbageCollectMetrics() - }, - }) -) - -// Schedule .. 
-func schedule(ctx context.Context) { - // Check if some tasks are configured to be run on start - schedulerInit(ctx) - - go func(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - pullProjectsFromWildcardsTicker := time.NewTicker(time.Duration(config.Pull.ProjectsFromWildcards.IntervalSeconds) * time.Second) - pullEnvironmentsFromProjectsTicker := time.NewTicker(time.Duration(config.Pull.EnvironmentsFromProjects.IntervalSeconds) * time.Second) - pullRefsFromProjectsTicker := time.NewTicker(time.Duration(config.Pull.RefsFromProjects.IntervalSeconds) * time.Second) - pullMetricsTicker := time.NewTicker(time.Duration(config.Pull.Metrics.IntervalSeconds) * time.Second) - garbageCollectProjectsTicker := time.NewTicker(time.Duration(config.GarbageCollect.Projects.IntervalSeconds) * time.Second) - garbageCollectEnvironmentsTicker := time.NewTicker(time.Duration(config.GarbageCollect.Environments.IntervalSeconds) * time.Second) - garbageCollectRefsTicker := time.NewTicker(time.Duration(config.GarbageCollect.Refs.IntervalSeconds) * time.Second) - garbageCollectMetricsTicker := time.NewTicker(time.Duration(config.GarbageCollect.Metrics.IntervalSeconds) * time.Second) - - // Ticker configuration - if !config.Pull.ProjectsFromWildcards.Scheduled { - pullProjectsFromWildcardsTicker.Stop() - } - - if !config.Pull.EnvironmentsFromProjects.Scheduled { - pullEnvironmentsFromProjectsTicker.Stop() - } - - if !config.Pull.RefsFromProjects.Scheduled { - pullRefsFromProjectsTicker.Stop() - } - - if !config.Pull.Metrics.Scheduled { - pullMetricsTicker.Stop() - } - - if !config.GarbageCollect.Projects.Scheduled { - garbageCollectProjectsTicker.Stop() - } - - if !config.GarbageCollect.Environments.Scheduled { - garbageCollectEnvironmentsTicker.Stop() - } - - if !config.GarbageCollect.Refs.Scheduled { - garbageCollectRefsTicker.Stop() - } - - if !config.GarbageCollect.Metrics.Scheduled { - garbageCollectMetricsTicker.Stop() - } - - // Waiting for the tickers 
to kick in - for { - select { - case <-ctx.Done(): - log.Info("stopped gitlab api pull orchestration") - return - case <-pullProjectsFromWildcardsTicker.C: - schedulePullProjectsFromWildcards(ctx) - case <-pullEnvironmentsFromProjectsTicker.C: - schedulePullEnvironmentsFromProjects(ctx) - case <-pullRefsFromProjectsTicker.C: - schedulePullRefsFromProjects(ctx) - case <-pullMetricsTicker.C: - schedulePullMetrics(ctx) - case <-garbageCollectProjectsTicker.C: - scheduleGarbageCollectProjects(ctx) - case <-garbageCollectEnvironmentsTicker.C: - scheduleGarbageCollectEnvironments(ctx) - case <-garbageCollectRefsTicker.C: - scheduleGarbageCollectRefs(ctx) - case <-garbageCollectMetricsTicker.C: - scheduleGarbageCollectMetrics(ctx) - } - } - }(ctx) -} - -func schedulerInit(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if config.Pull.ProjectsFromWildcards.OnInit { - schedulePullProjectsFromWildcards(ctx) - } - - if config.Pull.EnvironmentsFromProjects.OnInit { - schedulePullEnvironmentsFromProjects(ctx) - } - - if config.Pull.RefsFromProjects.OnInit { - schedulePullRefsFromProjects(ctx) - } - - if config.Pull.Metrics.OnInit { - schedulePullMetrics(ctx) - } - - if config.GarbageCollect.Projects.OnInit { - scheduleGarbageCollectProjects(ctx) - } - - if config.GarbageCollect.Environments.OnInit { - scheduleGarbageCollectEnvironments(ctx) - } - - if config.GarbageCollect.Refs.OnInit { - scheduleGarbageCollectRefs(ctx) - } - - if config.GarbageCollect.Metrics.OnInit { - scheduleGarbageCollectMetrics(ctx) - } -} - -func schedulePullProjectsFromWildcards(ctx context.Context) { - log.WithFields( - log.Fields{ - "wildcards-count": len(config.Wildcards), - }, - ).Info("scheduling projects from wildcards pull") - - for _, w := range config.Wildcards { - go schedulePullProjectsFromWildcardTask(ctx, w) - } -} - -func schedulePullEnvironmentsFromProjects(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - projectsCount, 
err := store.ProjectsCount() - if err != nil { - log.Error(err.Error()) - } - - log.WithFields( - log.Fields{ - "projects-count": projectsCount, - }, - ).Info("scheduling environments from projects pull") - - projects, err := store.Projects() - if err != nil { - log.Error(err) - } - - for _, p := range projects { - go schedulePullEnvironmentsFromProject(ctx, p) - } -} - -func schedulePullRefsFromProjects(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - projectsCount, err := store.ProjectsCount() - if err != nil { - log.Error(err.Error()) - } - - log.WithFields( - log.Fields{ - "projects-count": projectsCount, - }, - ).Info("scheduling refs from projects pull") - - projects, err := store.Projects() - if err != nil { - log.Error(err) - } - - for _, p := range projects { - go schedulePullRefsFromProject(ctx, p) - } -} - -func schedulePullMetrics(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - refsCount, err := store.RefsCount() - if err != nil { - log.Error(err) - } - - envsCount, err := store.EnvironmentsCount() - if err != nil { - log.Error(err) - } - - log.WithFields( - log.Fields{ - "environments-count": envsCount, - "refs-count": refsCount, - }, - ).Info("scheduling metrics pull") - - // ENVIRONMENTS - envs, err := store.Environments() - if err != nil { - log.Error(err) - } - - for _, env := range envs { - go schedulePullEnvironmentMetrics(ctx, env) - } - - // REFS - refs, err := store.Refs() - if err != nil { - log.Error(err) - } - - for _, ref := range refs { - go schedulePullRefMetrics(ctx, ref) - } -} - -func schedulePullProjectsFromWildcardTask(ctx context.Context, w schemas.Wildcard) { - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if err := pullingQueue.Add(pullProjectsFromWildcardTask.WithArgs(ctx, w)); err != nil { - log.WithFields(log.Fields{ - "wildcard-owner-kind": 
w.Owner.Kind, - "wildcard-owner-name": w.Owner.Name, - "error": err.Error(), - }).Error("scheduling 'projects from wildcard' pull") - } -} - -func schedulePullRefsFromPipeline(ctx context.Context, p schemas.Project) { - if !p.Pull.Refs.From.Pipelines.Enabled() { - log.WithFields(log.Fields{ - "project-name": p.Name, - }).Debug("pull refs from pipelines disabled, not scheduling") - return - } - - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(pullRefsFromPipelinesTask.WithArgs(ctx, p)); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Error("scheduling 'refs from pipeline' pull") - } -} - -func schedulePullEnvironmentsFromProject(ctx context.Context, p schemas.Project) { - if !p.Pull.Environments.Enabled() { - log.WithFields(log.Fields{ - "project-name": p.Name, - }).Debug("pull environments from project disabled, not scheduling") - return - } - - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(pullEnvironmentsFromProjectTask.WithArgs(ctx, p)); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Error("scheduling 'environments from project' pull") - } -} - -func schedulePullEnvironmentMetrics(ctx context.Context, env schemas.Environment) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(pullEnvironmentMetricsTask.WithArgs(ctx, env)); err != nil { - log.WithFields(log.Fields{ - "project-name": env.ProjectName, - "environment-id": env.ID, - "environment-name": env.Name, - "error": err.Error(), - }).Error("scheduling 'ref most recent pipeline metrics' pull") - } -} - 
-func schedulePullRefsFromProject(ctx context.Context, p schemas.Project) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(pullRefsFromProjectTask.WithArgs(ctx, p)); err != nil { - log.WithFields(log.Fields{ - "project-name": p.Name, - "error": err.Error(), - }).Error("scheduling 'refs from project' pull") - } -} - -func schedulePullRefMetrics(ctx context.Context, ref schemas.Ref) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(pullRefMetricsTask.WithArgs(ctx, ref)); err != nil { - log.WithFields(log.Fields{ - "project-name": ref.ProjectName, - "ref-name": ref.Name, - "error": err.Error(), - }).Error("scheduling 'ref most recent pipeline metrics' pull") - } -} - -func scheduleGarbageCollectProjects(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(garbageCollectProjectsTask.WithArgs(ctx)); err != nil { - log.WithFields(log.Fields{ - "error": err.Error(), - }).Error("scheduling 'projects garbage collection' task") - } -} - -func scheduleGarbageCollectEnvironments(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(garbageCollectEnvironmentsTask.WithArgs(ctx)); err != nil { - log.WithFields(log.Fields{ - "error": err.Error(), - }).Error("scheduling 'environments garbage collection' task") - } -} - -func scheduleGarbageCollectRefs(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, 
cannot schedule") - return - } - - if err := pullingQueue.Add(garbageCollectRefsTask.WithArgs(ctx)); err != nil { - log.WithFields(log.Fields{ - "error": err.Error(), - }).Error("scheduling 'refs garbage collection' task") - } -} - -func scheduleGarbageCollectMetrics(ctx context.Context) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if pullingQueue == nil { - log.Warn("uninitialized pulling queue, cannot schedule") - return - } - - if err := pullingQueue.Add(garbageCollectMetricsTask.WithArgs(ctx)); err != nil { - log.WithFields(log.Fields{ - "error": err.Error(), - }).Error("scheduling 'metrics garbage collection' task") - } -} diff --git a/pkg/exporter/scheduler_test.go b/pkg/exporter/scheduler_test.go deleted file mode 100644 index 20842c67..00000000 --- a/pkg/exporter/scheduler_test.go +++ /dev/null @@ -1,24 +0,0 @@ -package exporter - -import ( - "context" - "testing" -) - -func TestSchedulerInit(_ *testing.T) { - resetGlobalValues() - - configureStore() - configurePullingQueue() - config.Pull.ProjectsFromWildcards.OnInit = true - config.Pull.EnvironmentsFromProjects.OnInit = true - config.Pull.RefsFromProjects.OnInit = true - config.Pull.Metrics.OnInit = true - config.GarbageCollect.Projects.OnInit = true - config.GarbageCollect.Environments.OnInit = true - config.GarbageCollect.Refs.OnInit = true - config.GarbageCollect.Metrics.OnInit = true - - schedulerInit(context.Background()) - // TODO: Assert if it worked as expected -} diff --git a/pkg/exporter/store.go b/pkg/exporter/store.go deleted file mode 100644 index 6b8c560b..00000000 --- a/pkg/exporter/store.go +++ /dev/null @@ -1,52 +0,0 @@ -package exporter - -import ( - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - log "github.com/sirupsen/logrus" -) - -func metricLogFields(m schemas.Metric) log.Fields { - return log.Fields{ - "metric-kind": m.Kind, - "metric-labels": m.Labels, - } -} - -func storeGetMetric(m *schemas.Metric) { - cfgUpdateLock.RLock() - defer 
cfgUpdateLock.RUnlock() - - if err := store.GetMetric(m); err != nil { - log.WithFields( - metricLogFields(*m), - ).WithField( - "error", err.Error(), - ).Errorf("reading metric from the store") - } -} - -func storeSetMetric(m schemas.Metric) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if err := store.SetMetric(m); err != nil { - log.WithFields( - metricLogFields(m), - ).WithField( - "error", err.Error(), - ).Errorf("writing metric in the store") - } -} - -func storeDelMetric(m schemas.Metric) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - if err := store.DelMetric(m.Key()); err != nil { - log.WithFields( - metricLogFields(m), - ).WithField( - "error", err.Error(), - ).Errorf("deleting metric from the store") - } -} diff --git a/pkg/exporter/webhooks.go b/pkg/exporter/webhooks.go deleted file mode 100644 index 28d96754..00000000 --- a/pkg/exporter/webhooks.go +++ /dev/null @@ -1,228 +0,0 @@ -package exporter - -import ( - "context" - "fmt" - "io/ioutil" - "net/http" - "reflect" - "regexp" - "strings" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - log "github.com/sirupsen/logrus" - goGitlab "github.com/xanzy/go-gitlab" -) - -// WebhookHandler .. 
-func WebhookHandler(w http.ResponseWriter, r *http.Request) { - logFields := log.Fields{ - "ip-address": r.RemoteAddr, - "user-agent": r.UserAgent(), - } - log.WithFields(logFields).Debug("webhook request") - - if r.Header.Get("X-Gitlab-Token") != config.Server.Webhook.SecretToken { - log.WithFields(logFields).Debug("invalid token provided for a webhook request") - w.WriteHeader(http.StatusForbidden) - fmt.Fprint(w, "{\"error\": \"invalid token\"") - return - } - - if r.Body == http.NoBody { - log.WithFields(logFields).WithField("error", "nil body").Warn("unable to read body of a received webhook") - w.WriteHeader(http.StatusBadRequest) - return - } - - payload, err := ioutil.ReadAll(r.Body) - if err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Warn("unable to read body of a received webhook") - w.WriteHeader(http.StatusBadRequest) - return - } - - event, err := goGitlab.ParseHook(goGitlab.HookEventType(r), payload) - if err != nil { - log.WithFields(logFields).WithFields(logFields).WithField("error", err.Error()).Warn("unable to parse body of a received webhook") - w.WriteHeader(http.StatusBadRequest) - return - } - - switch event := event.(type) { - case *goGitlab.PipelineEvent: - go processPipelineEvent(*event) - case *goGitlab.DeploymentEvent: - go processDeploymentEvent(*event) - default: - log.WithFields(logFields).WithField("event-type", reflect.TypeOf(event).String()).Warn("received a non supported event type as a webhook") - w.WriteHeader(http.StatusUnprocessableEntity) - } -} - -func processPipelineEvent(e goGitlab.PipelineEvent) { - var k schemas.RefKind - if e.MergeRequest.IID != 0 { - k = schemas.RefKindMergeRequest - } else if e.ObjectAttributes.Tag { - k = schemas.RefKindTag - } else { - k = schemas.RefKindBranch - } - - triggerRefMetricsPull(schemas.Ref{ - Kind: k, - ProjectName: e.Project.PathWithNamespace, - Name: e.ObjectAttributes.Ref, - }) -} - -func triggerRefMetricsPull(ref schemas.Ref) { - cfgUpdateLock.RLock() - 
defer cfgUpdateLock.RUnlock() - - logFields := log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "ref-kind": ref.Kind, - } - - exists, err := store.RefExists(ref.Key()) - if err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading ref from the store") - } - - // Let's try to see if the project is configured to export this ref - if !exists { - p := schemas.Project{ - Name: ref.ProjectName, - } - - exists, err = store.ProjectExists(p.Key()) - if err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading project from the store") - } - - // Perhaps the project is discoverable through a wildcard - if !exists && len(config.Wildcards) > 0 { - for _, w := range config.Wildcards { - // If in all our wildcards we have one which can potentially match the project ref - // received, we trigger a scan - if w.Owner.Kind == "" || - (strings.Contains(p.Name, w.Owner.Name) && regexp.MustCompile(w.Pull.Refs.Regexp()).MatchString(ref.Name)) { - go schedulePullProjectsFromWildcardTask(context.TODO(), w) - log.WithFields(logFields).Info("project ref not currently exported but its configuration matches a wildcard, triggering a pull of the projects from this wildcard") - return - } - } - } - - if exists { - if err := store.GetProject(&p); err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading project from the store") - } - - if regexp.MustCompile(p.Pull.Refs.Regexp()).MatchString(ref.Name) { - if err = store.SetRef(ref); err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("writing ref in the store") - } - goto schedulePull - } - } - - log.WithFields(logFields).Info("ref not configured in the exporter, ignoring pipeline webhook") - return - } - -schedulePull: - log.WithFields(logFields).Info("received a pipeline webhook from GitLab for a ref, triggering metrics pull") - // TODO: When all the metrics will be sent over the webhook, we might be able to 
avoid redoing a pull - // eg: 'coverage' is not in the pipeline payload yet, neither is 'artifacts' in the job one - go schedulePullRefMetrics(context.Background(), ref) -} - -func processDeploymentEvent(e goGitlab.DeploymentEvent) { - triggerEnvironmentMetricsPull(schemas.Environment{ - ProjectName: e.Project.PathWithNamespace, - Name: e.Environment, - }) -} - -func triggerEnvironmentMetricsPull(env schemas.Environment) { - cfgUpdateLock.RLock() - defer cfgUpdateLock.RUnlock() - - logFields := log.Fields{ - "project-name": env.ProjectName, - "environment-name": env.Name, - } - - exists, err := store.EnvironmentExists(env.Key()) - if err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading environment from the store") - } - - if !exists { - p := schemas.Project{ - Name: env.ProjectName, - } - - exists, err = store.ProjectExists(p.Key()) - if err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading project from the store") - } - - // Perhaps the project is discoverable through a wildcard - if !exists && len(config.Wildcards) > 0 { - for _, w := range config.Wildcards { - // If in all our wildcards we have one which can potentially match the project ref - // received, we trigger a scan - if w.Pull.Environments.Enabled() && (w.Owner.Kind == "" || (strings.Contains(p.Name, w.Owner.Name) && regexp.MustCompile(w.Pull.Environments.NameRegexp()).MatchString(env.ProjectName))) { - go schedulePullProjectsFromWildcardTask(context.TODO(), w) - log.WithFields(logFields).Info("project environment not currently exported but its configuration matches a wildcard, triggering a pull of the projects from this wildcard") - return - } - } - } - - if exists { - if err := store.GetProject(&p); err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading project from the store") - } - - // As we do not get the environment ID within the deployment event, we need to query it back.. 
- envs, err := gitlabClient.GetProjectEnvironments(p.Name, p.Pull.Environments.NameRegexp()) - if err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("listing project envs from GitLab API") - } - - for envID, envName := range envs { - if envName == env.Name { - env.ID = envID - break - } - } - - if env.ID != 0 { - if err = store.SetEnvironment(env); err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("writing environment in the store") - } - goto schedulePull - } - } - - log.WithFields(logFields).Info("environment not configured in the exporter, ignoring deployment webhook") - return - } - - // Need to refresh the env from the store in order to get at least it's ID - if env.ID == 0 { - if err = store.GetEnvironment(&env); err != nil { - log.WithFields(logFields).WithField("error", err.Error()).Error("reading environment from the store") - } - } - -schedulePull: - log.WithFields(logFields).Info("received a deployment webhook from GitLab for an environment, triggering metrics pull") - go schedulePullEnvironmentMetrics(context.Background(), env) -} diff --git a/pkg/gitlab/branches.go b/pkg/gitlab/branches.go index 157c41de..33407e3a 100644 --- a/pkg/gitlab/branches.go +++ b/pkg/gitlab/branches.go @@ -1,16 +1,27 @@ package gitlab import ( + "context" "regexp" - "time" log "github.com/sirupsen/logrus" goGitlab "github.com/xanzy/go-gitlab" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) // GetProjectBranches .. 
-func (c *Client) GetProjectBranches(projectName, filterRegexp string, maxAgeSeconds uint) ([]string, error) { - var names []string +func (c *Client) GetProjectBranches(ctx context.Context, p schemas.Project) ( + refs schemas.Refs, + err error, +) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetProjectBranches") + defer span.End() + span.SetAttributes(attribute.String("project_name", p.Name)) + + refs = make(schemas.Refs) options := &goGitlab.ListBranchesOptions{ ListOptions: goGitlab.ListOptions{ @@ -19,56 +30,64 @@ func (c *Client) GetProjectBranches(projectName, filterRegexp string, maxAgeSeco }, } - re, err := regexp.Compile(filterRegexp) - if err != nil { - return nil, err + var re *regexp.Regexp + + if re, err = regexp.Compile(p.Pull.Refs.Branches.Regexp); err != nil { + return } for { - c.rateLimit() - branches, resp, err := c.Branches.ListBranches(projectName, options) + c.rateLimit(ctx) + + var ( + branches []*goGitlab.Branch + resp *goGitlab.Response + ) + + branches, resp, err = c.Branches.ListBranches(p.Name, options, goGitlab.WithContext(ctx)) if err != nil { - return names, err + return } + c.requestsRemaining(resp) + for _, branch := range branches { if re.MatchString(branch.Name) { - if maxAgeSeconds > 0 && time.Now().Sub(*branch.Commit.AuthoredDate) > (time.Duration(maxAgeSeconds)*time.Second) { - log.WithFields(log.Fields{ - "project-name": projectName, - "branch": branch.Name, - "regexp": filterRegexp, - "max-age-seconds": maxAgeSeconds, - "authored-date": *branch.Commit.AuthoredDate, - }).Debug("branch matching regexp but last authored at a date outside of the required timeframe, ignoring..") - continue - } - names = append(names, branch.Name) + ref := schemas.NewRef(p, schemas.RefKindBranch, branch.Name) + refs[ref.Key()] = ref } } - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { break } options.Page = resp.NextPage } - return names, nil + return } // GetBranchLatestCommit .. 
-func (c *Client) GetBranchLatestCommit(project, branch string) (string, float64, error) { +func (c *Client) GetBranchLatestCommit(ctx context.Context, project, branch string) (string, float64, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetBranchLatestCommit") + defer span.End() + span.SetAttributes(attribute.String("project_name", project)) + span.SetAttributes(attribute.String("branch_name", branch)) + log.WithFields(log.Fields{ "project-name": project, "branch": branch, }).Debug("reading project branch") - c.rateLimit() - b, _, err := c.Branches.GetBranch(project, branch, nil) + c.rateLimit(ctx) + + b, resp, err := c.Branches.GetBranch(project, branch, goGitlab.WithContext(ctx)) if err != nil { return "", 0, err } + c.requestsRemaining(resp) + return b.Commit.ShortID, float64(b.Commit.CommittedDate.Unix()), nil } diff --git a/pkg/gitlab/branches_test.go b/pkg/gitlab/branches_test.go index 45af96d4..ba7075fb 100644 --- a/pkg/gitlab/branches_test.go +++ b/pkg/gitlab/branches_test.go @@ -7,25 +7,33 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestGetProjectBranches(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/1/repository/branches"), + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/branches"), func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "GET", r.Method) assert.Equal(t, []string{"100"}, r.URL.Query()["per_page"]) currentPage, err := strconv.Atoi(r.URL.Query()["page"][0]) assert.NoError(t, err) - w.Header().Add("X-Total-Pages", "2") + nextPage := currentPage + 1 + + if currentPage == 2 { + nextPage = currentPage + } + w.Header().Add("X-Page", strconv.Itoa(currentPage)) - w.Header().Add("X-Next-Page", strconv.Itoa(currentPage+1)) + w.Header().Add("X-Next-Page", strconv.Itoa(nextPage)) if currentPage == 1 { 
fmt.Fprint(w, `[{"name":"main"},{"name":"dev"}]`) + return } @@ -37,22 +45,29 @@ func TestGetProjectBranches(t *testing.T) { w.WriteHeader(http.StatusNotFound) }) - branches, err := c.GetProjectBranches("1", "^(main)$", 0) + p := schemas.NewProject("foo") + expectedRef := schemas.NewRef(p, schemas.RefKindBranch, "main") + refs, err := c.GetProjectBranches(ctx, p) assert.NoError(t, err) - assert.Len(t, branches, 1) - assert.Equal(t, "main", branches[0]) + assert.Len(t, refs, 1) + assert.Equal(t, schemas.Refs{ + expectedRef.Key(): expectedRef, + }, refs) - // Test invalid project id - _, err = c.GetProjectBranches("0", "", 0) + // Test invalid project name + p.Name = "invalid" + _, err = c.GetProjectBranches(ctx, p) assert.Error(t, err) // Test invalid regexp - _, err = c.GetProjectBranches("0", "[", 0) + p.Name = "foo" + p.Pull.Refs.Branches.Regexp = `[` + _, err = c.GetProjectBranches(ctx, p) assert.Error(t, err) } func TestGetBranchLatestCommit(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/1/repository/branches/main", @@ -67,7 +82,7 @@ func TestGetBranchLatestCommit(t *testing.T) { }`) }) - commitShortID, commitCreatedAt, err := c.GetBranchLatestCommit("1", "main") + commitShortID, commitCreatedAt, err := c.GetBranchLatestCommit(ctx, "1", "main") assert.NoError(t, err) assert.Equal(t, "7b5c3cc", commitShortID) assert.Equal(t, float64(1553540113), commitCreatedAt) diff --git a/pkg/gitlab/client.go b/pkg/gitlab/client.go index 94a9828a..f8084722 100644 --- a/pkg/gitlab/client.go +++ b/pkg/gitlab/client.go @@ -1,18 +1,26 @@ package gitlab import ( + "context" "crypto/tls" "fmt" "net/http" + "strconv" + "sync" + "sync/atomic" "time" "github.com/heptiolabs/healthcheck" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" + "github.com/paulbellamy/ratecounter" goGitlab "github.com/xanzy/go-gitlab" + "go.opentelemetry.io/otel" + + 
"github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" ) const ( - userAgent = "gitlab-ci-pipelines-exporter" + userAgent = "gitlab-ci-pipelines-exporter" + tracerName = "gitlab-ci-pipelines-exporter" ) // Client .. @@ -24,7 +32,14 @@ type Client struct { HTTPClient *http.Client } - RateLimiter ratelimit.Limiter + RateLimiter ratelimit.Limiter + RateCounter *ratecounter.RateCounter + RequestsCounter atomic.Uint64 + RequestsLimit int + RequestsRemaining int + + version GitLabVersion + mutex sync.RWMutex } // ClientConfig .. @@ -58,7 +73,7 @@ func NewClient(cfg ClientConfig) (*Client, error) { goGitlab.WithoutRetries(), } - gc, err := goGitlab.NewClient(cfg.Token, opts...) + gc, err := goGitlab.NewOAuthClient(cfg.Token, opts...) if err != nil { return nil, err } @@ -78,25 +93,80 @@ func NewClient(cfg ClientConfig) (*Client, error) { URL: cfg.ReadinessURL, HTTPClient: readinessCheckHTTPClient, }, + RateCounter: ratecounter.NewRateCounter(time.Second), }, nil } // ReadinessCheck .. 
-func (c *Client) ReadinessCheck() healthcheck.Check { +func (c *Client) ReadinessCheck(ctx context.Context) healthcheck.Check { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ReadinessCheck") + defer span.End() + return func() error { if c.Readiness.HTTPClient == nil { return fmt.Errorf("readiness http client not configured") } - resp, err := c.Readiness.HTTPClient.Get(c.Readiness.URL) - if err == nil && resp.StatusCode != 200 { + req, err := http.NewRequestWithContext( + ctx, + http.MethodGet, + c.Readiness.URL, + nil, + ) + if err != nil { + return err + } + + resp, err := c.Readiness.HTTPClient.Do(req) + if err != nil { + return err + } + + if resp == nil { + return fmt.Errorf("HTTP error: empty response") + } + + if err == nil && resp.StatusCode != http.StatusOK { return fmt.Errorf("HTTP error: %d", resp.StatusCode) } - return err + return nil } } -func (c *Client) rateLimit() { - ratelimit.Take(c.RateLimiter) +func (c *Client) rateLimit(ctx context.Context) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:rateLimit") + defer span.End() + + ratelimit.Take(ctx, c.RateLimiter) + // Used for monitoring purposes + c.RateCounter.Incr(1) + c.RequestsCounter.Add(1) +} + +func (c *Client) UpdateVersion(version GitLabVersion) { + c.mutex.Lock() + defer c.mutex.Unlock() + c.version = version +} + +func (c *Client) Version() GitLabVersion { + c.mutex.RLock() + defer c.mutex.RUnlock() + + return c.version +} + +func (c *Client) requestsRemaining(response *goGitlab.Response) { + if response == nil { + return + } + + if remaining := response.Header.Get("ratelimit-remaining"); remaining != "" { + c.RequestsRemaining, _ = strconv.Atoi(remaining) + } + + if limit := response.Header.Get("ratelimit-limit"); limit != "" { + c.RequestsLimit, _ = strconv.Atoi(limit) + } } diff --git a/pkg/gitlab/client_test.go b/pkg/gitlab/client_test.go index 450267a9..33681114 100644 --- a/pkg/gitlab/client_test.go +++ b/pkg/gitlab/client_test.go @@ -1,19 +1,22 @@ package 
gitlab import ( + "context" "fmt" "net/http" "net/http/httptest" "testing" "time" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" + "github.com/paulbellamy/ratecounter" "github.com/stretchr/testify/assert" goGitlab "github.com/xanzy/go-gitlab" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/ratelimit" ) -// Mocking helpers -func getMockedClient() (*http.ServeMux, *httptest.Server, *Client) { +// Mocking helpers. +func getMockedClient() (context.Context, *http.ServeMux, *httptest.Server, *Client) { mux := http.NewServeMux() server := httptest.NewServer(mux) @@ -26,10 +29,11 @@ func getMockedClient() (*http.ServeMux, *httptest.Server, *Client) { c := &Client{ Client: gc, - RateLimiter: ratelimit.NewLocalLimiter(100), + RateLimiter: ratelimit.NewLocalLimiter(100, 1), + RateCounter: ratecounter.NewRateCounter(time.Second), } - return mux, server, c + return context.Background(), mux, server, c } func TestNewHTTPClient(t *testing.T) { @@ -42,9 +46,9 @@ func TestNewClient(t *testing.T) { URL: "https://gitlab.example.com", Token: "supersecret", UserAgentVersion: "0.0.0", - DisableTLSVerify: false, + DisableTLSVerify: true, ReadinessURL: "https://gitlab.example.com/amialive", - RateLimiter: ratelimit.NewLocalLimiter(10), + RateLimiter: ratelimit.NewLocalLimiter(10, 1), } c, err := NewClient(cfg) @@ -54,28 +58,35 @@ func TestNewClient(t *testing.T) { assert.Equal(t, "https", c.Client.BaseURL().Scheme) assert.Equal(t, "gitlab.example.com", c.Client.BaseURL().Host) assert.Equal(t, "https://gitlab.example.com/amialive", c.Readiness.URL) + assert.True(t, c.Readiness.HTTPClient.Transport.(*http.Transport).TLSClientConfig.InsecureSkipVerify) assert.Equal(t, 5*time.Second, c.Readiness.HTTPClient.Timeout) } func TestReadinessCheck(t *testing.T) { - mux, server, c := getMockedClient() - mux.HandleFunc(fmt.Sprintf("/200"), + ctx, mux, server, c := getMockedClient() + mux.HandleFunc( + "/200", func(w http.ResponseWriter, r *http.Request) { 
assert.Equal(t, "GET", r.Method) w.WriteHeader(http.StatusOK) - }) - mux.HandleFunc(fmt.Sprintf("/500"), + }, + ) + mux.HandleFunc( + "/500", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) - }) + }, + ) - readinessCheck := c.ReadinessCheck() + readinessCheck := c.ReadinessCheck(ctx) assert.Error(t, readinessCheck()) c.Readiness.HTTPClient = NewHTTPClient(false) c.Readiness.URL = fmt.Sprintf("%s/200", server.URL) + assert.NoError(t, readinessCheck()) c.Readiness.URL = fmt.Sprintf("%s/500", server.URL) + assert.Error(t, readinessCheck()) } diff --git a/pkg/gitlab/environments.go b/pkg/gitlab/environments.go index e4e0451c..3bd098de 100644 --- a/pkg/gitlab/environments.go +++ b/pkg/gitlab/environments.go @@ -1,62 +1,119 @@ package gitlab import ( + "context" "regexp" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" log "github.com/sirupsen/logrus" goGitlab "github.com/xanzy/go-gitlab" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) // GetProjectEnvironments .. 
-func (c *Client) GetProjectEnvironments(project, envRegexp string) (map[int]string, error) { - environments := map[int]string{} +func (c *Client) GetProjectEnvironments(ctx context.Context, p schemas.Project) ( + envs schemas.Environments, + err error, +) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetProjectEnvironments") + defer span.End() + span.SetAttributes(attribute.String("project_name", p.Name)) + + envs = make(schemas.Environments) options := &goGitlab.ListEnvironmentsOptions{ - Page: 1, - PerPage: 100, + ListOptions: goGitlab.ListOptions{ + Page: 1, + PerPage: 100, + }, } - re, err := regexp.Compile(envRegexp) + if p.Pull.Environments.ExcludeStopped { + options.States = goGitlab.String("available") + } + + re, err := regexp.Compile(p.Pull.Environments.Regexp) if err != nil { return nil, err } for { - c.rateLimit() - envs, resp, err := c.Environments.ListEnvironments(project, options) + c.rateLimit(ctx) + + var ( + glenvs []*goGitlab.Environment + resp *goGitlab.Response + ) + + glenvs, resp, err = c.Environments.ListEnvironments(p.Name, options, goGitlab.WithContext(ctx)) if err != nil { - return environments, err + return } - for _, env := range envs { - if re.MatchString(env.Name) { - environments[env.ID] = env.Name + c.requestsRemaining(resp) + + for _, glenv := range glenvs { + if re.MatchString(glenv.Name) { + env := schemas.Environment{ + ProjectName: p.Name, + ID: glenv.ID, + Name: glenv.Name, + OutputSparseStatusMetrics: p.OutputSparseStatusMetrics, + } + + if glenv.State == "available" { + env.Available = true + } + + envs[env.Key()] = env } } - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { break } + options.Page = resp.NextPage } - return environments, nil + return } // GetEnvironment .. 
-func (c *Client) GetEnvironment(project string, environmentID int) (schemas.Environment, error) { - environment := schemas.Environment{ +func (c *Client) GetEnvironment( + ctx context.Context, + project string, + environmentID int, +) ( + environment schemas.Environment, + err error, +) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetEnvironment") + defer span.End() + span.SetAttributes(attribute.String("project_name", project)) + span.SetAttributes(attribute.Int("environment_id", environmentID)) + + environment = schemas.Environment{ ProjectName: project, ID: environmentID, } - c.rateLimit() - e, _, err := c.Environments.GetEnvironment(project, environmentID, nil) + c.rateLimit(ctx) + + var ( + e *goGitlab.Environment + resp *goGitlab.Response + ) + + e, resp, err = c.Environments.GetEnvironment(project, environmentID, goGitlab.WithContext(ctx)) if err != nil || e == nil { - return environment, err + return } + c.requestsRemaining(resp) + environment.Name = e.Name environment.ExternalURL = e.ExternalURL @@ -64,35 +121,39 @@ func (c *Client) GetEnvironment(project string, environmentID int) (schemas.Envi environment.Available = true } - if e.LastDeployment != nil { - if e.LastDeployment.Deployable.Tag { - environment.LatestDeployment.RefKind = schemas.RefKindTag - } else { - environment.LatestDeployment.RefKind = schemas.RefKindBranch - } + if e.LastDeployment == nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": project, + "environment-name": e.Name, + }). 
+ Debug("no deployments found for the environment") - environment.LatestDeployment.RefName = e.LastDeployment.Ref - environment.LatestDeployment.JobID = e.LastDeployment.Deployable.ID - environment.LatestDeployment.DurationSeconds = e.LastDeployment.Deployable.Duration - environment.LatestDeployment.Status = e.LastDeployment.Deployable.Status + return + } - if e.LastDeployment.Deployable.User != nil { - environment.LatestDeployment.Username = e.LastDeployment.Deployable.User.Username - } + if e.LastDeployment.Deployable.Tag { + environment.LatestDeployment.RefKind = schemas.RefKindTag + } else { + environment.LatestDeployment.RefKind = schemas.RefKindBranch + } - if e.LastDeployment.Deployable.Commit != nil { - environment.LatestDeployment.CommitShortID = e.LastDeployment.Deployable.Commit.ShortID - } + environment.LatestDeployment.RefName = e.LastDeployment.Ref + environment.LatestDeployment.JobID = e.LastDeployment.Deployable.ID + environment.LatestDeployment.DurationSeconds = e.LastDeployment.Deployable.Duration + environment.LatestDeployment.Status = e.LastDeployment.Deployable.Status - if e.LastDeployment.CreatedAt != nil { - environment.LatestDeployment.Timestamp = float64(e.LastDeployment.CreatedAt.Unix()) - } - } else { - log.WithFields(log.Fields{ - "project-name": project, - "environment-name": e.Name, - }).Warn("no deployments found for the environment") + if e.LastDeployment.Deployable.User != nil { + environment.LatestDeployment.Username = e.LastDeployment.Deployable.User.Username + } + + if e.LastDeployment.Deployable.Commit != nil { + environment.LatestDeployment.CommitShortID = e.LastDeployment.Deployable.Commit.ShortID + } + + if e.LastDeployment.CreatedAt != nil { + environment.LatestDeployment.Timestamp = float64(e.LastDeployment.CreatedAt.Unix()) } - return environment, nil + return } diff --git a/pkg/gitlab/environments_test.go b/pkg/gitlab/environments_test.go index a070b8aa..e77d0e43 100644 --- a/pkg/gitlab/environments_test.go +++ 
b/pkg/gitlab/environments_test.go @@ -6,53 +6,104 @@ import ( "strconv" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestGetProjectEnvironments(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/environments"), + mux.HandleFunc( + "/api/v4/projects/foo/environments", func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, "GET", r.Method) assert.Equal(t, []string{"100"}, r.URL.Query()["per_page"]) currentPage, err := strconv.Atoi(r.URL.Query()["page"][0]) assert.NoError(t, err) + nextPage := currentPage + 1 + if currentPage == 2 { + nextPage = currentPage + } - w.Header().Add("X-Total-Pages", "2") w.Header().Add("X-Page", strconv.Itoa(currentPage)) - w.Header().Add("X-Next-Page", strconv.Itoa(currentPage+1)) + w.Header().Add("X-Next-Page", strconv.Itoa(nextPage)) + + if scope, ok := r.URL.Query()["states"]; ok && len(scope) == 1 && scope[0] == "available" { + fmt.Fprint(w, `[{"id":1338,"name":"main"}]`) + + return + } if currentPage == 1 { - fmt.Fprint(w, `[{"name":"main"},{"id":1337,"name":"dev"}]`) + fmt.Fprint(w, `[{"id":1338,"name":"main"},{"id":1337,"name":"dev"}]`) + return } fmt.Fprint(w, `[]`) - }) + }, + ) - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/0/environments"), + mux.HandleFunc( + "/api/v4/projects/0/environments", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusNotFound) - }) + }, + ) + + p := schemas.NewProject("foo") + p.Pull.Environments.Regexp = "^dev" + p.Pull.Environments.ExcludeStopped = false + + xenv := schemas.Environment{ + ProjectName: "foo", + Name: "dev", + ID: 1337, + OutputSparseStatusMetrics: true, + } + + xenvs := schemas.Environments{ + xenv.Key(): xenv, + } - envs, err := c.GetProjectEnvironments("foo", "^dev") + envs, err := 
c.GetProjectEnvironments(ctx, p) assert.NoError(t, err) - assert.Equal(t, map[int]string{1337: "dev"}, envs) + assert.Equal(t, xenvs, envs) // Test invalid project - _, err = c.GetProjectEnvironments("0", "") + p.Name = "" + _, err = c.GetProjectEnvironments(ctx, p) assert.Error(t, err) // Test invalid regexp - _, err = c.GetProjectEnvironments("1", "[") + p.Name = "foo" + p.Pull.Environments.Regexp = "[" + _, err = c.GetProjectEnvironments(ctx, p) assert.Error(t, err) + + // Test exclude stopped + xenv = schemas.Environment{ + ProjectName: "foo", + Name: "main", + ID: 1338, + OutputSparseStatusMetrics: true, + } + + xenvs = schemas.Environments{ + xenv.Key(): xenv, + } + + p.Pull.Environments.Regexp = ".*" + p.Pull.Environments.ExcludeStopped = true + envs, err = c.GetProjectEnvironments(ctx, p) + assert.NoError(t, err) + assert.Equal(t, xenvs, envs) } func TestGetEnvironment(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/foo/environments/1", @@ -83,7 +134,7 @@ func TestGetEnvironment(t *testing.T) { }`) }) - e, err := c.GetEnvironment("foo", 1) + e, err := c.GetEnvironment(ctx, "foo", 1) assert.NoError(t, err) assert.NotNil(t, e) diff --git a/pkg/gitlab/jobs.go b/pkg/gitlab/jobs.go index 5d635d14..156caac5 100644 --- a/pkg/gitlab/jobs.go +++ b/pkg/gitlab/jobs.go @@ -1,33 +1,46 @@ package gitlab import ( + "context" + "reflect" + "strconv" "strings" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" log "github.com/sirupsen/logrus" goGitlab "github.com/xanzy/go-gitlab" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) // ListRefPipelineJobs .. 
-func (c *Client) ListRefPipelineJobs(ref schemas.Ref) (jobs []schemas.Job, err error) { - if ref.LatestPipeline == (schemas.Pipeline{}) { +func (c *Client) ListRefPipelineJobs(ctx context.Context, ref schemas.Ref) (jobs []schemas.Job, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ListRefPipelineJobs") + defer span.End() + span.SetAttributes(attribute.String("project_name", ref.Project.Name)) + span.SetAttributes(attribute.String("ref_name", ref.Name)) + + if reflect.DeepEqual(ref.LatestPipeline, (schemas.Pipeline{})) { log.WithFields( log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "ref": ref.Name, }, ).Debug("most recent pipeline not defined, exiting..") + return } - jobs, err = c.ListPipelineJobs(ref.ProjectName, ref.LatestPipeline.ID) + jobs, err = c.ListPipelineJobs(ctx, ref.Project.Name, ref.LatestPipeline.ID) if err != nil { return } - if ref.PullPipelineJobsFromChildPipelinesEnabled { + if ref.Project.Pull.Pipeline.Jobs.FromChildPipelines.Enabled { var childJobs []schemas.Job - childJobs, err = c.ListPipelineChildJobs(ref.ProjectName, ref.LatestPipeline.ID) + + childJobs, err = c.ListPipelineChildJobs(ctx, ref.Project.Name, ref.LatestPipeline.ID) if err != nil { return } @@ -39,9 +52,16 @@ func (c *Client) ListRefPipelineJobs(ref schemas.Ref) (jobs []schemas.Job, err e } // ListPipelineJobs .. 
-func (c *Client) ListPipelineJobs(projectName string, pipelineID int) (jobs []schemas.Job, err error) { - var foundJobs []*goGitlab.Job - var resp *goGitlab.Response +func (c *Client) ListPipelineJobs(ctx context.Context, projectNameOrID string, pipelineID int) (jobs []schemas.Job, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ListPipelineJobs") + defer span.End() + span.SetAttributes(attribute.String("project_name_or_id", projectNameOrID)) + span.SetAttributes(attribute.Int("pipeline_id", pipelineID)) + + var ( + foundJobs []*goGitlab.Job + resp *goGitlab.Response + ) options := &goGitlab.ListJobsOptions{ ListOptions: goGitlab.ListOptions{ @@ -51,36 +71,48 @@ func (c *Client) ListPipelineJobs(projectName string, pipelineID int) (jobs []sc } for { - c.rateLimit() - foundJobs, resp, err = c.Jobs.ListPipelineJobs(projectName, pipelineID, options) + c.rateLimit(ctx) + + foundJobs, resp, err = c.Jobs.ListPipelineJobs(projectNameOrID, pipelineID, options, goGitlab.WithContext(ctx)) if err != nil { return } + c.requestsRemaining(resp) + for _, job := range foundJobs { jobs = append(jobs, schemas.NewJob(*job)) } - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { log.WithFields( log.Fields{ - "project-name": projectName, - "pipeline-id": pipelineID, - "jobs-count": resp.TotalItems, + "project-name-or-id": projectNameOrID, + "pipeline-id": pipelineID, + "jobs-count": resp.TotalItems, }, ).Debug("found pipeline jobs") + break } options.Page = resp.NextPage } + return } // ListPipelineBridges .. 
-func (c *Client) ListPipelineBridges(projectName string, pipelineID int) (bridges []*goGitlab.Bridge, err error) { - var foundBridges []*goGitlab.Bridge - var resp *goGitlab.Response +func (c *Client) ListPipelineBridges(ctx context.Context, projectNameOrID string, pipelineID int) (bridges []*goGitlab.Bridge, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ListPipelineBridges") + defer span.End() + span.SetAttributes(attribute.String("project_name_or_id", projectNameOrID)) + span.SetAttributes(attribute.Int("pipeline_id", pipelineID)) + + var ( + foundBridges []*goGitlab.Bridge + resp *goGitlab.Response + ) options := &goGitlab.ListJobsOptions{ ListOptions: goGitlab.ListOptions{ @@ -90,44 +122,62 @@ func (c *Client) ListPipelineBridges(projectName string, pipelineID int) (bridge } for { - c.rateLimit() - foundBridges, resp, err = c.Jobs.ListPipelineBridges(projectName, pipelineID, options) + c.rateLimit(ctx) + + foundBridges, resp, err = c.Jobs.ListPipelineBridges(projectNameOrID, pipelineID, options, goGitlab.WithContext(ctx)) if err != nil { return } + c.requestsRemaining(resp) + bridges = append(bridges, foundBridges...) - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { log.WithFields( log.Fields{ - "project-name": projectName, - "pipeline-id": pipelineID, - "bridges-count": resp.TotalItems, + "project-name-or-id": projectNameOrID, + "pipeline-id": pipelineID, + "bridges-count": resp.TotalItems, }, ).Debug("found pipeline bridges") + break } options.Page = resp.NextPage } + return } // ListPipelineChildJobs .. 
-func (c *Client) ListPipelineChildJobs(projectName string, parentPipelineID int) (jobs []schemas.Job, err error) { - pipelineIDs := []int{parentPipelineID} +func (c *Client) ListPipelineChildJobs(ctx context.Context, projectNameOrID string, parentPipelineID int) (jobs []schemas.Job, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ListPipelineChildJobs") + defer span.End() + span.SetAttributes(attribute.String("project_name_or_id", projectNameOrID)) + span.SetAttributes(attribute.Int("parent_pipeline_id", parentPipelineID)) + + type pipelineDef struct { + projectNameOrID string + pipelineID int + } + + pipelines := []pipelineDef{{projectNameOrID, parentPipelineID}} for { - if len(pipelineIDs) == 0 { + if len(pipelines) == 0 { return } - pipelineID := pipelineIDs[len(pipelineIDs)-1] - pipelineIDs = pipelineIDs[:len(pipelineIDs)-1] + var ( + foundBridges []*goGitlab.Bridge + pipeline = pipelines[len(pipelines)-1] + ) - var foundBridges []*goGitlab.Bridge - foundBridges, err = c.ListPipelineBridges(projectName, pipelineID) + pipelines = pipelines[:len(pipelines)-1] + + foundBridges, err = c.ListPipelineBridges(ctx, pipeline.projectNameOrID, pipeline.pipelineID) if err != nil { return } @@ -140,9 +190,11 @@ func (c *Client) ListPipelineChildJobs(projectName string, parentPipelineID int) continue } - pipelineIDs = append(pipelineIDs, foundBridge.DownstreamPipeline.ID) + pipelines = append(pipelines, pipelineDef{strconv.Itoa(foundBridge.DownstreamPipeline.ProjectID), foundBridge.DownstreamPipeline.ID}) + var foundJobs []schemas.Job - foundJobs, err = c.ListPipelineJobs(projectName, foundBridge.DownstreamPipeline.ID) + + foundJobs, err = c.ListPipelineJobs(ctx, strconv.Itoa(foundBridge.DownstreamPipeline.ProjectID), foundBridge.DownstreamPipeline.ID) if err != nil { return } @@ -153,14 +205,20 @@ func (c *Client) ListPipelineChildJobs(projectName string, parentPipelineID int) } // ListRefMostRecentJobs .. 
-func (c *Client) ListRefMostRecentJobs(ref schemas.Ref) (jobs []schemas.Job, err error) { +func (c *Client) ListRefMostRecentJobs(ctx context.Context, ref schemas.Ref) (jobs []schemas.Job, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ListRefMostRecentJobs") + defer span.End() + span.SetAttributes(attribute.String("project_name", ref.Project.Name)) + span.SetAttributes(attribute.String("ref_name", ref.Name)) + if len(ref.LatestJobs) == 0 { log.WithFields( log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "ref": ref.Name, }, ).Debug("no jobs are currently held in memory, exiting..") + return } @@ -170,26 +228,45 @@ func (c *Client) ListRefMostRecentJobs(ref schemas.Ref) (jobs []schemas.Job, err jobsToRefresh[k] = v } - var foundJobs []*goGitlab.Job - var resp *goGitlab.Response - - options := &goGitlab.ListJobsOptions{ - ListOptions: goGitlab.ListOptions{ - Page: 1, - PerPage: 100, - }, + var ( + foundJobs []*goGitlab.Job + resp *goGitlab.Response + opt *goGitlab.ListJobsOptions + ) + + keysetPagination := c.Version().PipelineJobsKeysetPaginationSupported() + if keysetPagination { + opt = &goGitlab.ListJobsOptions{ + ListOptions: goGitlab.ListOptions{ + Pagination: "keyset", + PerPage: 100, + }, + } + } else { + opt = &goGitlab.ListJobsOptions{ + ListOptions: goGitlab.ListOptions{ + Page: 1, + PerPage: 100, + }, + } } + options := []goGitlab.RequestOptionFunc{goGitlab.WithContext(ctx)} + for { - c.rateLimit() - foundJobs, resp, err = c.Jobs.ListProjectJobs(ref.ProjectName, options) + c.rateLimit(ctx) + + foundJobs, resp, err = c.Jobs.ListProjectJobs(ref.Project.Name, opt, options...) 
if err != nil { return } + c.requestsRemaining(resp) + for _, job := range foundJobs { if _, ok := jobsToRefresh[job.Name]; ok { - if ref.Name == job.Ref { + jobRefName, _ := schemas.GetMergeRequestIIDFromRefName(job.Ref) + if ref.Name == jobRefName { jobs = append(jobs, schemas.NewJob(*job)) delete(jobsToRefresh, job.Name) } @@ -198,33 +275,45 @@ func (c *Client) ListRefMostRecentJobs(ref schemas.Ref) (jobs []schemas.Job, err if len(jobsToRefresh) == 0 { log.WithFields( log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "ref": ref.Name, "jobs-count": len(ref.LatestJobs), }, ).Debug("found all jobs to refresh") + return } } - if resp.CurrentPage >= resp.TotalPages { + if keysetPagination && resp.NextLink == "" || + (!keysetPagination && resp.CurrentPage >= resp.NextPage) { var notFoundJobs []string + for k := range jobsToRefresh { notFoundJobs = append(notFoundJobs, k) } - log.WithFields( - log.Fields{ - "project-name": ref.ProjectName, - "ref": ref.Name, - "jobs-count": resp.TotalItems, - "not-found-jobs": strings.Join(notFoundJobs, ","), - }, - ).Warn("found some ref jobs but did not manage to refresh all jobs which were in memory") + log.WithContext(ctx). + WithFields( + log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "jobs-count": resp.TotalItems, + "not-found-jobs": strings.Join(notFoundJobs, ","), + }, + ). 
+ Warn("found some ref jobs but did not manage to refresh all jobs which were in memory") + break } - options.Page = resp.NextPage + if keysetPagination { + options = []goGitlab.RequestOptionFunc{ + goGitlab.WithContext(ctx), + goGitlab.WithKeysetPaginationParameters(resp.NextLink), + } + } } + return } diff --git a/pkg/gitlab/jobs_test.go b/pkg/gitlab/jobs_test.go index bb39c423..93b28813 100644 --- a/pkg/gitlab/jobs_test.go +++ b/pkg/gitlab/jobs_test.go @@ -6,22 +6,22 @@ import ( "net/url" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestListRefPipelineJobs(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() ref := schemas.Ref{ - ProjectName: "foo", - Name: "yay", - PullPipelineJobsFromChildPipelinesEnabled: true, + Project: schemas.NewProject("foo"), + Name: "yay", } // Test with no most recent pipeline defined - jobs, err := c.ListRefPipelineJobs(ref) + jobs, err := c.ListRefPipelineJobs(ctx, ref) assert.NoError(t, err) assert.Len(t, jobs, 0) @@ -30,27 +30,27 @@ func TestListRefPipelineJobs(t *testing.T) { fmt.Fprint(w, `[{"id":10}]`) }) - mux.HandleFunc("/api/v4/projects/foo/pipelines/2/jobs", + mux.HandleFunc("/api/v4/projects/11/pipelines/2/jobs", func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, `[{"id":20}]`) }) - mux.HandleFunc("/api/v4/projects/foo/pipelines/3/jobs", + mux.HandleFunc("/api/v4/projects/12/pipelines/3/jobs", func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, `[{"id":30}]`) }) mux.HandleFunc("/api/v4/projects/foo/pipelines/1/bridges", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"id":1,"downstream_pipeline":{"id":2}}]`) + fmt.Fprint(w, `[{"id":1,"downstream_pipeline":{"id":2, "project_id": 11}}]`) }) - mux.HandleFunc("/api/v4/projects/foo/pipelines/2/bridges", + 
mux.HandleFunc("/api/v4/projects/11/pipelines/2/bridges", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprint(w, `[{"id":1,"downstream_pipeline":{"id":3}}]`) + fmt.Fprint(w, `[{"id":1,"downstream_pipeline":{"id":3, "project_id": 12}}]`) }) - mux.HandleFunc("/api/v4/projects/foo/pipelines/3/bridges", + mux.HandleFunc("/api/v4/projects/12/pipelines/3/bridges", func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, `[]`) }) @@ -59,7 +59,7 @@ func TestListRefPipelineJobs(t *testing.T) { ID: 1, } - jobs, err = c.ListRefPipelineJobs(ref) + jobs, err = c.ListRefPipelineJobs(ctx, ref) assert.NoError(t, err) assert.Equal(t, []schemas.Job{ {ID: 10}, @@ -68,13 +68,13 @@ func TestListRefPipelineJobs(t *testing.T) { }, jobs) // Test invalid project id - ref.ProjectName = "bar" - _, err = c.ListRefPipelineJobs(ref) + ref.Project.Name = "bar" + _, err = c.ListRefPipelineJobs(ctx, ref) assert.Error(t, err) } func TestListPipelineJobs(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/foo/pipelines/1/jobs", @@ -93,17 +93,17 @@ func TestListPipelineJobs(t *testing.T) { w.WriteHeader(http.StatusNotFound) }) - jobs, err := c.ListPipelineJobs("foo", 1) + jobs, err := c.ListPipelineJobs(ctx, "foo", 1) assert.NoError(t, err) assert.Len(t, jobs, 2) // Test invalid project id - _, err = c.ListPipelineJobs("bar", 1) + _, err = c.ListPipelineJobs(ctx, "bar", 1) assert.Error(t, err) } func TestListPipelineBridges(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/foo/pipelines/1/bridges", @@ -122,74 +122,103 @@ func TestListPipelineBridges(t *testing.T) { w.WriteHeader(http.StatusNotFound) }) - bridges, err := c.ListPipelineBridges("foo", 1) + bridges, err := c.ListPipelineBridges(ctx, "foo", 1) assert.NoError(t, err) assert.Len(t, bridges, 1) // Test invalid project id - _, err = 
c.ListPipelineBridges("bar", 1) + _, err = c.ListPipelineBridges(ctx, "bar", 1) assert.Error(t, err) } func TestListRefMostRecentJobs(t *testing.T) { - mux, server, c := getMockedClient() - defer server.Close() - - ref := schemas.Ref{ - ProjectName: "foo", - Name: "yay", + tests := []struct { + name string + keysetPagination bool + expectedQueryParams url.Values + }{ + { + name: "offset pagination", + keysetPagination: false, + expectedQueryParams: url.Values{ + "page": []string{"1"}, + "per_page": []string{"100"}, + }, + }, + { + name: "keyset pagination", + keysetPagination: true, + expectedQueryParams: url.Values{ + "pagination": []string{"keyset"}, + "per_page": []string{"100"}, + }, + }, } - jobs, err := c.ListRefMostRecentJobs(ref) - assert.NoError(t, err) - assert.Len(t, jobs, 0) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() - mux.HandleFunc("/api/v4/projects/foo/jobs", - func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "GET", r.Method) - expectedQueryParams := url.Values{ - "page": []string{"1"}, - "per_page": []string{"100"}, + if tc.keysetPagination { + c.UpdateVersion(NewGitLabVersion("16.0.0")) + } else { + c.UpdateVersion(NewGitLabVersion("15.0.0")) } - assert.Equal(t, expectedQueryParams, r.URL.Query()) - fmt.Fprint(w, `[{"id":3,"name":"foo","ref":"yay"},{"id":4,"name":"bar","ref":"yay"}]`) - }) - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/bar/jobs"), - func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusNotFound) - }) + ref := schemas.Ref{ + Project: schemas.NewProject("foo"), + Name: "yay", + } - ref.LatestJobs = schemas.Jobs{ - "foo": { - ID: 1, - Name: "foo", - }, - "bar": { - ID: 2, - Name: "bar", - }, - } + jobs, err := c.ListRefMostRecentJobs(ctx, ref) + assert.NoError(t, err) + assert.Len(t, jobs, 0) + + mux.HandleFunc("/api/v4/projects/foo/jobs", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, 
"GET", r.Method) + assert.Equal(t, tc.expectedQueryParams, r.URL.Query()) + fmt.Fprint(w, `[{"id":3,"name":"foo","ref":"yay"},{"id":4,"name":"bar","ref":"yay"}]`) + }) + + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/bar/jobs"), + func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + }) + + ref.LatestJobs = schemas.Jobs{ + "foo": { + ID: 1, + Name: "foo", + }, + "bar": { + ID: 2, + Name: "bar", + }, + } - jobs, err = c.ListRefMostRecentJobs(ref) - assert.NoError(t, err) - assert.Len(t, jobs, 2) - assert.Equal(t, 3, jobs[0].ID) - assert.Equal(t, 4, jobs[1].ID) + jobs, err = c.ListRefMostRecentJobs(ctx, ref) + assert.NoError(t, err) + assert.Len(t, jobs, 2) + assert.Equal(t, 3, jobs[0].ID) + assert.Equal(t, 4, jobs[1].ID) - ref.LatestJobs["baz"] = schemas.Job{ - ID: 5, - Name: "baz", - } + ref.LatestJobs["baz"] = schemas.Job{ + ID: 5, + Name: "baz", + } - jobs, err = c.ListRefMostRecentJobs(ref) - assert.NoError(t, err) - assert.Len(t, jobs, 2) - assert.Equal(t, 3, jobs[0].ID) - assert.Equal(t, 4, jobs[1].ID) + jobs, err = c.ListRefMostRecentJobs(ctx, ref) + assert.NoError(t, err) + assert.Len(t, jobs, 2) + assert.Equal(t, 3, jobs[0].ID) + assert.Equal(t, 4, jobs[1].ID) - // Test invalid project id - ref.ProjectName = "bar" - _, err = c.ListRefMostRecentJobs(ref) - assert.Error(t, err) + // Test invalid project id + ref.Project.Name = "bar" + _, err = c.ListRefMostRecentJobs(ctx, ref) + assert.Error(t, err) + }) + } } diff --git a/pkg/gitlab/pipelines.go b/pkg/gitlab/pipelines.go index cd3331d0..4557c7a9 100644 --- a/pkg/gitlab/pipelines.go +++ b/pkg/gitlab/pipelines.go @@ -1,35 +1,56 @@ package gitlab import ( + "context" "fmt" + "reflect" "regexp" + "strconv" "strings" "time" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" log "github.com/sirupsen/logrus" - "github.com/xanzy/go-gitlab" goGitlab "github.com/xanzy/go-gitlab" -) + "go.opentelemetry.io/otel" + 
"go.opentelemetry.io/otel/attribute" -const ( - mergeRequestRefRegexp = `^refs/merge-requests` + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) // GetRefPipeline .. -func (c *Client) GetRefPipeline(ref schemas.Ref, pipelineID int) (p schemas.Pipeline, err error) { - c.rateLimit() - var gp *goGitlab.Pipeline - gp, _, err = c.Pipelines.GetPipeline(ref.ProjectName, pipelineID) +func (c *Client) GetRefPipeline(ctx context.Context, ref schemas.Ref, pipelineID int) (p schemas.Pipeline, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetRefPipeline") + defer span.End() + span.SetAttributes(attribute.String("project_name", ref.Project.Name)) + span.SetAttributes(attribute.String("ref_name", ref.Name)) + span.SetAttributes(attribute.Int("pipeline_id", pipelineID)) + + c.rateLimit(ctx) + + gp, resp, err := c.Pipelines.GetPipeline(ref.Project.Name, pipelineID, goGitlab.WithContext(ctx)) if err != nil || gp == nil { - return schemas.Pipeline{}, fmt.Errorf("could not read content of pipeline %s - %s | %s", ref.ProjectName, ref.Name, err.Error()) + return schemas.Pipeline{}, fmt.Errorf("could not read content of pipeline %s - %s | %s", ref.Project.Name, ref.Name, err.Error()) } - return schemas.NewPipeline(*gp), nil + + c.requestsRemaining(resp) + + return schemas.NewPipeline(ctx, *gp), nil } // GetProjectPipelines .. 
-func (c *Client) GetProjectPipelines(projectName string, options *goGitlab.ListProjectPipelinesOptions) ([]*goGitlab.PipelineInfo, error) { +func (c *Client) GetProjectPipelines( + ctx context.Context, + projectName string, + options *goGitlab.ListProjectPipelinesOptions, +) ( + []*goGitlab.PipelineInfo, + *goGitlab.Response, + error, +) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetProjectPipelines") + defer span.End() + span.SetAttributes(attribute.String("project_name", projectName)) + fields := log.Fields{ "project-name": projectName, } @@ -50,97 +71,67 @@ func (c *Client) GetProjectPipelines(projectName string, options *goGitlab.ListP fields["scope"] = *options.Scope } - log.WithFields(fields).Debug("listing project pipelines") + fields["page"] = options.Page + log.WithFields(fields).Trace("listing project pipelines") - c.rateLimit() - pipelines, _, err := c.Pipelines.ListProjectPipelines(projectName, options) - if err != nil { - return nil, fmt.Errorf("error listing project pipelines for project %s: %s", projectName, err.Error()) - } - return pipelines, nil -} - -// GetProjectMergeRequestsPipelines .. 
-func (c *Client) GetProjectMergeRequestsPipelines(projectName string, fetchLimit int, maxAgeSeconds uint) ([]string, error) { - var names []string + c.rateLimit(ctx) - options := &goGitlab.ListProjectPipelinesOptions{ - ListOptions: goGitlab.ListOptions{ - Page: 1, - PerPage: 100, - }, + pipelines, resp, err := c.Pipelines.ListProjectPipelines(projectName, options, goGitlab.WithContext(ctx)) + if err != nil { + return nil, resp, fmt.Errorf("error listing project pipelines for project %s: %s", projectName, err.Error()) } - re := regexp.MustCompile(mergeRequestRefRegexp) - - for { - c.rateLimit() - pipelines, resp, err := c.Pipelines.ListProjectPipelines(projectName, options) - if err != nil { - return nil, fmt.Errorf("error listing project pipelines for project %s: %s", projectName, err.Error()) - } - - for _, pipeline := range pipelines { - if re.MatchString(pipeline.Ref) { - if maxAgeSeconds > 0 && time.Now().Sub(*pipeline.UpdatedAt) > (time.Duration(maxAgeSeconds)*time.Second) { - log.WithFields(log.Fields{ - "project-name": projectName, - "ref": pipeline.Ref, - "ref-kind": schemas.RefKindMergeRequest, - "max-age-seconds": maxAgeSeconds, - "updated-at": *pipeline.UpdatedAt, - }).Debug("merge request ref pipeline last updated at a date outside of the required timeframe, ignoring..") - continue - } - names = append(names, pipeline.Ref) - if len(names) >= fetchLimit { - return names, nil - } - } - } - - if resp.CurrentPage >= resp.TotalPages { - break - } - - options.Page = resp.NextPage - } + c.requestsRemaining(resp) - return names, nil + return pipelines, resp, nil } // GetRefPipelineVariablesAsConcatenatedString .. 
-func (c *Client) GetRefPipelineVariablesAsConcatenatedString(ref schemas.Ref) (string, error) { - if ref.LatestPipeline == (schemas.Pipeline{}) { +func (c *Client) GetRefPipelineVariablesAsConcatenatedString(ctx context.Context, ref schemas.Ref) (string, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetRefPipelineVariablesAsConcatenatedString") + defer span.End() + span.SetAttributes(attribute.String("project_name", ref.Project.Name)) + span.SetAttributes(attribute.String("ref_name", ref.Name)) + + if reflect.DeepEqual(ref.LatestPipeline, (schemas.Pipeline{})) { log.WithFields( log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "ref": ref.Name, }, ).Debug("most recent pipeline not defined, exiting..") + return "", nil } log.WithFields( log.Fields{ - "project-name": ref.ProjectName, + "project-name": ref.Project.Name, "ref": ref.Name, "pipeline-id": ref.LatestPipeline.ID, }, ).Debug("fetching pipeline variables") - variablesFilter, err := regexp.Compile(ref.PullPipelineVariablesRegexp) + variablesFilter, err := regexp.Compile(ref.Project.Pull.Pipeline.Variables.Regexp) if err != nil { - return "", fmt.Errorf("the provided filter regex for pipeline variables is invalid '(%s)': %v", ref.PullPipelineVariablesRegexp, err) + return "", fmt.Errorf( + "the provided filter regex for pipeline variables is invalid '(%s)': %v", + ref.Project.Pull.Pipeline.Variables.Regexp, + err, + ) } - c.rateLimit() - variables, _, err := c.Pipelines.GetPipelineVariables(ref.ProjectName, ref.LatestPipeline.ID) + c.rateLimit(ctx) + + variables, resp, err := c.Pipelines.GetPipelineVariables(ref.Project.Name, ref.LatestPipeline.ID, goGitlab.WithContext(ctx)) if err != nil { return "", fmt.Errorf("could not fetch pipeline variables for %d: %s", ref.LatestPipeline.ID, err.Error()) } + c.requestsRemaining(resp) + var keptVariables []string + if len(variables) > 0 { for _, v := range variables { if variablesFilter.MatchString(v.Key) { @@ -153,84 
+144,239 @@ func (c *Client) GetRefPipelineVariablesAsConcatenatedString(ref schemas.Ref) (s } // GetRefsFromPipelines .. -func (c *Client) GetRefsFromPipelines(p schemas.Project, topics string) (schemas.Refs, error) { - re, err := regexp.Compile(p.Pull.Refs.Regexp()) - if err != nil { - return nil, err - } +func (c *Client) GetRefsFromPipelines(ctx context.Context, p schemas.Project, refKind schemas.RefKind) (refs schemas.Refs, err error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetRefsFromPipelines") + defer span.End() + span.SetAttributes(attribute.String("project_name", p.Name)) + span.SetAttributes(attribute.String("ref_kind", string(refKind))) + + refs = make(schemas.Refs) options := &goGitlab.ListProjectPipelinesOptions{ ListOptions: goGitlab.ListOptions{ - Page: 1, - // TODO: Get a proper loop to split this query up - PerPage: p.Pull.Refs.From.Pipelines.Depth(), + Page: 1, + PerPage: 100, }, - Scope: pointy.String("branches"), + OrderBy: goGitlab.String("updated_at"), } - if options.PerPage > 100 { - log.WithFields(log.Fields{ - "project-name": p.Name, - "required-depth": p.Pull.Refs.From.Pipelines.Depth(), - }).Warn("required pipeline depth was capped to '100'") - options.PerPage = 100 + var re *regexp.Regexp + + if re, err = schemas.GetRefRegexp(p.Pull.Refs, refKind); err != nil { + return } - branchPipelines, err := c.GetProjectPipelines(p.Name, options) - if err != nil { - return nil, err + var ( + mostRecent, maxAgeSeconds uint + limitToMostRecent, excludeDeleted bool + existingRefs schemas.Refs + ) + + switch refKind { + case schemas.RefKindMergeRequest: + maxAgeSeconds = p.Pull.Refs.MergeRequests.MaxAgeSeconds + mostRecent = p.Pull.Refs.MergeRequests.MostRecent + case schemas.RefKindBranch: + options.Scope = goGitlab.String("branches") + maxAgeSeconds = p.Pull.Refs.Branches.MaxAgeSeconds + mostRecent = p.Pull.Refs.Branches.MostRecent + + if p.Pull.Refs.Branches.ExcludeDeleted { + excludeDeleted = true + + if existingRefs, err = 
c.GetProjectBranches(ctx, p); err != nil { + return + } + } + case schemas.RefKindTag: + options.Scope = goGitlab.String("tags") + maxAgeSeconds = p.Pull.Refs.Tags.MaxAgeSeconds + mostRecent = p.Pull.Refs.Tags.MostRecent + + if p.Pull.Refs.Tags.ExcludeDeleted { + excludeDeleted = true + + if existingRefs, err = c.GetProjectTags(ctx, p); err != nil { + return + } + } + default: + return refs, fmt.Errorf("unsupported ref kind %v", refKind) } - options.Scope = pointy.String("tags") - tagsPipelines, err := c.GetProjectPipelines(p.Name, options) - if err != nil { - return nil, err + if mostRecent > 0 { + limitToMostRecent = true + } + + if maxAgeSeconds > 0 { + t := time.Now().Add(-time.Second * time.Duration(maxAgeSeconds)) + options.UpdatedAfter = &t } - refs := make(schemas.Refs) - for kind, pipelines := range map[schemas.RefKind][]*gitlab.PipelineInfo{ - schemas.RefKindBranch: branchPipelines, - schemas.RefKindTag: tagsPipelines, - } { + for { + var ( + pipelines []*goGitlab.PipelineInfo + resp *goGitlab.Response + ) + + pipelines, resp, err = c.GetProjectPipelines(ctx, p.Name, options) + if err != nil { + return + } + for _, pipeline := range pipelines { - if re.MatchString(pipeline.Ref) { - if p.Pull.Refs.MaxAgeSeconds() > 0 && time.Now().Sub(*pipeline.UpdatedAt) > (time.Duration(p.Pull.Refs.MaxAgeSeconds())*time.Second) { - log.WithFields(log.Fields{ - "project-name": p.Name, - "ref": pipeline.Ref, - "ref-kind": kind, - "regexp": p.Pull.Refs.Regexp(), - "max-age-seconds": p.Pull.Refs.MaxAgeSeconds(), - "updated-at": *pipeline.UpdatedAt, - }).Debug("ref matching regexp but pipeline last updated at a date outside of the required timeframe, ignoring..") + refName := pipeline.Ref + if !re.MatchString(refName) { + // It is quite verbose otherwise.. 
+ if refKind != schemas.RefKindMergeRequest { + log.WithField("ref", refName).Debug("discovered pipeline ref not matching regexp") + } + + continue + } + + if refKind == schemas.RefKindMergeRequest { + if refName, err = schemas.GetMergeRequestIIDFromRefName(refName); err != nil { + log.WithContext(ctx). + WithField("ref", refName). + WithError(err). + Warn() + continue } + } - ref := schemas.NewRef( - kind, - p.Name, - pipeline.Ref, - topics, - p.OutputSparseStatusMetrics(), - p.Pull.Pipeline.Jobs.Enabled(), - p.Pull.Pipeline.Jobs.FromChildPipelines.Enabled(), - p.Pull.Pipeline.Jobs.RunnerDescription.Enabled(), - p.Pull.Pipeline.Variables.Enabled(), - p.Pull.Pipeline.Variables.Regexp(), - p.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp(), - ) - - if _, ok := refs[ref.Key()]; !ok { + ref := schemas.NewRef( + p, + refKind, + refName, + ) + + if excludeDeleted { + if _, refExists := existingRefs[ref.Key()]; !refExists { log.WithFields(log.Fields{ - "project-name": p.Name, - "ref": pipeline.Ref, - "ref-kind": kind, - }).Info("found ref") - refs[ref.Key()] = ref + "project-name": ref.Project.Name, + "ref": ref.Name, + "ref-kind": ref.Kind, + }).Debug("found deleted ref, ignoring..") + + continue + } + } + + if _, ok := refs[ref.Key()]; !ok { + log.WithFields(log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "ref-kind": ref.Kind, + }).Trace("found ref") + + refs[ref.Key()] = ref + + if limitToMostRecent { + mostRecent-- + if mostRecent <= 0 { + return + } } } } + + if resp.CurrentPage >= resp.NextPage { + break + } + + options.Page = resp.NextPage + } + + return +} + +// GetRefPipelineTestReport .. 
+func (c *Client) GetRefPipelineTestReport(ctx context.Context, ref schemas.Ref) (schemas.TestReport, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetRefPipelineTestReport") + defer span.End() + span.SetAttributes(attribute.String("project_name", ref.Project.Name)) + span.SetAttributes(attribute.String("ref_name", ref.Name)) + + if reflect.DeepEqual(ref.LatestPipeline, (schemas.Pipeline{})) { + log.WithFields( + log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + }, + ).Debug("most recent pipeline not defined, exiting...") + + return schemas.TestReport{}, nil } - return refs, nil + log.WithFields( + log.Fields{ + "project-name": ref.Project.Name, + "ref": ref.Name, + "pipeline-id": ref.LatestPipeline.ID, + }, + ).Debug("fetching pipeline test report") + + c.rateLimit(ctx) + + type pipelineDef struct { + projectNameOrID string + pipelineID int + } + + var currentPipeline pipelineDef + + baseTestReport := schemas.TestReport{ + TotalTime: 0, + TotalCount: 0, + SuccessCount: 0, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []schemas.TestSuite{}, + } + childPipelines := []pipelineDef{{ref.Project.Name, ref.LatestPipeline.ID}} + + for { + if len(childPipelines) == 0 { + return baseTestReport, nil + } + + currentPipeline, childPipelines = childPipelines[0], childPipelines[1:] + + testReport, resp, err := c.Pipelines.GetPipelineTestReport(currentPipeline.projectNameOrID, currentPipeline.pipelineID, goGitlab.WithContext(ctx)) + if err != nil { + return schemas.TestReport{}, fmt.Errorf("could not fetch test report for %d: %s", ref.LatestPipeline.ID, err.Error()) + } + + c.requestsRemaining(resp) + + convertedTestReport := schemas.NewTestReport(*testReport) + + baseTestReport = schemas.TestReport{ + TotalTime: baseTestReport.TotalTime + convertedTestReport.TotalTime, + TotalCount: baseTestReport.TotalCount + convertedTestReport.TotalCount, + SuccessCount: baseTestReport.SuccessCount + 
convertedTestReport.SuccessCount, + FailedCount: baseTestReport.FailedCount + convertedTestReport.FailedCount, + SkippedCount: baseTestReport.SkippedCount + convertedTestReport.SkippedCount, + ErrorCount: baseTestReport.ErrorCount + convertedTestReport.ErrorCount, + TestSuites: append(baseTestReport.TestSuites, convertedTestReport.TestSuites...), + } + + if ref.Project.Pull.Pipeline.TestReports.FromChildPipelines.Enabled { + foundBridges, err := c.ListPipelineBridges(ctx, currentPipeline.projectNameOrID, currentPipeline.pipelineID) + if err != nil { + return baseTestReport, err + } + + for _, foundBridge := range foundBridges { + if foundBridge.DownstreamPipeline == nil { + continue + } + + childPipelines = append(childPipelines, pipelineDef{strconv.Itoa(foundBridge.DownstreamPipeline.ProjectID), foundBridge.DownstreamPipeline.ID}) + } + } + } } diff --git a/pkg/gitlab/pipelines_test.go b/pkg/gitlab/pipelines_test.go index 5c4492c9..a4a8ebc7 100644 --- a/pkg/gitlab/pipelines_test.go +++ b/pkg/gitlab/pipelines_test.go @@ -6,14 +6,16 @@ import ( "net/url" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" + log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/xanzy/go-gitlab" + "go.openly.dev/pointy" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestGetRefPipeline(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/foo/pipelines/1", @@ -23,18 +25,18 @@ func TestGetRefPipeline(t *testing.T) { }) ref := schemas.Ref{ - ProjectName: "foo", - Name: "yay", + Project: schemas.NewProject("foo"), + Name: "yay", } - pipeline, err := c.GetRefPipeline(ref, 1) + pipeline, err := c.GetRefPipeline(ctx, ref, 1) assert.NoError(t, err) assert.NotNil(t, pipeline) assert.Equal(t, 1, pipeline.ID) } func TestGetProjectPipelines(t *testing.T) { - mux, server, c := 
getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/pipelines"), @@ -50,7 +52,7 @@ func TestGetProjectPipelines(t *testing.T) { fmt.Fprint(w, `[{"id":1},{"id":2}]`) }) - pipelines, err := c.GetProjectPipelines("foo", &gitlab.ListProjectPipelinesOptions{ + pipelines, _, err := c.GetProjectPipelines(ctx, "foo", &gitlab.ListProjectPipelinesOptions{ Ref: pointy.String("foo"), Scope: pointy.String("bar"), }) @@ -59,28 +61,8 @@ func TestGetProjectPipelines(t *testing.T) { assert.Len(t, pipelines, 2) } -func TestGetProjectMergeRequestsPipelines(t *testing.T) { - mux, server, c := getMockedClient() - defer server.Close() - - mux.HandleFunc("/api/v4/projects/foo/pipelines", - func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "GET", r.Method) - expectedQueryParams := url.Values{ - "page": []string{"1"}, - "per_page": []string{"100"}, - } - assert.Equal(t, expectedQueryParams, r.URL.Query()) - fmt.Fprint(w, `[{"id":1,"ref":"refs/merge-requests/foo"},{"id":2,"ref":"refs/merge-requests/bar"},{"id":3,"ref":"yolo"}]`) - }) - - pipelines, err := c.GetProjectMergeRequestsPipelines("foo", 10, 0) - assert.NoError(t, err) - assert.Len(t, pipelines, 2) -} - func TestGetRefPipelineVariablesAsConcatenatedString(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/foo/pipelines/1/variables", @@ -89,15 +71,16 @@ func TestGetRefPipelineVariablesAsConcatenatedString(t *testing.T) { fmt.Fprint(w, `[{"key":"foo","value":"bar"},{"key":"bar","value":"baz"}]`) }) + p := schemas.NewProject("foo") + p.Pull.Pipeline.Variables.Enabled = true + p.Pull.Pipeline.Variables.Regexp = `[` ref := schemas.Ref{ - ProjectName: "foo", - Name: "yay", - PullPipelineVariablesEnabled: true, - PullPipelineVariablesRegexp: "[", + Project: p, + Name: "yay", } // Should return right away as MostRecentPipeline is not defined - 
variables, err := c.GetRefPipelineVariablesAsConcatenatedString(ref) + variables, err := c.GetRefPipelineVariablesAsConcatenatedString(ctx, ref) assert.NoError(t, err) assert.Equal(t, "", variables) @@ -106,21 +89,29 @@ func TestGetRefPipelineVariablesAsConcatenatedString(t *testing.T) { } // Should fail as we have an invalid regexp pattern - variables, err = c.GetRefPipelineVariablesAsConcatenatedString(ref) + variables, err = c.GetRefPipelineVariablesAsConcatenatedString(ctx, ref) assert.Error(t, err) assert.Contains(t, err.Error(), "the provided filter regex for pipeline variables is invalid") assert.Equal(t, "", variables) // Should work - ref.PullPipelineVariablesRegexp = ".*" - variables, err = c.GetRefPipelineVariablesAsConcatenatedString(ref) + ref.Project.Pull.Pipeline.Variables.Regexp = `.*` + variables, err = c.GetRefPipelineVariablesAsConcatenatedString(ctx, ref) assert.NoError(t, err) assert.Equal(t, "foo:bar,bar:baz", variables) } func TestGetRefsFromPipelines(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() + log.SetLevel(log.TraceLevel) + + mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/branches"), + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `[{"name":"keep_main"}]`) + + return + }) mux.HandleFunc("/api/v4/projects/foo/pipelines", func(w http.ResponseWriter, r *http.Request) { @@ -131,78 +122,428 @@ func TestGetRefsFromPipelines(t *testing.T) { if scope, ok := urlValues["scope"]; ok && len(scope) == 1 && scope[0] == "branches" { fmt.Fprint(w, `[{"id":1,"ref":"keep_dev"},{"id":2,"ref":"keep_main"}]`) + return } if scope, ok := urlValues["scope"]; ok && len(scope) == 1 && scope[0] == "tags" { fmt.Fprint(w, `[{"id":3,"ref":"donotkeep_0.0.1"},{"id":4,"ref":"keep_0.0.2"}]`) + return } - fmt.Fprint(w, `{"error": "undefined or unsupported scope"`) + fmt.Fprint(w, 
`[{"id":1,"ref":"keep_dev"},{"id":2,"ref":"keep_main"},{"id":3,"ref":"donotkeep_0.0.1"},{"id":4,"ref":"keep_0.0.2"},{"id":5,"ref":"refs/merge-requests/1234/head"}]`) + }) + + p := schemas.NewProject("foo") + + // Branches + p.Pull.Refs.Branches.Regexp = `[` // invalid regexp pattern + refs, err := c.GetRefsFromPipelines(ctx, p, schemas.RefKindBranch) + assert.Error(t, err) + assert.Contains(t, err.Error(), "error parsing regexp") + assert.Len(t, refs, 0) + + p.Pull.Refs.Branches.Regexp = "^keep.*" + refs, err = c.GetRefsFromPipelines(ctx, p, schemas.RefKindBranch) + assert.NoError(t, err) + + assert.Equal(t, schemas.Refs{ + "1035317703": schemas.NewRef(p, schemas.RefKindBranch, "keep_main"), + }, refs) + + // Tags + p.Pull.Refs.Tags.Regexp = `[` // invalid regexp pattern + refs, err = c.GetRefsFromPipelines(ctx, p, schemas.RefKindTag) + assert.Error(t, err) + assert.Contains(t, err.Error(), "error parsing regexp") + assert.Len(t, refs, 0) + + p.Pull.Refs.Tags.Regexp = `^keep` + p.Pull.Refs.Tags.ExcludeDeleted = false + refs, err = c.GetRefsFromPipelines(ctx, p, schemas.RefKindTag) + assert.NoError(t, err) + + assert.Equal(t, schemas.Refs{ + "1929034016": schemas.NewRef(p, schemas.RefKindTag, "keep_0.0.2"), + }, refs) + + // Merge requests + refs, err = c.GetRefsFromPipelines(ctx, p, schemas.RefKindMergeRequest) + assert.NoError(t, err) + assert.Equal(t, schemas.Refs{ + "622996356": schemas.NewRef(p, schemas.RefKindMergeRequest, "1234"), + }, refs) +} + +func TestGetRefPipelineTestReport(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, 
"skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": null, "stack_trace": null}]}]}`) }) - p := schemas.Project{ - Name: "foo", - ProjectParameters: schemas.ProjectParameters{ - Pull: schemas.ProjectPull{ - Refs: schemas.ProjectPullRefs{ - RegexpValue: pointy.String("["), // invalid regexp pattern - From: schemas.ProjectPullRefsFrom{ - Pipelines: schemas.ProjectPullRefsFromPipelines{ - EnabledValue: pointy.Bool(true), - DepthValue: pointy.Int(150), - }, + p := schemas.NewProject("foo") + + ref := schemas.Ref{ + Project: p, + Name: "yay", + } + + // Should return right away as MostRecentPipeline is not defined + tr, err := c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{}, tr) + + ref.LatestPipeline = schemas.Pipeline{ + ID: 1, + } + + // Should work + tr, err = c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{ + TotalTime: 5, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []schemas.TestSuite{ + { + Name: "Secure", + TotalTime: 5, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 5, + Status: "success", }, }, }, }, + }, tr) +} + +func TestGetRefPipelineFailedTestReport(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 2, "success_count": 1, "failed_count": 1, "skipped_count": 0, "error_count": 0, "test_suites": 
[{"name": "Secure", "total_time": 5, "total_count": 2, "success_count": 1, "failed_count": 1, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "failed", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": "Failed message", "stack_trace": null}]}]}`) + }) + + p := schemas.NewProject("foo") + + ref := schemas.Ref{ + Project: p, + Name: "yay", } - refs, err := c.GetRefsFromPipelines(p, "") - assert.Error(t, err) - assert.Contains(t, err.Error(), "error parsing regexp") - assert.Len(t, refs, 0) + // Should return right away as MostRecentPipeline is not defined + tr, err := c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{}, tr) + + ref.LatestPipeline = schemas.Pipeline{ + ID: 1, + } - p.Pull.Refs.RegexpValue = pointy.String("^keep.*") - refs, err = c.GetRefsFromPipelines(p, "") - assert.NoError(t, err) - - expectedRefs := schemas.Refs{ - "2231079763": schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo", - Name: "keep_dev", - LatestJobs: make(schemas.Jobs), - OutputSparseStatusMetrics: true, - PullPipelineJobsFromChildPipelinesEnabled: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineVariablesRegexp: ".*", - PullPipelineJobsRunnerDescriptionAggregationRegexp: "shared-runners-manager-(\\d*)\\.gitlab\\.com", + // Should work + tr, err = c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{ + TotalTime: 5, + TotalCount: 2, + SuccessCount: 1, + FailedCount: 1, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []schemas.TestSuite{ + { + Name: "Secure", + TotalTime: 5, + TotalCount: 2, + SuccessCount: 1, + FailedCount: 1, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 5, + Status: "failed", + 
}, + }, + }, }, - "1035317703": schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo", - Name: "keep_main", - LatestJobs: make(schemas.Jobs), - OutputSparseStatusMetrics: true, - PullPipelineJobsFromChildPipelinesEnabled: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineVariablesRegexp: ".*", - PullPipelineJobsRunnerDescriptionAggregationRegexp: "shared-runners-manager-(\\d*)\\.gitlab\\.com", + }, tr) +} + +func TestGetRefPipelineWithParentChildTestReport(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": null, "stack_trace": null}]}]}`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/bridges", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[{"id":1,"downstream_pipeline":{"id":2, "project_id": 1}}]`) + }) + + mux.HandleFunc("/api/v4/projects/1/pipelines/2/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 3, "total_count": 3, "success_count": 3, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 3, "total_count": 3, "success_count": 3, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": 
"vulnerability_management_spec", "execution_time": 3, "system_output": null, "stack_trace": null}]}]}`) + }) + + mux.HandleFunc("/api/v4/projects/1/pipelines/2/bridges", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[]`) + }) + + p := schemas.NewProject("foo") + + p.Project.Pull.Pipeline.TestReports.FromChildPipelines.Enabled = true + + ref := schemas.Ref{ + Project: p, + Name: "yay", + } + + // Should return right away as MostRecentPipeline is not defined + tr, err := c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{}, tr) + + ref.LatestPipeline = schemas.Pipeline{ + ID: 1, + } + + // Should work + tr, err = c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{ + TotalTime: 8, + TotalCount: 4, + SuccessCount: 4, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []schemas.TestSuite{ + { + Name: "Secure", + TotalTime: 5, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 5, + Status: "success", + }, + }, + }, + { + Name: "Secure", + TotalTime: 3, + TotalCount: 3, + SuccessCount: 3, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 3, + Status: "success", + }, + }, + }, }, - "1929034016": schemas.Ref{ - Kind: schemas.RefKindTag, - ProjectName: "foo", - Name: "keep_0.0.2", - LatestJobs: make(schemas.Jobs), - OutputSparseStatusMetrics: true, - PullPipelineJobsFromChildPipelinesEnabled: true, - PullPipelineJobsRunnerDescriptionEnabled: true, - PullPipelineVariablesRegexp: ".*", - PullPipelineJobsRunnerDescriptionAggregationRegexp: 
"shared-runners-manager-(\\d*)\\.gitlab\\.com", + }, tr) +} + +func TestGetRefPipelineWithMultiProjectTestReport(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": null, "stack_trace": null}]}]}`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/bridges", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[{"id":1,"downstream_pipeline":{"id":2, "project_id": 11}}]`) + }) + + mux.HandleFunc("/api/v4/projects/11/pipelines/2/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 3, "total_count": 3, "success_count": 3, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 3, "total_count": 3, "success_count": 3, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 3, "system_output": null, "stack_trace": null}]}]}`) + }) + + mux.HandleFunc("/api/v4/projects/11/pipelines/2/bridges", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[]`) + }) + + p := schemas.NewProject("foo") + + p.Project.Pull.Pipeline.TestReports.FromChildPipelines.Enabled = 
true + + ref := schemas.Ref{ + Project: p, + Name: "yay", + } + + // Should return right away as MostRecentPipeline is not defined + tr, err := c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{}, tr) + + ref.LatestPipeline = schemas.Pipeline{ + ID: 1, + } + + // Should work + tr, err = c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{ + TotalTime: 8, + TotalCount: 4, + SuccessCount: 4, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []schemas.TestSuite{ + { + Name: "Secure", + TotalTime: 5, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 5, + Status: "success", + }, + }, + }, + { + Name: "Secure", + TotalTime: 3, + TotalCount: 3, + SuccessCount: 3, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 3, + Status: "success", + }, + }, + }, }, + }, tr) +} + +func TestGetRefPipelineWithNoChildrenTestReport(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/test_report", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_suites": [{"name": "Secure", "total_time": 5, "total_count": 1, "success_count": 1, "failed_count": 0, "skipped_count": 0, "error_count": 0, "test_cases": [{"status": "success", "name": "Security Reports can create an auto-remediation MR", "classname": "vulnerability_management_spec", "execution_time": 5, "system_output": 
null, "stack_trace": null}]}]}`) + }) + + mux.HandleFunc("/api/v4/projects/foo/pipelines/1/bridges", + func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `[]`) + }) + + p := schemas.NewProject("foo") + + p.Project.Pull.Pipeline.TestReports.FromChildPipelines.Enabled = true + + ref := schemas.Ref{ + Project: p, + Name: "yay", + } + + // Should return right away as MostRecentPipeline is not defined + tr, err := c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{}, tr) + + ref.LatestPipeline = schemas.Pipeline{ + ID: 1, } - assert.Equal(t, expectedRefs, refs) + // Should work + tr, err = c.GetRefPipelineTestReport(ctx, ref) + assert.NoError(t, err) + assert.Equal(t, schemas.TestReport{ + TotalTime: 5, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []schemas.TestSuite{ + { + Name: "Secure", + TotalTime: 5, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []schemas.TestCase{ + { + Name: "Security Reports can create an auto-remediation MR", + Classname: "vulnerability_management_spec", + ExecutionTime: 5, + Status: "success", + }, + }, + }, + }, + }, tr) } diff --git a/pkg/gitlab/projects.go b/pkg/gitlab/projects.go index 41afeae2..adc5f1d6 100644 --- a/pkg/gitlab/projects.go +++ b/pkg/gitlab/projects.go @@ -1,29 +1,47 @@ package gitlab import ( + "context" "fmt" "regexp" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" log "github.com/sirupsen/logrus" - "github.com/xanzy/go-gitlab" goGitlab "github.com/xanzy/go-gitlab" + "go.openly.dev/pointy" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) // GetProject .. 
-func (c *Client) GetProject(name string) (*goGitlab.Project, error) { +func (c *Client) GetProject(ctx context.Context, name string) (*goGitlab.Project, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetProject") + defer span.End() + span.SetAttributes(attribute.String("project_name", name)) + log.WithFields(log.Fields{ "project-name": name, }).Debug("reading project") - c.rateLimit() - p, _, err := c.Projects.GetProject(name, &goGitlab.GetProjectOptions{}) + c.rateLimit(ctx) + p, resp, err := c.Projects.GetProject(name, &goGitlab.GetProjectOptions{}, goGitlab.WithContext(ctx)) + c.requestsRemaining(resp) + return p, err } // ListProjects .. -func (c *Client) ListProjects(w schemas.Wildcard) ([]schemas.Project, error) { +func (c *Client) ListProjects(ctx context.Context, w config.Wildcard) ([]schemas.Project, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:ListProjects") + defer span.End() + span.SetAttributes(attribute.String("wildcard_search", w.Search)) + span.SetAttributes(attribute.String("wildcard_owner_kind", w.Owner.Kind)) + span.SetAttributes(attribute.String("wildcard_owner_name", w.Owner.Name)) + span.SetAttributes(attribute.Bool("wildcard_owner_include_subgroups", w.Owner.IncludeSubgroups)) + span.SetAttributes(attribute.Bool("wildcard_archived", w.Archived)) + logFields := log.Fields{ "wildcard-search": w.Search, "wildcard-owner-kind": w.Owner.Kind, @@ -34,7 +52,8 @@ func (c *Client) ListProjects(w schemas.Wildcard) ([]schemas.Project, error) { log.WithFields(logFields).Debug("listing all projects from wildcard") var projects []schemas.Project - listOptions := gitlab.ListOptions{ + + listOptions := goGitlab.ListOptions{ Page: 1, PerPage: 100, } @@ -44,6 +63,7 @@ func (c *Client) ListProjects(w schemas.Wildcard) ([]schemas.Project, error) { // scoped wildcard. Therefore, if the wildcard owner name is set, we want to filter // out to project actually *belonging* to the owner. 
var ownerRegexp *regexp.Regexp + if len(w.Owner.Name) > 0 { ownerRegexp = regexp.MustCompile(fmt.Sprintf(`^%s\/`, w.Owner.Name)) } else { @@ -51,40 +71,49 @@ func (c *Client) ListProjects(w schemas.Wildcard) ([]schemas.Project, error) { } for { - var gps []*gitlab.Project - var resp *gitlab.Response - var err error + var ( + gps []*goGitlab.Project + resp *goGitlab.Response + err error + ) + + c.rateLimit(ctx) - c.rateLimit() switch w.Owner.Kind { case "user": gps, resp, err = c.Projects.ListUserProjects( w.Owner.Name, - &gitlab.ListProjectsOptions{ + &goGitlab.ListProjectsOptions{ Archived: &w.Archived, ListOptions: listOptions, Search: &w.Search, + Simple: pointy.Bool(true), }, + goGitlab.WithContext(ctx), ) case "group": gps, resp, err = c.Groups.ListGroupProjects( w.Owner.Name, - &gitlab.ListGroupProjectsOptions{ + &goGitlab.ListGroupProjectsOptions{ Archived: &w.Archived, WithShared: pointy.Bool(false), - IncludeSubgroups: &w.Owner.IncludeSubgroups, + IncludeSubGroups: &w.Owner.IncludeSubgroups, ListOptions: listOptions, Search: &w.Search, + Simple: pointy.Bool(true), }, + goGitlab.WithContext(ctx), ) default: // List all visible projects gps, resp, err = c.Projects.ListProjects( - &gitlab.ListProjectsOptions{ + &goGitlab.ListProjectsOptions{ ListOptions: listOptions, Archived: &w.Archived, Search: &w.Search, + Simple: pointy.Bool(true), }, + goGitlab.WithContext(ctx), ) } @@ -92,6 +121,8 @@ func (c *Client) ListProjects(w schemas.Wildcard) ([]schemas.Project, error) { return projects, fmt.Errorf("unable to list projects with search pattern '%s' from the GitLab API : %v", w.Search, err.Error()) } + c.requestsRemaining(resp) + // Copy relevant settings from wildcard into created project for _, gp := range gps { if !ownerRegexp.MatchString(gp.PathWithNamespace) { @@ -99,27 +130,16 @@ func (c *Client) ListProjects(w schemas.Wildcard) ([]schemas.Project, error) { "project-id": gp.ID, "project-name": gp.PathWithNamespace, }).Debug("project path not matching owner's 
name, skipping") - continue - } - if !gp.JobsEnabled { - log.WithFields(logFields).WithFields(log.Fields{ - "project-id": gp.ID, - "project-name": gp.PathWithNamespace, - }).Debug("jobs/pipelines not enabled on project, skipping") continue } - projects = append( - projects, - schemas.Project{ - ProjectParameters: w.ProjectParameters, - Name: gp.PathWithNamespace, - }, - ) + p := schemas.NewProject(gp.PathWithNamespace) + p.ProjectParameters = w.ProjectParameters + projects = append(projects, p) } - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { break } diff --git a/pkg/gitlab/projects_test.go b/pkg/gitlab/projects_test.go index 0fbf0a35..65613478 100644 --- a/pkg/gitlab/projects_test.go +++ b/pkg/gitlab/projects_test.go @@ -5,38 +5,35 @@ import ( "net/http" "testing" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" ) func TestGetProject(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - project := "foo/bar" - mux.HandleFunc(fmt.Sprintf("/api/v4/projects/%s", project), + mux.HandleFunc("/api/v4/projects/foo%2Fbar", func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, r.Method, "GET") - fmt.Fprint(w, `{"id":1}`) + _, _ = fmt.Fprint(w, `{"id":1}`) }) - p, err := c.GetProject(project) + p, err := c.GetProject(ctx, "foo/bar") assert.NoError(t, err) - assert.NotNil(t, p) + require.NotNil(t, p) assert.Equal(t, 1, p.ID) } func TestListUserProjects(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - w := schemas.Wildcard{ + w := config.Wildcard{ Search: "bar", - Owner: struct { - Name string `yaml:"name"` - Kind string `yaml:"kind"` - IncludeSubgroups bool `yaml:"include_subgroups"` - }{ + Owner: config.WildcardOwner{ Name: "foo", 
Kind: "user", IncludeSubgroups: false, @@ -47,26 +44,22 @@ func TestListUserProjects(t *testing.T) { mux.HandleFunc(fmt.Sprintf("/api/v4/users/%s/projects", w.Owner.Name), func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, r.Method, "GET") - fmt.Fprint(w, `[{"id":1,"path_with_namespace":"foo/bar","jobs_enabled":true},{"id":2,"path_with_namespace":"bar/baz","jobs_enabled":true}]`) + _, _ = fmt.Fprint(w, `[{"id":1,"path_with_namespace":"foo/bar"},{"id":2,"path_with_namespace":"bar/baz"}]`) }) - projects, err := c.ListProjects(w) + projects, err := c.ListProjects(ctx, w) assert.NoError(t, err) assert.Len(t, projects, 1) assert.Equal(t, "foo/bar", projects[0].Name) } func TestListGroupProjects(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - w := schemas.Wildcard{ + w := config.Wildcard{ Search: "bar", - Owner: struct { - Name string `yaml:"name"` - Kind string `yaml:"kind"` - IncludeSubgroups bool `yaml:"include_subgroups"` - }{ + Owner: config.WildcardOwner{ Name: "foo", Kind: "group", IncludeSubgroups: false, @@ -77,26 +70,22 @@ func TestListGroupProjects(t *testing.T) { mux.HandleFunc(fmt.Sprintf("/api/v4/groups/%s/projects", w.Owner.Name), func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, r.Method, "GET") - fmt.Fprint(w, `[{"id":1,"path_with_namespace":"foo/bar","jobs_enabled":true},{"id":2,"path_with_namespace":"bar/baz","jobs_enabled":true}]`) + _, _ = fmt.Fprint(w, `[{"id":1,"path_with_namespace":"foo/bar"},{"id":2,"path_with_namespace":"bar/baz"}]`) }) - projects, err := c.ListProjects(w) + projects, err := c.ListProjects(ctx, w) assert.NoError(t, err) assert.Len(t, projects, 1) assert.Equal(t, "foo/bar", projects[0].Name) } func TestListProjects(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - w := schemas.Wildcard{ + w := config.Wildcard{ Search: "bar", - Owner: struct { - Name string 
`yaml:"name"` - Kind string `yaml:"kind"` - IncludeSubgroups bool `yaml:"include_subgroups"` - }{ + Owner: config.WildcardOwner{ Name: "", Kind: "", IncludeSubgroups: false, @@ -107,26 +96,22 @@ func TestListProjects(t *testing.T) { mux.HandleFunc("/api/v4/projects", func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, r.Method, "GET") - fmt.Fprint(w, `[{"id":1,"path_with_namespace":"foo","jobs_enabled":false},{"id":2,"path_with_namespace":"bar","jobs_enabled":true}]`) + _, _ = fmt.Fprint(w, `[{"id":2,"path_with_namespace":"bar"}]`) }) - projects, err := c.ListProjects(w) + projects, err := c.ListProjects(ctx, w) assert.NoError(t, err) assert.Len(t, projects, 1) assert.Equal(t, "bar", projects[0].Name) } func TestListProjectsAPIError(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() - w := schemas.Wildcard{ + w := config.Wildcard{ Search: "bar", - Owner: struct { - Name string `yaml:"name"` - Kind string `yaml:"kind"` - IncludeSubgroups bool `yaml:"include_subgroups"` - }{ + Owner: config.WildcardOwner{ Name: "foo", Kind: "user", }, @@ -136,25 +121,10 @@ func TestListProjectsAPIError(t *testing.T) { mux.HandleFunc(fmt.Sprintf("/api/v4/users/%s/projects", w.Owner.Name), func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusInternalServerError) - w.Write([]byte("500 - Something bad happened!")) + _, _ = w.Write([]byte("500 - Something bad happened!")) }) - _, err := c.ListProjects(w) + _, err := c.ListProjects(ctx, w) assert.Error(t, err) assert.Contains(t, err.Error(), "unable to list projects with search pattern") } - -func readProjects(until chan struct{}, projects ...schemas.Project) <-chan schemas.Project { - p := make(chan schemas.Project) - go func() { - defer close(p) - for _, i := range projects { - select { - case <-until: - return - case p <- i: - } - } - }() - return p -} diff --git a/pkg/gitlab/repositories.go b/pkg/gitlab/repositories.go index 
253fb5fc..5fa84742 100644 --- a/pkg/gitlab/repositories.go +++ b/pkg/gitlab/repositories.go @@ -1,24 +1,46 @@ package gitlab import ( - "github.com/openlyinc/pointy" + "context" + "fmt" + log "github.com/sirupsen/logrus" goGitlab "github.com/xanzy/go-gitlab" + "go.openly.dev/pointy" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" ) // GetCommitCountBetweenRefs .. -func (c *Client) GetCommitCountBetweenRefs(project, from, to string) (int, error) { +func (c *Client) GetCommitCountBetweenRefs(ctx context.Context, project, from, to string) (int, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetCommitCountBetweenRefs") + defer span.End() + span.SetAttributes(attribute.String("project_name", project)) + span.SetAttributes(attribute.String("from_ref", from)) + span.SetAttributes(attribute.String("to_ref", to)) + log.WithFields(log.Fields{ "project-name": project, "from-ref": from, "to-ref": to, }).Debug("comparing refs") - c.rateLimit() - cmp, _, err := c.Repositories.Compare(project, &goGitlab.CompareOptions{ + c.rateLimit(ctx) + + cmp, resp, err := c.Repositories.Compare(project, &goGitlab.CompareOptions{ From: &from, To: &to, Straight: pointy.Bool(true), - }, nil) - return len(cmp.Commits), err + }, goGitlab.WithContext(ctx)) + if err != nil { + return 0, err + } + + c.requestsRemaining(resp) + + if cmp == nil { + return 0, fmt.Errorf("could not compare refs successfully") + } + + return len(cmp.Commits), nil } diff --git a/pkg/gitlab/repositories_test.go b/pkg/gitlab/repositories_test.go index 4d4b35af..dbed1085 100644 --- a/pkg/gitlab/repositories_test.go +++ b/pkg/gitlab/repositories_test.go @@ -1 +1,33 @@ package gitlab + +import ( + "fmt" + "net/http" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGetCommitCountBetweenRefs(t *testing.T) { + ctx, mux, server, c := getMockedClient() + defer server.Close() + + mux.HandleFunc("/api/v4/projects/foo/repository/compare", + func(w http.ResponseWriter, r 
*http.Request) { + assert.Equal(t, "GET", r.Method) + fmt.Fprint(w, `{"commits":[{},{},{}]}`) + }) + + mux.HandleFunc("/api/v4/projects/bar/repository/compare", + func(w http.ResponseWriter, r *http.Request) { + fmt.Fprint(w, `{`) + }) + + commitCount, err := c.GetCommitCountBetweenRefs(ctx, "foo", "bar", "baz") + assert.NoError(t, err) + assert.Equal(t, 3, commitCount) + + commitCount, err = c.GetCommitCountBetweenRefs(ctx, "bar", "", "") + assert.Error(t, err) + assert.Equal(t, 0, commitCount) +} diff --git a/pkg/gitlab/tags.go b/pkg/gitlab/tags.go index 6febc8c8..0381c03a 100644 --- a/pkg/gitlab/tags.go +++ b/pkg/gitlab/tags.go @@ -1,16 +1,26 @@ package gitlab import ( + "context" "regexp" - "time" - log "github.com/sirupsen/logrus" goGitlab "github.com/xanzy/go-gitlab" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) // GetProjectTags .. -func (c *Client) GetProjectTags(projectName, filterRegexp string, maxAgeSeconds uint) ([]string, error) { - var names []string +func (c *Client) GetProjectTags(ctx context.Context, p schemas.Project) ( + refs schemas.Refs, + err error, +) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetProjectTags") + defer span.End() + span.SetAttributes(attribute.String("project_name", p.Name)) + + refs = make(schemas.Refs) options := &goGitlab.ListTagsOptions{ ListOptions: goGitlab.ListOptions{ @@ -19,45 +29,51 @@ func (c *Client) GetProjectTags(projectName, filterRegexp string, maxAgeSeconds }, } - re, err := regexp.Compile(filterRegexp) - if err != nil { - return nil, err + var re *regexp.Regexp + + if re, err = regexp.Compile(p.Pull.Refs.Tags.Regexp); err != nil { + return } for { - c.rateLimit() - tags, resp, err := c.Tags.ListTags(projectName, options) + c.rateLimit(ctx) + + var ( + tags []*goGitlab.Tag + resp *goGitlab.Response + ) + + tags, resp, err = c.Tags.ListTags(p.Name, options, goGitlab.WithContext(ctx)) if err != nil { - 
return names, err + return } + c.requestsRemaining(resp) + for _, tag := range tags { if re.MatchString(tag.Name) { - if maxAgeSeconds > 0 && time.Now().Sub(*tag.Commit.AuthoredDate) > (time.Duration(maxAgeSeconds)*time.Second) { - log.WithFields(log.Fields{ - "project-name": projectName, - "tag": tag.Name, - "regexp": filterRegexp, - "max-age-seconds": maxAgeSeconds, - "authored-date": *tag.Commit.AuthoredDate, - }).Debug("tag matching regexp but last authored at a date outside of the required timeframe, ignoring..") - continue - } - names = append(names, tag.Name) + ref := schemas.NewRef(p, schemas.RefKindTag, tag.Name) + refs[ref.Key()] = ref } } - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { break } + options.Page = resp.NextPage } - return names, nil + return } // GetProjectMostRecentTagCommit .. -func (c *Client) GetProjectMostRecentTagCommit(projectName, filterRegexp string) (string, float64, error) { +func (c *Client) GetProjectMostRecentTagCommit(ctx context.Context, projectName, filterRegexp string) (string, float64, error) { + ctx, span := otel.Tracer(tracerName).Start(ctx, "gitlab:GetProjectTags") + defer span.End() + span.SetAttributes(attribute.String("project_name", projectName)) + span.SetAttributes(attribute.String("regexp", filterRegexp)) + options := &goGitlab.ListTagsOptions{ ListOptions: goGitlab.ListOptions{ Page: 1, @@ -71,21 +87,25 @@ func (c *Client) GetProjectMostRecentTagCommit(projectName, filterRegexp string) } for { - c.rateLimit() - tags, resp, err := c.Tags.ListTags(projectName, options) + c.rateLimit(ctx) + + tags, resp, err := c.Tags.ListTags(projectName, options, goGitlab.WithContext(ctx)) if err != nil { return "", 0, err } + c.requestsRemaining(resp) + for _, tag := range tags { if re.MatchString(tag.Name) { return tag.Commit.ShortID, float64(tag.Commit.CommittedDate.Unix()), nil } } - if resp.CurrentPage >= resp.TotalPages { + if resp.CurrentPage >= resp.NextPage { break } + options.Page = 
resp.NextPage } diff --git a/pkg/gitlab/tags_test.go b/pkg/gitlab/tags_test.go index f6e0ddf6..ffeb6346 100644 --- a/pkg/gitlab/tags_test.go +++ b/pkg/gitlab/tags_test.go @@ -7,10 +7,12 @@ import ( "testing" "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" ) func TestGetProjectTags(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc("/api/v4/projects/foo/repository/tags", @@ -24,18 +26,31 @@ func TestGetProjectTags(t *testing.T) { fmt.Fprint(w, `[{"name":"foo"},{"name":"bar"}]`) }) - tags, err := c.GetProjectTags("foo", "[", 0) - assert.Error(t, err) - assert.Contains(t, err.Error(), "error parsing regexp") - assert.Len(t, tags, 0) + p := schemas.NewProject("foo") + p.Pull.Refs.Tags.Regexp = `^f` - tags, err = c.GetProjectTags("foo", "^f", 0) + expectedRef := schemas.NewRef(p, schemas.RefKindTag, "foo") + refs, err := c.GetProjectTags(ctx, p) assert.NoError(t, err) - assert.Equal(t, []string{"foo"}, tags) + assert.Len(t, refs, 1) + assert.Equal(t, schemas.Refs{ + expectedRef.Key(): expectedRef, + }, refs) + + // Test invalid project name + p.Name = "invalid" + _, err = c.GetProjectTags(ctx, p) + assert.Error(t, err) + + // Test invalid regexp + p.Name = "foo" + p.Pull.Refs.Tags.Regexp = `[` + _, err = c.GetProjectTags(ctx, p) + assert.Error(t, err) } func TestGetProjectMostRecentTagCommit(t *testing.T) { - mux, server, c := getMockedClient() + ctx, mux, server, c := getMockedClient() defer server.Close() mux.HandleFunc(fmt.Sprintf("/api/v4/projects/foo/repository/tags"), @@ -61,11 +76,11 @@ func TestGetProjectMostRecentTagCommit(t *testing.T) { ]`) }) - _, _, err := c.GetProjectMostRecentTagCommit("foo", "[") + _, _, err := c.GetProjectMostRecentTagCommit(ctx, "foo", "[") assert.Error(t, err) assert.Contains(t, err.Error(), "error parsing regexp") - commitShortID, commitCreatedAt, err := c.GetProjectMostRecentTagCommit("foo", 
"^f") + commitShortID, commitCreatedAt, err := c.GetProjectMostRecentTagCommit(ctx, "foo", "^f") assert.NoError(t, err) assert.Equal(t, "7b5c3cc", commitShortID) assert.Equal(t, float64(1553540113), commitCreatedAt) diff --git a/pkg/gitlab/version.go b/pkg/gitlab/version.go new file mode 100644 index 00000000..efb5dcfb --- /dev/null +++ b/pkg/gitlab/version.go @@ -0,0 +1,32 @@ +package gitlab + +import ( + "strings" + + "golang.org/x/mod/semver" +) + +type GitLabVersion struct { + Version string +} + +func NewGitLabVersion(version string) GitLabVersion { + ver := "" + if strings.HasPrefix(version, "v") { + ver = version + } else if version != "" { + ver = "v" + version + } + + return GitLabVersion{Version: ver} +} + +// PipelineJobsKeysetPaginationSupported returns true if the GitLab instance +// is running 15.9 or later. +func (v GitLabVersion) PipelineJobsKeysetPaginationSupported() bool { + if v.Version == "" { + return false + } + + return semver.Compare(v.Version, "v15.9.0") >= 0 +} diff --git a/pkg/gitlab/version_test.go b/pkg/gitlab/version_test.go new file mode 100644 index 00000000..d12febf4 --- /dev/null +++ b/pkg/gitlab/version_test.go @@ -0,0 +1,64 @@ +package gitlab + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestPipelineJobsKeysetPaginationSupported(t *testing.T) { + tests := []struct { + name string + version GitLabVersion + expectedResult bool + }{ + { + name: "unknown", + version: NewGitLabVersion(""), + expectedResult: false, + }, + { + name: "15.8.0", + version: NewGitLabVersion("15.8.0"), + expectedResult: false, + }, + { + name: "v15.8.0", + version: NewGitLabVersion("v15.8.0"), + expectedResult: false, + }, + { + name: "15.9.0", + version: NewGitLabVersion("15.9.0"), + expectedResult: true, + }, + { + name: "v15.9.0", + version: NewGitLabVersion("v15.9.0"), + expectedResult: true, + }, + { + name: "15.9.1", + version: NewGitLabVersion("15.9.1"), + expectedResult: true, + }, + { + name: "15.10.2", + version: 
NewGitLabVersion("15.10.2"), + expectedResult: true, + }, + { + name: "16.0.0", + version: NewGitLabVersion("16.0.0"), + expectedResult: true, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + result := tc.version.PipelineJobsKeysetPaginationSupported() + + assert.Equal(t, tc.expectedResult, result) + }) + } +} diff --git a/pkg/monitor/client/client.go b/pkg/monitor/client/client.go new file mode 100644 index 00000000..b564c44f --- /dev/null +++ b/pkg/monitor/client/client.go @@ -0,0 +1,37 @@ +package client + +import ( + "context" + "net/url" + + log "github.com/sirupsen/logrus" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" + + pb "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/protobuf" +) + +// Client .. +type Client struct { + pb.MonitorClient +} + +// NewClient .. +func NewClient(ctx context.Context, endpoint *url.URL) *Client { + log.WithField("endpoint", endpoint.String()).Debug("establishing gRPC connection to the server..") + + conn, err := grpc.DialContext( + ctx, + endpoint.String(), + grpc.WithTransportCredentials(insecure.NewCredentials()), + ) + if err != nil { + log.WithField("endpoint", endpoint.String()).WithField("error", err).Fatal("could not connect to the server") + } + + log.Debug("gRPC connection established") + + return &Client{ + MonitorClient: pb.NewMonitorClient(conn), + } +} diff --git a/pkg/monitor/monitor.go b/pkg/monitor/monitor.go new file mode 100644 index 00000000..4ad51057 --- /dev/null +++ b/pkg/monitor/monitor.go @@ -0,0 +1,8 @@ +package monitor + +import "time" + +type TaskSchedulingStatus struct { + Last time.Time + Next time.Time +} diff --git a/pkg/monitor/protobuf/monitor.pb.go b/pkg/monitor/protobuf/monitor.pb.go new file mode 100644 index 00000000..b4a09208 --- /dev/null +++ b/pkg/monitor/protobuf/monitor.pb.go @@ -0,0 +1,496 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// versions: +// protoc-gen-go v1.28.0 +// protoc v3.21.0 +// source: pkg/monitor/protobuf/monitor.proto + +package protobuf + +import ( + reflect "reflect" + sync "sync" + + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + timestamppb "google.golang.org/protobuf/types/known/timestamppb" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type Empty struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *Empty) Reset() { + *x = Empty{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Empty) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Empty) ProtoMessage() {} + +func (x *Empty) ProtoReflect() protoreflect.Message { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Empty.ProtoReflect.Descriptor instead. 
+func (*Empty) Descriptor() ([]byte, []int) { + return file_pkg_monitor_protobuf_monitor_proto_rawDescGZIP(), []int{0} +} + +type Config struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Content string `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"` +} + +func (x *Config) Reset() { + *x = Config{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Config) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Config) ProtoMessage() {} + +func (x *Config) ProtoReflect() protoreflect.Message { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Config.ProtoReflect.Descriptor instead. 
+func (*Config) Descriptor() ([]byte, []int) { + return file_pkg_monitor_protobuf_monitor_proto_rawDescGZIP(), []int{1} +} + +func (x *Config) GetContent() string { + if x != nil { + return x.Content + } + return "" +} + +type Telemetry struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + GitlabApiUsage float64 `protobuf:"fixed64,1,opt,name=gitlab_api_usage,json=gitlabApiUsage,proto3" json:"gitlab_api_usage,omitempty"` + GitlabApiRequestsCount uint64 `protobuf:"varint,2,opt,name=gitlab_api_requests_count,json=gitlabApiRequestsCount,proto3" json:"gitlab_api_requests_count,omitempty"` + GitlabApiRateLimit float64 `protobuf:"fixed64,3,opt,name=gitlab_api_rate_limit,json=gitlabApiRateLimit,proto3" json:"gitlab_api_rate_limit,omitempty"` + GitlabApiLimitRemaining uint64 `protobuf:"varint,4,opt,name=gitlab_api_limit_remaining,json=gitlabApiLimitRemaining,proto3" json:"gitlab_api_limit_remaining,omitempty"` + TasksBufferUsage float64 `protobuf:"fixed64,5,opt,name=tasks_buffer_usage,json=tasksBufferUsage,proto3" json:"tasks_buffer_usage,omitempty"` + TasksExecutedCount uint64 `protobuf:"varint,6,opt,name=tasks_executed_count,json=tasksExecutedCount,proto3" json:"tasks_executed_count,omitempty"` + Projects *Entity `protobuf:"bytes,7,opt,name=projects,proto3" json:"projects,omitempty"` + Refs *Entity `protobuf:"bytes,8,opt,name=refs,proto3" json:"refs,omitempty"` + Envs *Entity `protobuf:"bytes,9,opt,name=envs,proto3" json:"envs,omitempty"` + Metrics *Entity `protobuf:"bytes,10,opt,name=metrics,proto3" json:"metrics,omitempty"` +} + +func (x *Telemetry) Reset() { + *x = Telemetry{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Telemetry) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Telemetry) ProtoMessage() {} + +func (x *Telemetry) 
ProtoReflect() protoreflect.Message { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Telemetry.ProtoReflect.Descriptor instead. +func (*Telemetry) Descriptor() ([]byte, []int) { + return file_pkg_monitor_protobuf_monitor_proto_rawDescGZIP(), []int{2} +} + +func (x *Telemetry) GetGitlabApiUsage() float64 { + if x != nil { + return x.GitlabApiUsage + } + return 0 +} + +func (x *Telemetry) GetGitlabApiRequestsCount() uint64 { + if x != nil { + return x.GitlabApiRequestsCount + } + return 0 +} + +func (x *Telemetry) GetGitlabApiRateLimit() float64 { + if x != nil { + return x.GitlabApiRateLimit + } + return 0 +} + +func (x *Telemetry) GetGitlabApiLimitRemaining() uint64 { + if x != nil { + return x.GitlabApiLimitRemaining + } + return 0 +} + +func (x *Telemetry) GetTasksBufferUsage() float64 { + if x != nil { + return x.TasksBufferUsage + } + return 0 +} + +func (x *Telemetry) GetTasksExecutedCount() uint64 { + if x != nil { + return x.TasksExecutedCount + } + return 0 +} + +func (x *Telemetry) GetProjects() *Entity { + if x != nil { + return x.Projects + } + return nil +} + +func (x *Telemetry) GetRefs() *Entity { + if x != nil { + return x.Refs + } + return nil +} + +func (x *Telemetry) GetEnvs() *Entity { + if x != nil { + return x.Envs + } + return nil +} + +func (x *Telemetry) GetMetrics() *Entity { + if x != nil { + return x.Metrics + } + return nil +} + +type Entity struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Count int64 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"` + LastGc *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=last_gc,json=lastGc,proto3" json:"last_gc,omitempty"` + LastPull *timestamppb.Timestamp 
`protobuf:"bytes,3,opt,name=last_pull,json=lastPull,proto3" json:"last_pull,omitempty"` + NextGc *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=next_gc,json=nextGc,proto3" json:"next_gc,omitempty"` + NextPull *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=next_pull,json=nextPull,proto3" json:"next_pull,omitempty"` +} + +func (x *Entity) Reset() { + *x = Entity{} + if protoimpl.UnsafeEnabled { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Entity) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Entity) ProtoMessage() {} + +func (x *Entity) ProtoReflect() protoreflect.Message { + mi := &file_pkg_monitor_protobuf_monitor_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Entity.ProtoReflect.Descriptor instead. 
+func (*Entity) Descriptor() ([]byte, []int) { + return file_pkg_monitor_protobuf_monitor_proto_rawDescGZIP(), []int{3} +} + +func (x *Entity) GetCount() int64 { + if x != nil { + return x.Count + } + return 0 +} + +func (x *Entity) GetLastGc() *timestamppb.Timestamp { + if x != nil { + return x.LastGc + } + return nil +} + +func (x *Entity) GetLastPull() *timestamppb.Timestamp { + if x != nil { + return x.LastPull + } + return nil +} + +func (x *Entity) GetNextGc() *timestamppb.Timestamp { + if x != nil { + return x.NextGc + } + return nil +} + +func (x *Entity) GetNextPull() *timestamppb.Timestamp { + if x != nil { + return x.NextPull + } + return nil +} + +var File_pkg_monitor_protobuf_monitor_proto protoreflect.FileDescriptor + +var file_pkg_monitor_protobuf_monitor_proto_rawDesc = []byte{ + 0x0a, 0x22, 0x70, 0x6b, 0x67, 0x2f, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x07, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x1a, 0x1f, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x07, + 0x0a, 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x22, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x12, 0x18, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0xe2, 0x03, 0x0a, 0x09, + 0x54, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x12, 0x28, 0x0a, 0x10, 0x67, 0x69, 0x74, + 0x6c, 0x61, 0x62, 0x5f, 0x61, 0x70, 0x69, 0x5f, 0x75, 0x73, 0x61, 0x67, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x01, 0x52, 0x0e, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x41, 0x70, 0x69, 0x55, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x39, 0x0a, 0x19, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x5f, 0x61, 0x70, + 0x69, 0x5f, 
0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x16, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x41, 0x70, + 0x69, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x73, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x31, + 0x0a, 0x15, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x5f, 0x61, 0x70, 0x69, 0x5f, 0x72, 0x61, 0x74, + 0x65, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x12, 0x67, + 0x69, 0x74, 0x6c, 0x61, 0x62, 0x41, 0x70, 0x69, 0x52, 0x61, 0x74, 0x65, 0x4c, 0x69, 0x6d, 0x69, + 0x74, 0x12, 0x3b, 0x0a, 0x1a, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x5f, 0x61, 0x70, 0x69, 0x5f, + 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, 0x72, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x04, 0x52, 0x17, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, 0x41, 0x70, 0x69, + 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x52, 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x12, 0x2c, + 0x0a, 0x12, 0x74, 0x61, 0x73, 0x6b, 0x73, 0x5f, 0x62, 0x75, 0x66, 0x66, 0x65, 0x72, 0x5f, 0x75, + 0x73, 0x61, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x01, 0x52, 0x10, 0x74, 0x61, 0x73, 0x6b, + 0x73, 0x42, 0x75, 0x66, 0x66, 0x65, 0x72, 0x55, 0x73, 0x61, 0x67, 0x65, 0x12, 0x30, 0x0a, 0x14, + 0x74, 0x61, 0x73, 0x6b, 0x73, 0x5f, 0x65, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x64, 0x5f, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x04, 0x52, 0x12, 0x74, 0x61, 0x73, 0x6b, + 0x73, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x64, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2b, + 0x0a, 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0f, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, + 0x79, 0x52, 0x08, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x73, 0x12, 0x23, 0x0a, 0x04, 0x72, + 0x65, 0x66, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, + 0x74, 0x6f, 0x72, 0x2e, 0x45, 0x6e, 0x74, 0x69, 
0x74, 0x79, 0x52, 0x04, 0x72, 0x65, 0x66, 0x73, + 0x12, 0x23, 0x0a, 0x04, 0x65, 0x6e, 0x76, 0x73, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, + 0x04, 0x65, 0x6e, 0x76, 0x73, 0x12, 0x29, 0x0a, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, + 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, + 0x2e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x07, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, + 0x22, 0xfa, 0x01, 0x0a, 0x06, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x63, + 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, + 0x74, 0x12, 0x33, 0x0a, 0x07, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x67, 0x63, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x06, + 0x6c, 0x61, 0x73, 0x74, 0x47, 0x63, 0x12, 0x37, 0x0a, 0x09, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x70, + 0x75, 0x6c, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, + 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, + 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x08, 0x6c, 0x61, 0x73, 0x74, 0x50, 0x75, 0x6c, 0x6c, 0x12, + 0x33, 0x0a, 0x07, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x67, 0x63, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x06, 0x6e, 0x65, + 0x78, 0x74, 0x47, 0x63, 0x12, 0x37, 0x0a, 0x09, 0x6e, 0x65, 0x78, 0x74, 0x5f, 0x70, 0x75, 0x6c, + 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 
0x73, 0x74, + 0x61, 0x6d, 0x70, 0x52, 0x08, 0x6e, 0x65, 0x78, 0x74, 0x50, 0x75, 0x6c, 0x6c, 0x32, 0x71, 0x0a, + 0x07, 0x4d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x12, 0x2e, 0x0a, 0x09, 0x47, 0x65, 0x74, 0x43, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x0e, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x0f, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, 0x6f, 0x72, 0x2e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0x00, 0x12, 0x36, 0x0a, 0x0c, 0x47, 0x65, 0x74, 0x54, + 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x12, 0x0e, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, + 0x6f, 0x72, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x12, 0x2e, 0x6d, 0x6f, 0x6e, 0x69, 0x74, + 0x6f, 0x72, 0x2e, 0x54, 0x65, 0x6c, 0x65, 0x6d, 0x65, 0x74, 0x72, 0x79, 0x22, 0x00, 0x30, 0x01, + 0x42, 0x49, 0x5a, 0x47, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x6d, + 0x76, 0x69, 0x73, 0x6f, 0x6e, 0x6e, 0x65, 0x61, 0x75, 0x2f, 0x67, 0x69, 0x74, 0x6c, 0x61, 0x62, + 0x2d, 0x63, 0x69, 0x2d, 0x70, 0x69, 0x70, 0x65, 0x6c, 0x69, 0x6e, 0x65, 0x73, 0x2d, 0x65, 0x78, + 0x70, 0x6f, 0x72, 0x74, 0x65, 0x72, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x6d, 0x6f, 0x6e, 0x69, 0x74, + 0x6f, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x62, 0x06, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x33, +} + +var ( + file_pkg_monitor_protobuf_monitor_proto_rawDescOnce sync.Once + file_pkg_monitor_protobuf_monitor_proto_rawDescData = file_pkg_monitor_protobuf_monitor_proto_rawDesc +) + +func file_pkg_monitor_protobuf_monitor_proto_rawDescGZIP() []byte { + file_pkg_monitor_protobuf_monitor_proto_rawDescOnce.Do(func() { + file_pkg_monitor_protobuf_monitor_proto_rawDescData = protoimpl.X.CompressGZIP(file_pkg_monitor_protobuf_monitor_proto_rawDescData) + }) + return file_pkg_monitor_protobuf_monitor_proto_rawDescData +} + +var ( + file_pkg_monitor_protobuf_monitor_proto_msgTypes = make([]protoimpl.MessageInfo, 4) + file_pkg_monitor_protobuf_monitor_proto_goTypes = []interface{}{ + 
(*Empty)(nil), // 0: monitor.Empty + (*Config)(nil), // 1: monitor.Config + (*Telemetry)(nil), // 2: monitor.Telemetry + (*Entity)(nil), // 3: monitor.Entity + (*timestamppb.Timestamp)(nil), // 4: google.protobuf.Timestamp + } +) + +var file_pkg_monitor_protobuf_monitor_proto_depIdxs = []int32{ + 3, // 0: monitor.Telemetry.projects:type_name -> monitor.Entity + 3, // 1: monitor.Telemetry.refs:type_name -> monitor.Entity + 3, // 2: monitor.Telemetry.envs:type_name -> monitor.Entity + 3, // 3: monitor.Telemetry.metrics:type_name -> monitor.Entity + 4, // 4: monitor.Entity.last_gc:type_name -> google.protobuf.Timestamp + 4, // 5: monitor.Entity.last_pull:type_name -> google.protobuf.Timestamp + 4, // 6: monitor.Entity.next_gc:type_name -> google.protobuf.Timestamp + 4, // 7: monitor.Entity.next_pull:type_name -> google.protobuf.Timestamp + 0, // 8: monitor.Monitor.GetConfig:input_type -> monitor.Empty + 0, // 9: monitor.Monitor.GetTelemetry:input_type -> monitor.Empty + 1, // 10: monitor.Monitor.GetConfig:output_type -> monitor.Config + 2, // 11: monitor.Monitor.GetTelemetry:output_type -> monitor.Telemetry + 10, // [10:12] is the sub-list for method output_type + 8, // [8:10] is the sub-list for method input_type + 8, // [8:8] is the sub-list for extension type_name + 8, // [8:8] is the sub-list for extension extendee + 0, // [0:8] is the sub-list for field type_name +} + +func init() { file_pkg_monitor_protobuf_monitor_proto_init() } +func file_pkg_monitor_protobuf_monitor_proto_init() { + if File_pkg_monitor_protobuf_monitor_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_pkg_monitor_protobuf_monitor_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Empty); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_monitor_protobuf_monitor_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := 
v.(*Config); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_monitor_protobuf_monitor_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Telemetry); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_pkg_monitor_protobuf_monitor_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Entity); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_pkg_monitor_protobuf_monitor_proto_rawDesc, + NumEnums: 0, + NumMessages: 4, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_pkg_monitor_protobuf_monitor_proto_goTypes, + DependencyIndexes: file_pkg_monitor_protobuf_monitor_proto_depIdxs, + MessageInfos: file_pkg_monitor_protobuf_monitor_proto_msgTypes, + }.Build() + File_pkg_monitor_protobuf_monitor_proto = out.File + file_pkg_monitor_protobuf_monitor_proto_rawDesc = nil + file_pkg_monitor_protobuf_monitor_proto_goTypes = nil + file_pkg_monitor_protobuf_monitor_proto_depIdxs = nil +} diff --git a/pkg/monitor/protobuf/monitor.proto b/pkg/monitor/protobuf/monitor.proto new file mode 100644 index 00000000..7ca0e03f --- /dev/null +++ b/pkg/monitor/protobuf/monitor.proto @@ -0,0 +1,39 @@ +syntax = "proto3"; + +import "google/protobuf/timestamp.proto"; + +option go_package = "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/protobuf"; + +package monitor; + +service Monitor { + rpc GetConfig(Empty) returns (Config) {} + rpc GetTelemetry(Empty) returns (stream Telemetry) {} +} + +message Empty {} + +message Config { + string content = 1; +} + +message Telemetry { + double 
gitlab_api_usage = 1; + uint64 gitlab_api_requests_count = 2; + double gitlab_api_rate_limit = 3; + uint64 gitlab_api_limit_remaining = 4; + double tasks_buffer_usage = 5; + uint64 tasks_executed_count = 6; + Entity projects = 7; + Entity refs = 8; + Entity envs = 9; + Entity metrics = 10; +} + +message Entity { + int64 count = 1; + google.protobuf.Timestamp last_gc = 2; + google.protobuf.Timestamp last_pull = 3; + google.protobuf.Timestamp next_gc = 4; + google.protobuf.Timestamp next_pull = 5; +} diff --git a/pkg/monitor/protobuf/monitor_grpc.pb.go b/pkg/monitor/protobuf/monitor_grpc.pb.go new file mode 100644 index 00000000..a7e396b4 --- /dev/null +++ b/pkg/monitor/protobuf/monitor_grpc.pb.go @@ -0,0 +1,170 @@ +// Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.2.0 +// - protoc v3.21.0 +// source: pkg/monitor/protobuf/monitor.proto + +package protobuf + +import ( + context "context" + + grpc "google.golang.org/grpc" + codes "google.golang.org/grpc/codes" + status "google.golang.org/grpc/status" +) + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +// Requires gRPC-Go v1.32.0 or later. +const _ = grpc.SupportPackageIsVersion7 + +// MonitorClient is the client API for Monitor service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream. 
+type MonitorClient interface { + GetConfig(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Config, error) + GetTelemetry(ctx context.Context, in *Empty, opts ...grpc.CallOption) (Monitor_GetTelemetryClient, error) +} + +type monitorClient struct { + cc grpc.ClientConnInterface +} + +func NewMonitorClient(cc grpc.ClientConnInterface) MonitorClient { + return &monitorClient{cc} +} + +func (c *monitorClient) GetConfig(ctx context.Context, in *Empty, opts ...grpc.CallOption) (*Config, error) { + out := new(Config) + err := c.cc.Invoke(ctx, "/monitor.Monitor/GetConfig", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *monitorClient) GetTelemetry(ctx context.Context, in *Empty, opts ...grpc.CallOption) (Monitor_GetTelemetryClient, error) { + stream, err := c.cc.NewStream(ctx, &Monitor_ServiceDesc.Streams[0], "/monitor.Monitor/GetTelemetry", opts...) + if err != nil { + return nil, err + } + x := &monitorGetTelemetryClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type Monitor_GetTelemetryClient interface { + Recv() (*Telemetry, error) + grpc.ClientStream +} + +type monitorGetTelemetryClient struct { + grpc.ClientStream +} + +func (x *monitorGetTelemetryClient) Recv() (*Telemetry, error) { + m := new(Telemetry) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +// MonitorServer is the server API for Monitor service. +// All implementations must embed UnimplementedMonitorServer +// for forward compatibility +type MonitorServer interface { + GetConfig(context.Context, *Empty) (*Config, error) + GetTelemetry(*Empty, Monitor_GetTelemetryServer) error + mustEmbedUnimplementedMonitorServer() +} + +// UnimplementedMonitorServer must be embedded to have forward compatible implementations. 
+type UnimplementedMonitorServer struct{} + +func (UnimplementedMonitorServer) GetConfig(context.Context, *Empty) (*Config, error) { + return nil, status.Errorf(codes.Unimplemented, "method GetConfig not implemented") +} + +func (UnimplementedMonitorServer) GetTelemetry(*Empty, Monitor_GetTelemetryServer) error { + return status.Errorf(codes.Unimplemented, "method GetTelemetry not implemented") +} +func (UnimplementedMonitorServer) mustEmbedUnimplementedMonitorServer() {} + +// UnsafeMonitorServer may be embedded to opt out of forward compatibility for this service. +// Use of this interface is not recommended, as added methods to MonitorServer will +// result in compilation errors. +type UnsafeMonitorServer interface { + mustEmbedUnimplementedMonitorServer() +} + +func RegisterMonitorServer(s grpc.ServiceRegistrar, srv MonitorServer) { + s.RegisterService(&Monitor_ServiceDesc, srv) +} + +func _Monitor_GetConfig_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(Empty) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MonitorServer).GetConfig(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/monitor.Monitor/GetConfig", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MonitorServer).GetConfig(ctx, req.(*Empty)) + } + return interceptor(ctx, in, info, handler) +} + +func _Monitor_GetTelemetry_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(Empty) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(MonitorServer).GetTelemetry(m, &monitorGetTelemetryServer{stream}) +} + +type Monitor_GetTelemetryServer interface { + Send(*Telemetry) error + grpc.ServerStream +} + +type monitorGetTelemetryServer struct { + grpc.ServerStream +} + +func (x *monitorGetTelemetryServer) Send(m *Telemetry) error { + return 
x.ServerStream.SendMsg(m) +} + +// Monitor_ServiceDesc is the grpc.ServiceDesc for Monitor service. +// It's only intended for direct use with grpc.RegisterService, +// and not to be introspected or modified (even as a copy) +var Monitor_ServiceDesc = grpc.ServiceDesc{ + ServiceName: "monitor.Monitor", + HandlerType: (*MonitorServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "GetConfig", + Handler: _Monitor_GetConfig_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetTelemetry", + Handler: _Monitor_GetTelemetry_Handler, + ServerStreams: true, + }, + }, + Metadata: "pkg/monitor/protobuf/monitor.proto", +} diff --git a/pkg/monitor/server/server.go b/pkg/monitor/server/server.go new file mode 100644 index 00000000..b342428c --- /dev/null +++ b/pkg/monitor/server/server.go @@ -0,0 +1,223 @@ +package server + +import ( + "context" + "net" + "os" + "time" + + log "github.com/sirupsen/logrus" + "google.golang.org/grpc" + "google.golang.org/protobuf/types/known/timestamppb" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/gitlab" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor" + pb "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/protobuf" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/store" +) + +// Server .. +type Server struct { + pb.UnimplementedMonitorServer + + gitlabClient *gitlab.Client + cfg config.Config + store store.Store + taskSchedulingMonitoring map[schemas.TaskType]*monitor.TaskSchedulingStatus +} + +// NewServer .. +func NewServer( + gitlabClient *gitlab.Client, + c config.Config, + st store.Store, + tsm map[schemas.TaskType]*monitor.TaskSchedulingStatus, +) (s *Server) { + s = &Server{ + gitlabClient: gitlabClient, + cfg: c, + store: st, + taskSchedulingMonitoring: tsm, + } + + return +} + +// Serve .. 
+func (s *Server) Serve() { + if s.cfg.Global.InternalMonitoringListenerAddress == nil { + log.Info("internal monitoring listener address not set") + + return + } + + log.WithFields(log.Fields{ + "scheme": s.cfg.Global.InternalMonitoringListenerAddress.Scheme, + "host": s.cfg.Global.InternalMonitoringListenerAddress.Host, + "path": s.cfg.Global.InternalMonitoringListenerAddress.Path, + }).Info("internal monitoring listener set") + + grpcServer := grpc.NewServer() + pb.RegisterMonitorServer(grpcServer, s) + + var ( + l net.Listener + err error + ) + + switch s.cfg.Global.InternalMonitoringListenerAddress.Scheme { + case "unix": + unixAddr, err := net.ResolveUnixAddr("unix", s.cfg.Global.InternalMonitoringListenerAddress.Path) + if err != nil { + log.WithError(err).Fatal() + } + + if _, err := os.Stat(s.cfg.Global.InternalMonitoringListenerAddress.Path); err == nil { + if err := os.Remove(s.cfg.Global.InternalMonitoringListenerAddress.Path); err != nil { + log.WithError(err).Fatal() + } + } + + defer func(path string) { + if err := os.Remove(path); err != nil { + log.WithError(err).Fatal() + } + }(s.cfg.Global.InternalMonitoringListenerAddress.Path) + + if l, err = net.ListenUnix("unix", unixAddr); err != nil { + log.WithError(err).Fatal() + } + + default: + if l, err = net.Listen(s.cfg.Global.InternalMonitoringListenerAddress.Scheme, s.cfg.Global.InternalMonitoringListenerAddress.Host); err != nil { + log.WithError(err).Fatal() + } + } + + defer l.Close() + + if err = grpcServer.Serve(l); err != nil { + log.WithError(err).Fatal() + } +} + +// GetConfig .. +func (s *Server) GetConfig(ctx context.Context, _ *pb.Empty) (*pb.Config, error) { + return &pb.Config{ + Content: s.cfg.ToYAML(), + }, nil +} + +// GetTelemetry .. 
+func (s *Server) GetTelemetry(_ *pb.Empty, ts pb.Monitor_GetTelemetryServer) (err error) { + ctx := ts.Context() + ticker := time.NewTicker(time.Second) + + for { + telemetry := &pb.Telemetry{ + Projects: &pb.Entity{}, + Envs: &pb.Entity{}, + Refs: &pb.Entity{}, + Metrics: &pb.Entity{}, + } + + telemetry.GitlabApiUsage = float64(s.gitlabClient.RateCounter.Rate()) / float64(s.cfg.Gitlab.MaximumRequestsPerSecond) + if telemetry.GitlabApiUsage > 1 { + telemetry.GitlabApiUsage = 1 + } + + telemetry.GitlabApiRequestsCount = s.gitlabClient.RequestsCounter.Load() + + telemetry.GitlabApiRateLimit = float64(s.gitlabClient.RequestsRemaining) / float64(s.gitlabClient.RequestsLimit) + if telemetry.GitlabApiRateLimit > 1 { + telemetry.GitlabApiRateLimit = 1 + } + + telemetry.GitlabApiLimitRemaining = uint64(s.gitlabClient.RequestsRemaining) + + var queuedTasks uint64 + + queuedTasks, err = s.store.CurrentlyQueuedTasksCount(ctx) + if err != nil { + return + } + + telemetry.TasksBufferUsage = float64(queuedTasks) / 1000 + + telemetry.TasksExecutedCount, err = s.store.ExecutedTasksCount(ctx) + if err != nil { + return + } + + telemetry.Projects.Count, err = s.store.ProjectsCount(ctx) + if err != nil { + return + } + + telemetry.Envs.Count, err = s.store.EnvironmentsCount(ctx) + if err != nil { + return + } + + telemetry.Refs.Count, err = s.store.RefsCount(ctx) + if err != nil { + return + } + + telemetry.Metrics.Count, err = s.store.MetricsCount(ctx) + if err != nil { + return + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypePullProjectsFromWildcards]; ok { + telemetry.Projects.LastPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullProjectsFromWildcards].Last) + telemetry.Projects.NextPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullProjectsFromWildcards].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectProjects]; ok { + telemetry.Projects.LastGc = 
timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectProjects].Last) + telemetry.Projects.NextGc = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectProjects].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypePullEnvironmentsFromProjects]; ok { + telemetry.Envs.LastPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullEnvironmentsFromProjects].Last) + telemetry.Envs.NextPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullEnvironmentsFromProjects].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectEnvironments]; ok { + telemetry.Envs.LastGc = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectEnvironments].Last) + telemetry.Envs.NextGc = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectEnvironments].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypePullRefsFromProjects]; ok { + telemetry.Refs.LastPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullRefsFromProjects].Last) + telemetry.Refs.NextPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullRefsFromProjects].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectRefs]; ok { + telemetry.Refs.LastGc = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectRefs].Last) + telemetry.Refs.NextGc = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectRefs].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypePullMetrics]; ok { + telemetry.Metrics.LastPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullMetrics].Last) + telemetry.Metrics.NextPull = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypePullMetrics].Next) + } + + if _, ok := s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectMetrics]; ok { + telemetry.Metrics.LastGc = 
timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectMetrics].Last) + telemetry.Metrics.NextGc = timestamppb.New(s.taskSchedulingMonitoring[schemas.TaskTypeGarbageCollectMetrics].Next) + } + + ts.Send(telemetry) + + select { + case <-ctx.Done(): + return + case <-ticker.C: + time.Sleep(1) + } + } +} diff --git a/pkg/monitor/ui/ui.go b/pkg/monitor/ui/ui.go new file mode 100644 index 00000000..85007afa --- /dev/null +++ b/pkg/monitor/ui/ui.go @@ -0,0 +1,372 @@ +package ui + +import ( + "context" + "fmt" + "net/url" + "os" + "strconv" + "strings" + "time" + + "github.com/charmbracelet/bubbles/progress" + "github.com/charmbracelet/bubbles/viewport" + tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" + log "github.com/sirupsen/logrus" + "github.com/xeonx/timeago" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/client" + pb "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/monitor/protobuf" +) + +type tab string + +const ( + tabTelemetry tab = "telemetry" + tabConfig tab = "config" +) + +var tabs = [...]tab{ + tabTelemetry, + tabConfig, +} + +var ( + subtle = lipgloss.AdaptiveColor{Light: "#D9DCCF", Dark: "#383838"} + highlight = lipgloss.AdaptiveColor{Light: "#874BFD", Dark: "#7D56F4"} + + dataStyle = lipgloss.NewStyle(). + MarginLeft(1). + MarginRight(5). + Padding(0, 1). + Bold(true). + Foreground(lipgloss.Color("#000000")). + Background(lipgloss.Color("#a9a9a9")) + + // Tabs. + + activeTabBorder = lipgloss.Border{ + Top: "─", + Bottom: " ", + Left: "│", + Right: "│", + TopLeft: "╭", + TopRight: "╮", + BottomLeft: "┘", + BottomRight: "└", + } + + tabBorder = lipgloss.Border{ + Top: "─", + Bottom: "─", + Left: "│", + Right: "│", + TopLeft: "╭", + TopRight: "╮", + BottomLeft: "┴", + BottomRight: "┴", + } + + inactiveTab = lipgloss.NewStyle(). + Border(tabBorder, true). + BorderForeground(highlight). 
+ Padding(0, 1) + + activeTab = inactiveTab.Copy().Border(activeTabBorder, true) + + tabGap = inactiveTab.Copy(). + BorderTop(false). + BorderLeft(false). + BorderRight(false) + + // List. + + entityStyle = lipgloss.NewStyle(). + Border(lipgloss.NormalBorder(), true, false, false, false). + BorderForeground(subtle) + + // Status Bar. + + statusStyle = lipgloss.NewStyle(). + Inherit(statusBarStyle). + Foreground(lipgloss.Color("#FFFDF5")). + Background(lipgloss.Color("#003d80")). + Padding(0, 1). + MarginRight(1) + + statusNugget = lipgloss.NewStyle(). + Foreground(lipgloss.Color("#FFFDF5")). + Padding(0, 1) + + statusBarStyle = lipgloss.NewStyle(). + Foreground(lipgloss.AdaptiveColor{Light: "#343433", Dark: "#C1C6B2"}). + Background(lipgloss.AdaptiveColor{Light: "#D9DCCF", Dark: "#353533"}) + + statusText = lipgloss.NewStyle().Inherit(statusBarStyle) + + versionStyle = statusNugget.Copy(). + Background(lipgloss.Color("#0062cc")) + + // Page. + docStyle = lipgloss.NewStyle() +) + +type model struct { + version string + client *client.Client + vp viewport.Model + progress *progress.Model + telemetry *pb.Telemetry + telemetryStream chan *pb.Telemetry + tabID int +} + +func (m *model) renderConfigViewport() string { + config, err := m.client.GetConfig(context.TODO(), &pb.Empty{}) + if err != nil || config == nil { + log.WithError(err).Fatal() + } + + return config.GetContent() +} + +func (m *model) renderTelemetryViewport() string { + if m.telemetry == nil { + return "\nloading data.." 
+ } + + gitlabAPIUsage := lipgloss.JoinHorizontal( + lipgloss.Top, + " GitLab API usage ", + m.progress.ViewAs(m.telemetry.GitlabApiUsage), + "\n", + ) + + gitlabAPIRequestsCount := lipgloss.JoinHorizontal( + lipgloss.Top, + " GitLab API requests ", + dataStyle.SetString(strconv.Itoa(int(m.telemetry.GetGitlabApiRequestsCount()))).String(), + "\n", + ) + + gitlabAPIRateLimit := lipgloss.JoinHorizontal( + lipgloss.Top, + " GitLab API limit usage ", + m.progress.ViewAs(m.telemetry.GetGitlabApiRateLimit()), + "\n", + ) + + gitlabAPIRateLimitRemaining := lipgloss.JoinHorizontal( + lipgloss.Top, + " GitLab API limit requests remaining ", + dataStyle.SetString(strconv.Itoa(int(m.telemetry.GetGitlabApiLimitRemaining()))).String(), + "\n", + ) + + tasksBufferUsage := lipgloss.JoinHorizontal( + lipgloss.Top, + " Tasks buffer usage ", + m.progress.ViewAs(m.telemetry.GetTasksBufferUsage()), + "\n", + ) + + tasksExecuted := lipgloss.JoinHorizontal( + lipgloss.Top, + " Tasks executed ", + dataStyle.SetString(strconv.Itoa(int(m.telemetry.GetTasksExecutedCount()))).String(), + "\n", + ) + + return strings.Join([]string{ + "", + gitlabAPIUsage, + gitlabAPIRequestsCount, + gitlabAPIRateLimit, + gitlabAPIRateLimitRemaining, + tasksBufferUsage, + tasksExecuted, + renderEntity("Projects", m.telemetry.GetProjects()), + renderEntity("Environments", m.telemetry.GetEnvs()), + renderEntity("Refs", m.telemetry.GetRefs()), + renderEntity("Metrics", m.telemetry.GetMetrics()), + }, "\n") +} + +func renderEntity(name string, e *pb.Entity) string { + return entityStyle.Render(lipgloss.JoinHorizontal( + lipgloss.Top, + " "+name+strings.Repeat(" ", 24-len(name)), + lipgloss.JoinVertical( + lipgloss.Left, + "Total "+dataStyle.SetString(strconv.Itoa(int(e.Count))).String()+"\n", + "Last Pull "+dataStyle.SetString(prettyTimeago(e.LastPull.AsTime())).String()+"\n", + "Last GC "+dataStyle.SetString(prettyTimeago(e.LastGc.AsTime())).String()+"\n", + "Next Pull 
"+dataStyle.SetString(prettyTimeago(e.NextPull.AsTime())).String()+"\n", + "Next GC "+dataStyle.SetString(prettyTimeago(e.NextGc.AsTime())).String()+"\n", + ), + "\n", + )) +} + +func prettyTimeago(t time.Time) string { + if t.IsZero() { + return "N/A" + } + + return timeago.English.Format(t) +} + +func newModel(version string, endpoint *url.URL) (m *model) { + p := progress.NewModel(progress.WithScaledGradient("#80c904", "#ff9d5c")) + + m = &model{ + version: version, + vp: viewport.Model{}, + telemetryStream: make(chan *pb.Telemetry), + progress: &p, + client: client.NewClient(context.TODO(), endpoint), + } + + return +} + +func (m *model) Init() tea.Cmd { + return tea.Batch( + m.streamTelemetry(context.TODO()), + waitForTelemetryUpdate(m.telemetryStream), + ) +} + +func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { + switch msg := msg.(type) { + case tea.WindowSizeMsg: + m.vp.Width = msg.Width + m.vp.Height = msg.Height - 4 + m.progress.Width = msg.Width - 27 + m.setPaneContent() + + return m, nil + case tea.KeyMsg: + switch msg.Type { + case tea.KeyCtrlC, tea.KeyEsc: + return m, tea.Quit + case tea.KeyLeft: + if m.tabID > 0 { + m.tabID-- + m.setPaneContent() + } + + return m, nil + case tea.KeyRight: + if m.tabID < len(tabs)-1 { + m.tabID++ + m.setPaneContent() + } + + return m, nil + case tea.KeyUp, tea.KeyDown, tea.KeyPgDown, tea.KeyPgUp: + vp, cmd := m.vp.Update(msg) + m.vp = vp + + return m, cmd + } + case *pb.Telemetry: + m.telemetry = msg + m.setPaneContent() + + return m, waitForTelemetryUpdate(m.telemetryStream) + } + + return m, nil +} + +func (m *model) View() string { + doc := strings.Builder{} + + // TABS + { + renderedTabs := []string{} + + for tabID, t := range tabs { + if m.tabID == tabID { + renderedTabs = append(renderedTabs, activeTab.Render(string(t))) + + continue + } + + renderedTabs = append(renderedTabs, inactiveTab.Render(string(t))) + } + + row := lipgloss.JoinHorizontal(lipgloss.Top, renderedTabs...) 
+ gap := tabGap.Render(strings.Repeat(" ", max(0, m.vp.Width-lipgloss.Width(row)))) + row = lipgloss.JoinHorizontal(lipgloss.Bottom, row, gap) + doc.WriteString(row + "\n") + } + + // Pane. + { + doc.WriteString(m.vp.View() + "\n") + } + + // Status bar. + { + bar := lipgloss.JoinHorizontal(lipgloss.Top, + statusStyle.Render("github.com/mvisonneau/gitlab-ci-pipelines-exporter"), + statusText.Copy(). + Width(max(0, m.vp.Width-(55+len(m.version)))). + Render(""), + versionStyle.Render(m.version), + ) + + doc.WriteString(statusBarStyle.Width(m.vp.Width).Render(bar)) + } + + return docStyle.Render(doc.String()) +} + +func (m *model) streamTelemetry(ctx context.Context) tea.Cmd { + c, err := m.client.GetTelemetry(ctx, &pb.Empty{}) + if err != nil { + log.WithError(err).Fatal() + } + + go func(m *model) { + for { + telemetry, err := c.Recv() + if err != nil { + log.WithError(err).Fatal() + } + + m.telemetryStream <- telemetry + } + }(m) + + return nil +} + +func waitForTelemetryUpdate(t chan *pb.Telemetry) tea.Cmd { + return func() tea.Msg { + return <-t + } +} + +// Start .. +func Start(version string, listenerAddress *url.URL) { + if err := tea.NewProgram( + newModel(version, listenerAddress), + tea.WithAltScreen(), + ).Start(); err != nil { + fmt.Println("Error running program:", err) + os.Exit(1) + } +} + +func (m *model) setPaneContent() { + switch tabs[m.tabID] { + case tabTelemetry: + m.vp.SetContent(m.renderTelemetryViewport()) + case tabConfig: + m.vp.SetContent(m.renderConfigViewport()) + } +} diff --git a/pkg/ratelimit/local.go b/pkg/ratelimit/local.go index 45376394..a951d10a 100644 --- a/pkg/ratelimit/local.go +++ b/pkg/ratelimit/local.go @@ -1,17 +1,34 @@ package ratelimit import ( - localRatelimit "go.uber.org/ratelimit" + "context" + "time" + + log "github.com/sirupsen/logrus" + "golang.org/x/time/rate" ) // Local .. type Local struct { - localRatelimit.Limiter + *rate.Limiter } // NewLocalLimiter .. 
-func NewLocalLimiter(maxRPS int) Limiter { +func NewLocalLimiter(maximumRPS, burstableRPS int) Limiter { return Local{ - localRatelimit.New(maxRPS), + rate.NewLimiter(rate.Limit(maximumRPS), burstableRPS), + } +} + +// Take .. +func (l Local) Take(ctx context.Context) time.Duration { + start := time.Now() + + if err := l.Limiter.Wait(ctx); err != nil { + log.WithContext(ctx). + WithError(err). + Fatal() } + + return start.Sub(time.Now()) } diff --git a/pkg/ratelimit/local_test.go b/pkg/ratelimit/local_test.go index c2f632f3..e1aa6597 100644 --- a/pkg/ratelimit/local_test.go +++ b/pkg/ratelimit/local_test.go @@ -7,5 +7,5 @@ import ( ) func TestNewLocalLimiter(t *testing.T) { - assert.IsType(t, Local{}, NewLocalLimiter(10)) + assert.IsType(t, Local{}, NewLocalLimiter(10, 1)) } diff --git a/pkg/ratelimit/ratelimit.go b/pkg/ratelimit/ratelimit.go index b3228346..cbb5e16f 100644 --- a/pkg/ratelimit/ratelimit.go +++ b/pkg/ratelimit/ratelimit.go @@ -1,26 +1,16 @@ package ratelimit import ( + "context" "time" - - log "github.com/sirupsen/logrus" ) // Limiter .. type Limiter interface { - Take() time.Time + Take(ctx context.Context) time.Duration } // Take .. 
-func Take(l Limiter) { - now := time.Now() - throttled := l.Take() - if throttled.Sub(now).Milliseconds() > 10 { - log.WithFields( - log.Fields{ - "for": throttled.Sub(now), - }, - ).Debug("throttled GitLab requests") - } - return +func Take(ctx context.Context, l Limiter) { + l.Take(ctx) } diff --git a/pkg/ratelimit/ratelimit_test.go b/pkg/ratelimit/ratelimit_test.go index 2dd88b86..71a330c7 100644 --- a/pkg/ratelimit/ratelimit_test.go +++ b/pkg/ratelimit/ratelimit_test.go @@ -8,18 +8,21 @@ import ( "time" "github.com/alicebob/miniredis/v2" - "github.com/go-redis/redis/v8" + "github.com/redis/go-redis/v9" "github.com/stretchr/testify/assert" ) func MeasureTakeDuration(l Limiter) int64 { start := time.Now() - Take(l) - return int64(time.Now().Sub(start)) + + Take(context.TODO(), l) + + return int64(time.Since(start)) } func TestLocalTake(t *testing.T) { - l := NewLocalLimiter(1) + l := NewLocalLimiter(1, 1) + assert.LessOrEqual(t, MeasureTakeDuration(l), int64(100*time.Millisecond)) assert.GreaterOrEqual(t, MeasureTakeDuration(l), int64(time.Second)) } @@ -29,34 +32,37 @@ func TestRedisTake(t *testing.T) { if err != nil { panic(err) } + defer s.Close() l := NewRedisLimiter( - context.Background(), redis.NewClient(&redis.Options{Addr: s.Addr()}), 1, ) - assert.LessOrEqual(t, MeasureTakeDuration(l), int64(100*time.Millisecond)) + assert.LessOrEqual(t, MeasureTakeDuration(l), int64(250*time.Millisecond)) assert.GreaterOrEqual(t, MeasureTakeDuration(l), int64(900*time.Millisecond)) } func TestRedisTakeError(t *testing.T) { if os.Getenv("SHOULD_ERROR") == "1" { l := NewRedisLimiter( - context.Background(), redis.NewClient(&redis.Options{Addr: "doesnotexist"}), 1, ) - Take(l) + + Take(context.TODO(), l) + return } cmd := exec.Command(os.Args[0], "-test.run=TestRedisTakeError") cmd.Env = append(os.Environ(), "SHOULD_ERROR=1") + err := cmd.Run() if e, ok := err.(*exec.ExitError); ok && !e.Success() { return } + t.Fatal("process ran successfully, wanted exit status 1") } 
diff --git a/pkg/ratelimit/redis.go b/pkg/ratelimit/redis.go index 9592a403..8544e60d 100644 --- a/pkg/ratelimit/redis.go +++ b/pkg/ratelimit/redis.go @@ -4,8 +4,8 @@ import ( "context" "time" - "github.com/go-redis/redis/v8" - "github.com/go-redis/redis_rate/v9" + "github.com/go-redis/redis_rate/v10" + "github.com/redis/go-redis/v9" log "github.com/sirupsen/logrus" ) @@ -14,25 +14,40 @@ const redisKey string = `gcpe:gitlab:api` // Redis .. type Redis struct { *redis_rate.Limiter - Context context.Context - MaxRPS int + MaxRPS int } // NewRedisLimiter .. -func NewRedisLimiter(ctx context.Context, redisClient *redis.Client, maxRPS int) Limiter { +func NewRedisLimiter(redisClient *redis.Client, maxRPS int) Limiter { return Redis{ Limiter: redis_rate.NewLimiter(redisClient), - Context: ctx, MaxRPS: maxRPS, } } // Take .. -func (r Redis) Take() time.Time { - res, err := r.Allow(r.Context, redisKey, redis_rate.PerSecond(r.MaxRPS)) - if err != nil { - log.Fatalf(err.Error()) +func (r Redis) Take(ctx context.Context) time.Duration { + start := time.Now() + + for { + res, err := r.Allow(ctx, redisKey, redis_rate.PerSecond(r.MaxRPS)) + if err != nil { + log.WithContext(ctx). + WithError(err). 
+ Fatal() + } + + if res.Allowed > 0 { + break + } else { + log.WithFields( + log.Fields{ + "for": res.RetryAfter.String(), + }, + ).Debug("throttled GitLab requests") + time.Sleep(res.RetryAfter) + } } - time.Sleep(res.RetryAfter) - return time.Now() + + return start.Sub(time.Now()) } diff --git a/pkg/ratelimit/redis_test.go b/pkg/ratelimit/redis_test.go index 65141c41..c8c52d18 100644 --- a/pkg/ratelimit/redis_test.go +++ b/pkg/ratelimit/redis_test.go @@ -1,25 +1,22 @@ package ratelimit import ( - "context" "testing" - "github.com/go-redis/redis/v8" - "github.com/go-redis/redis_rate/v9" + "github.com/go-redis/redis_rate/v10" + "github.com/redis/go-redis/v9" "github.com/stretchr/testify/assert" ) func TestNewRedisLimiter(t *testing.T) { redisClient := redis.NewClient(&redis.Options{}) l := NewRedisLimiter( - context.Background(), redisClient, 10, ) expectedValue := Redis{ Limiter: redis_rate.NewLimiter(redisClient), - Context: context.Background(), MaxRPS: 10, } diff --git a/pkg/schemas/config.go b/pkg/schemas/config.go deleted file mode 100644 index c6789230..00000000 --- a/pkg/schemas/config.go +++ /dev/null @@ -1,275 +0,0 @@ -package schemas - -import ( - "fmt" - "io/ioutil" - "path/filepath" - - log "github.com/sirupsen/logrus" - "gopkg.in/yaml.v3" -) - -// Default values -const ( - defaultServerConfigEnablePprof = false - defaultServerConfigListenAddress = ":8080" - defaultServerConfigMetricsEnabled = true - defaultServerConfigMetricsEnableOpenmetricsEncoding = false - defaultServerConfigWebhookEnabled = false - defaultGitlabConfigURL = "https://gitlab.com" - defaultGitlabConfigHealthURL = "https://gitlab.com/explore" - defaultGitlabConfigEnableHealthCheck = true - defaultGitlabConfigEnableTLSVerify = true - defaultPullConfigMaximumGitLabAPIRequestsPerSecond = 10 - defaultPullConfigProjectsFromWildcardsOnInit = true - defaultPullConfigProjectsFromWildcardsScheduled = true - defaultPullConfigProjectsFromWildcardsIntervalSeconds = 1800 - 
defaultPullConfigEnvironmentsFromProjectsOnInit = true - defaultPullConfigEnvironmentsFromProjectsScheduled = true - defaultPullConfigEnvironmentsFromProjectsIntervalSeconds = 1800 - defaultPullConfigRefsFromProjectsOnInit = true - defaultPullConfigRefsFromProjectsScheduled = true - defaultPullConfigRefsFromProjectsIntervalSeconds = 300 - defaultPullConfigMetricsOnInit = true - defaultPullConfigMetricsScheduled = true - defaultPullConfigMetricsIntervalSeconds = 30 - defaultGarbageCollectConfigProjectsOnInit = false - defaultGarbageCollectConfigProjectsScheduled = true - defaultGarbageCollectConfigProjectsIntervalSeconds = 14400 - defaultGarbageCollectConfigProjectsEnvironmentsOnInit = false - defaultGarbageCollectConfigProjectsEnvironmentsScheduled = true - defaultGarbageCollectConfigProjectsEnvironmentsIntervalSeconds = 14400 - defaultGarbageCollectConfigProjectsRefsOnInit = false - defaultGarbageCollectConfigProjectsRefsScheduled = true - defaultGarbageCollectConfigProjectsRefsIntervalSeconds = 1800 - defaultGarbageCollectConfigProjectsRefsMetricsOnInit = false - defaultGarbageCollectConfigProjectsRefsMetricsScheduled = true - defaultGarbageCollectConfigProjectsRefsMetricsIntervalSeconds = 300 -) - -// Config represents what can be defined as a yaml config file -type Config struct { - // Server related configuration - Server ServerConfig `yaml:"server"` - - // GitLab related configuration - Gitlab GitlabConfig `yaml:"gitlab"` - - // Redis related configuration - Redis RedisConfig `yaml:"redis"` - - // Pull configuration - Pull PullConfig `yaml:"pull"` - - // GarbageCollect configuration - GarbageCollect GarbageCollectConfig `yaml:"garbage_collect"` - - // Default parameters which can be overridden at either the Project or Wildcard level - ProjectDefaults ProjectParameters `yaml:"project_defaults"` - - // List of projects to pull - Projects []Project `yaml:"projects"` - - // List of wildcards to search projects from - Wildcards Wildcards `yaml:"wildcards"` -} - 
-// ServerConfig .. -type ServerConfig struct { - // Enable profiling pages - EnablePprof bool `yaml:"enable_pprof"` - - // [address:port] to make the process listen upon - ListenAddress string `yaml:"listen_address"` - - Metrics ServerConfigMetrics `yaml:"metrics"` - Webhook ServerConfigWebhook `yaml:"webhook"` -} - -// ServerConfigMetrics .. -type ServerConfigMetrics struct { - // Enable /metrics endpoint - Enabled bool `yaml:"enabled"` - - // Enable OpenMetrics content encoding in prometheus HTTP handler - EnableOpenmetricsEncoding bool `yaml:"enable_openmetrics_encoding"` -} - -// ServerConfigWebhook .. -type ServerConfigWebhook struct { - // Enable /webhook endpoint to support GitLab requests - Enabled bool `yaml:"enabled"` - - // Secret token to authenticate legitimate webhook requests coming from the GitLab server - SecretToken string `yaml:"secret_token"` -} - -// GitlabConfig .. -type GitlabConfig struct { - // The URL of the GitLab server/api - URL string `yaml:"url"` - - // Token to use to authenticate against the API - Token string `yaml:"token"` - - // The URL of the GitLab server/api health endpoint (default to /users/sign_in which is publicly available on gitlab.com) - HealthURL string `yaml:"health_url"` - - // Whether to validate the service is reachable calling HealthURL - EnableHealthCheck bool `yaml:"enable_health_check"` - - // Whether to skip TLS validation when querying HealthURL - EnableTLSVerify bool `yaml:"enable_tls_verify"` -} - -// RedisConfig .. -type RedisConfig struct { - // URL used to connect onto the redis endpoint - // format: redis[s]://[:password@]host[:port][/db-number][?option=value]) - URL string `yaml:"url"` -} - -// SchedulerConfig .. -type SchedulerConfig struct { - OnInit bool `yaml:"on_init"` - Scheduled bool `yaml:"scheduled"` - IntervalSeconds int `yaml:"interval_seconds"` -} - -// PullConfig .. 
-type PullConfig struct { - // Maximum amount of requests per seconds to make against the GitLab API (default: 10) - MaximumGitLabAPIRequestsPerSecond int `yaml:"maximum_gitlab_api_requests_per_second"` - - // ProjectsFromWildcards configuration - ProjectsFromWildcards SchedulerConfig `yaml:"projects_from_wildcards"` - - // EnvironmentsFromProjects configuration - EnvironmentsFromProjects SchedulerConfig `yaml:"environments_from_projects"` - - // RefsFromProjects configuration - RefsFromProjects SchedulerConfig `yaml:"refs_from_projects"` - - // Metrics configuration - Metrics SchedulerConfig `yaml:"metrics"` -} - -// GarbageCollectConfig .. -type GarbageCollectConfig struct { - // Projects configuration - Projects SchedulerConfig `yaml:"projects"` - - // Environments configuration - Environments SchedulerConfig `yaml:"environments"` - - // Refs configuration - Refs SchedulerConfig `yaml:"refs"` - - // Metrics configuration - Metrics SchedulerConfig `yaml:"metrics"` -} - -// NewConfig returns a Config with default parameters values -func NewConfig() Config { - return Config{ - Server: ServerConfig{ - EnablePprof: defaultServerConfigEnablePprof, - ListenAddress: defaultServerConfigListenAddress, - Metrics: ServerConfigMetrics{ - Enabled: defaultServerConfigMetricsEnabled, - EnableOpenmetricsEncoding: defaultServerConfigMetricsEnableOpenmetricsEncoding, - }, - Webhook: ServerConfigWebhook{ - Enabled: defaultServerConfigWebhookEnabled, - }, - }, - Gitlab: GitlabConfig{ - URL: defaultGitlabConfigURL, - HealthURL: defaultGitlabConfigHealthURL, - EnableHealthCheck: defaultGitlabConfigEnableHealthCheck, - EnableTLSVerify: defaultGitlabConfigEnableTLSVerify, - }, - Pull: PullConfig{ - MaximumGitLabAPIRequestsPerSecond: defaultPullConfigMaximumGitLabAPIRequestsPerSecond, - ProjectsFromWildcards: SchedulerConfig{ - OnInit: defaultPullConfigProjectsFromWildcardsOnInit, - Scheduled: defaultPullConfigProjectsFromWildcardsScheduled, - IntervalSeconds: 
defaultPullConfigProjectsFromWildcardsIntervalSeconds, - }, - EnvironmentsFromProjects: SchedulerConfig{ - OnInit: defaultPullConfigEnvironmentsFromProjectsOnInit, - Scheduled: defaultPullConfigEnvironmentsFromProjectsScheduled, - IntervalSeconds: defaultPullConfigEnvironmentsFromProjectsIntervalSeconds, - }, - RefsFromProjects: SchedulerConfig{ - OnInit: defaultPullConfigRefsFromProjectsOnInit, - Scheduled: defaultPullConfigRefsFromProjectsScheduled, - IntervalSeconds: defaultPullConfigRefsFromProjectsIntervalSeconds, - }, - Metrics: SchedulerConfig{ - OnInit: defaultPullConfigMetricsOnInit, - Scheduled: defaultPullConfigMetricsScheduled, - IntervalSeconds: defaultPullConfigMetricsIntervalSeconds, - }, - }, - GarbageCollect: GarbageCollectConfig{ - Projects: SchedulerConfig{ - OnInit: defaultGarbageCollectConfigProjectsOnInit, - Scheduled: defaultGarbageCollectConfigProjectsScheduled, - IntervalSeconds: defaultGarbageCollectConfigProjectsIntervalSeconds, - }, - Environments: SchedulerConfig{ - OnInit: defaultGarbageCollectConfigProjectsEnvironmentsOnInit, - Scheduled: defaultGarbageCollectConfigProjectsEnvironmentsScheduled, - IntervalSeconds: defaultGarbageCollectConfigProjectsEnvironmentsIntervalSeconds, - }, - Refs: SchedulerConfig{ - OnInit: defaultGarbageCollectConfigProjectsRefsOnInit, - Scheduled: defaultGarbageCollectConfigProjectsRefsScheduled, - IntervalSeconds: defaultGarbageCollectConfigProjectsRefsIntervalSeconds, - }, - Metrics: SchedulerConfig{ - OnInit: defaultGarbageCollectConfigProjectsRefsMetricsOnInit, - Scheduled: defaultGarbageCollectConfigProjectsRefsMetricsScheduled, - IntervalSeconds: defaultGarbageCollectConfigProjectsRefsMetricsIntervalSeconds, - }, - }, - } -} - -// ParseConfigFile loads a yaml file into a Config structure -func ParseConfigFile(path string) (Config, error) { - cfg := NewConfig() - configFile, err := ioutil.ReadFile(filepath.Clean(path)) - if err != nil { - return cfg, fmt.Errorf("couldn't open config file : %v", err) - 
} - - if err = yaml.Unmarshal(configFile, &cfg); err != nil { - return cfg, fmt.Errorf("unable to parse config file: %v", err) - } - - // Hack to fix the missing health endpoint on gitlab.com - if cfg.Gitlab.URL != "https://gitlab.com" { - cfg.Gitlab.HealthURL = fmt.Sprintf("%s/-/health", cfg.Gitlab.URL) - } - - return cfg, nil -} - -// Log returns some logging fields to showcase the configuration to the enduser -func (sc *SchedulerConfig) Log() log.Fields { - onInit, scheduled := "no", "no" - if sc.OnInit { - onInit = "yes" - } - - if sc.Scheduled { - scheduled = fmt.Sprintf("every %vs", sc.IntervalSeconds) - } - - return log.Fields{ - "on-init": onInit, - "scheduled": scheduled, - } -} diff --git a/pkg/schemas/config_test.go b/pkg/schemas/config_test.go deleted file mode 100644 index c6301843..00000000 --- a/pkg/schemas/config_test.go +++ /dev/null @@ -1,399 +0,0 @@ -package schemas - -import ( - "fmt" - "io/ioutil" - "os" - "testing" - - "github.com/openlyinc/pointy" - log "github.com/sirupsen/logrus" - "github.com/stretchr/testify/assert" -) - -func TestParseConfigInvalidPath(t *testing.T) { - cfg, err := ParseConfigFile("/path_do_not_exist") - assert.Equal(t, fmt.Errorf("couldn't open config file : open /path_do_not_exist: no such file or directory"), err) - assert.Equal(t, NewConfig(), cfg) -} - -func TestParseConfigFileInvalidYaml(t *testing.T) { - f, err := ioutil.TempFile("/tmp", "test-") - assert.Nil(t, err) - defer os.Remove(f.Name()) - - // Invalid YAML content - f.WriteString("invalid_yaml") - cfg, err := ParseConfigFile(f.Name()) - assert.Error(t, err) - assert.Equal(t, NewConfig(), cfg) -} - -func TestParseConfigValidYaml(t *testing.T) { - f, err := ioutil.TempFile("/tmp", "test-") - assert.NoError(t, err) - defer os.Remove(f.Name()) - - // Valid minimal configuration - f.WriteString(` ---- -server: - enable_pprof: true - listen_address: :1025 - - metrics: - enabled: false - enable_openmetrics_encoding: false - - webhook: - enabled: true - 
secret_token: secret - -gitlab: - url: https://gitlab.example.com - token: xrN14n9-ywvAFxxxxxx - health_url: https://gitlab.example.com/-/health - enable_health_check: false - enable_tls_verify: false - -redis: - url: redis://popopo:1337 - -pull: - maximum_gitlab_api_requests_per_second: 1 - projects_from_wildcards: - on_init: false - scheduled: false - interval_seconds: 1 - environments_from_projects: - on_init: false - scheduled: false - interval_seconds: 2 - refs_from_projects: - on_init: false - scheduled: false - interval_seconds: 3 - metrics: - on_init: false - scheduled: false - interval_seconds: 4 - -garbage_collect: - projects: - on_init: true - scheduled: false - interval_seconds: 1 - environments: - on_init: true - scheduled: false - interval_seconds: 2 - refs: - on_init: true - scheduled: false - interval_seconds: 3 - metrics: - on_init: true - scheduled: false - interval_seconds: 4 - -project_defaults: - output_sparse_status_metrics: false - pull: - environments: - enabled: true - name_regexp: "^baz$" - tags_regexp: "^blah$" - refs: - regexp: "^baz$" - max_age_seconds: 1 - from: - pipelines: - enabled: true - depth: 1 - merge_requests: - enabled: true - depth: 2 - pipeline: - jobs: - enabled: true - variables: - enabled: true - regexp: "^CI_" - -projects: - - name: foo/project - - name: bar/project - pull: - environments: - enabled: false - name_regexp: "^foo$" - tags_regexp: "^foo$" - refs: - regexp: "^foo$" - max_age_seconds: 2 - - name: new/project - pull: - environments: - enabled: false - name_regexp: "^foo$" - tags_regexp: "^foo$" - refs: - regexp: "^bar$" - max_age_seconds: 3 - -wildcards: - - owner: - name: foo - kind: group - search: 'bar' - archived: true - pull: - environments: - enabled: false - name_regexp: "^foo$" - tags_regexp: "^foo$" - refs: - regexp: "^yolo$" - max_age_seconds: 4 -`) - - cfg, err := ParseConfigFile(f.Name()) - assert.NoError(t, err) - - expectedCfg := Config{ - Server: ServerConfig{ - EnablePprof: true, - 
ListenAddress: ":1025", - Metrics: ServerConfigMetrics{ - Enabled: false, - EnableOpenmetricsEncoding: false, - }, - Webhook: ServerConfigWebhook{ - Enabled: true, - SecretToken: "secret", - }, - }, - Gitlab: GitlabConfig{ - URL: "https://gitlab.example.com", - HealthURL: "https://gitlab.example.com/-/health", - Token: "xrN14n9-ywvAFxxxxxx", - EnableHealthCheck: false, - EnableTLSVerify: false, - }, - Redis: RedisConfig{ - URL: "redis://popopo:1337", - }, - Pull: PullConfig{ - MaximumGitLabAPIRequestsPerSecond: 1, - ProjectsFromWildcards: SchedulerConfig{ - OnInit: false, - Scheduled: false, - IntervalSeconds: 1, - }, - EnvironmentsFromProjects: SchedulerConfig{ - OnInit: false, - Scheduled: false, - IntervalSeconds: 2, - }, - RefsFromProjects: SchedulerConfig{ - OnInit: false, - Scheduled: false, - IntervalSeconds: 3, - }, - Metrics: SchedulerConfig{ - OnInit: false, - Scheduled: false, - IntervalSeconds: 4, - }, - }, - GarbageCollect: GarbageCollectConfig{ - Projects: SchedulerConfig{ - OnInit: true, - Scheduled: false, - IntervalSeconds: 1, - }, - Environments: SchedulerConfig{ - OnInit: true, - Scheduled: false, - IntervalSeconds: 2, - }, - Refs: SchedulerConfig{ - OnInit: true, - Scheduled: false, - IntervalSeconds: 3, - }, - Metrics: SchedulerConfig{ - OnInit: true, - Scheduled: false, - IntervalSeconds: 4, - }, - }, - ProjectDefaults: ProjectParameters{ - OutputSparseStatusMetricsValue: pointy.Bool(false), - Pull: ProjectPull{ - Environments: ProjectPullEnvironments{ - EnabledValue: pointy.Bool(true), - NameRegexpValue: pointy.String("^baz$"), - TagsRegexpValue: pointy.String("^blah$"), - }, - Refs: ProjectPullRefs{ - RegexpValue: pointy.String("^baz$"), - MaxAgeSecondsValue: pointy.Uint(1), - From: ProjectPullRefsFrom{ - Pipelines: ProjectPullRefsFromPipelines{ - EnabledValue: pointy.Bool(true), - DepthValue: pointy.Int(1), - }, - MergeRequests: ProjectPullRefsFromMergeRequests{ - EnabledValue: pointy.Bool(true), - DepthValue: pointy.Int(2), - }, - }, - }, 
- Pipeline: ProjectPullPipeline{ - Jobs: ProjectPullPipelineJobs{ - EnabledValue: pointy.Bool(true), - }, - Variables: ProjectPullPipelineVariables{ - EnabledValue: pointy.Bool(true), - RegexpValue: pointy.String("^CI_"), - }, - }, - }, - }, - Projects: []Project{ - { - Name: "foo/project", - }, - { - Name: "bar/project", - ProjectParameters: ProjectParameters{ - Pull: ProjectPull{ - Environments: ProjectPullEnvironments{ - EnabledValue: pointy.Bool(false), - NameRegexpValue: pointy.String("^foo$"), - TagsRegexpValue: pointy.String("^foo$"), - }, - Refs: ProjectPullRefs{ - RegexpValue: pointy.String("^foo$"), - MaxAgeSecondsValue: pointy.Uint(2), - }, - }, - }, - }, - { - Name: "new/project", - ProjectParameters: ProjectParameters{ - Pull: ProjectPull{ - Environments: ProjectPullEnvironments{ - EnabledValue: pointy.Bool(false), - NameRegexpValue: pointy.String("^foo$"), - TagsRegexpValue: pointy.String("^foo$"), - }, - Refs: ProjectPullRefs{ - RegexpValue: pointy.String("^bar$"), - MaxAgeSecondsValue: pointy.Uint(3), - }, - }, - }, - }, - }, - Wildcards: []Wildcard{ - { - Search: "bar", - Owner: struct { - Name string `yaml:"name"` - Kind string `yaml:"kind"` - IncludeSubgroups bool `yaml:"include_subgroups"` - }{ - Name: "foo", - Kind: "group", - }, - ProjectParameters: ProjectParameters{ - Pull: ProjectPull{ - Environments: ProjectPullEnvironments{ - EnabledValue: pointy.Bool(false), - NameRegexpValue: pointy.String("^foo$"), - TagsRegexpValue: pointy.String("^foo$"), - }, - Refs: ProjectPullRefs{ - RegexpValue: pointy.String("^yolo$"), - MaxAgeSecondsValue: pointy.Uint(4), - }, - }, - }, - Archived: true, - }, - }, - } - - // Test variable assignments - assert.Equal(t, expectedCfg, cfg) -} - -func TestParseConfigDefaultsValues(t *testing.T) { - f, err := ioutil.TempFile("/tmp", "test-") - assert.Nil(t, err) - defer os.Remove(f.Name()) - - // Valid minimal configuration - f.WriteString(` ---- -`) - - cfg, err := ParseConfigFile(f.Name()) - assert.NoError(t, err) 
- expectedCfg := NewConfig() - - // Test variable assignments - assert.Equal(t, expectedCfg, cfg) - - // Validate project default values - assert.Equal(t, defaultProjectOutputSparseStatusMetrics, cfg.ProjectDefaults.OutputSparseStatusMetrics()) - - assert.Equal(t, defaultProjectPullEnvironmentsEnabled, cfg.ProjectDefaults.Pull.Environments.Enabled()) - assert.Equal(t, defaultProjectPullEnvironmentsNameRegexp, cfg.ProjectDefaults.Pull.Environments.NameRegexp()) - assert.Equal(t, defaultProjectPullEnvironmentsTagsRegexp, cfg.ProjectDefaults.Pull.Environments.TagsRegexp()) - - assert.Equal(t, defaultProjectPullRefsRegexp, cfg.ProjectDefaults.Pull.Refs.Regexp()) - assert.Equal(t, defaultProjectPullRefsFromPipelinesEnabled, cfg.ProjectDefaults.Pull.Refs.From.Pipelines.Enabled()) - assert.Equal(t, defaultProjectPullRefsFromPipelinesDepth, cfg.ProjectDefaults.Pull.Refs.From.Pipelines.Depth()) - - assert.Equal(t, defaultProjectPullRefsFromMergeRequestsEnabled, cfg.ProjectDefaults.Pull.Refs.From.MergeRequests.Enabled()) - assert.Equal(t, defaultProjectPullRefsFromMergeRequestsDepth, cfg.ProjectDefaults.Pull.Refs.From.MergeRequests.Depth()) - - assert.Equal(t, defaultProjectPullPipelineJobsEnabled, cfg.ProjectDefaults.Pull.Pipeline.Jobs.Enabled()) - - assert.Equal(t, defaultProjectPullPipelineVariablesEnabled, cfg.ProjectDefaults.Pull.Pipeline.Variables.Enabled()) - assert.Equal(t, defaultProjectPullPipelineVariablesRegexp, cfg.ProjectDefaults.Pull.Pipeline.Variables.Regexp()) -} - -func TestParseConfigSelfHostedGitLab(t *testing.T) { - f, err := ioutil.TempFile("/tmp", "test-") - assert.Nil(t, err) - defer os.Remove(f.Name()) - - // Valid minimal configuration - f.WriteString(` ---- -gitlab: - url: https://gitlab.example.com -`) - - cfg, err := ParseConfigFile(f.Name()) - assert.NoError(t, err) - assert.Equal(t, "https://gitlab.example.com/-/health", cfg.Gitlab.HealthURL) -} - -func TestSchedulerConfigLog(t *testing.T) { - sc := SchedulerConfig{ - OnInit: true, - Scheduled: 
true, - IntervalSeconds: 300, - } - - assert.Equal(t, log.Fields{ - "on-init": "yes", - "scheduled": "every 300s", - }, sc.Log()) -} diff --git a/pkg/schemas/environments.go b/pkg/schemas/environments.go index 0399b18b..7bc4d196 100644 --- a/pkg/schemas/environments.go +++ b/pkg/schemas/environments.go @@ -14,7 +14,6 @@ type Environment struct { Available bool LatestDeployment Deployment - TagsRegexp string OutputSparseStatusMetrics bool } @@ -26,11 +25,10 @@ func (e Environment) Key() EnvironmentKey { return EnvironmentKey(strconv.Itoa(int(crc32.ChecksumIEEE([]byte(e.ProjectName + e.Name))))) } -// Environments allows us to keep track of all the Environment -// objects we have discovered +// Environments allows us to keep track of all the Environment objects we have discovered. type Environments map[EnvironmentKey]Environment -// Count returns the amount of environments in the map +// Count returns the amount of environments in the map. func (envs Environments) Count() int { return len(envs) } diff --git a/pkg/schemas/jobs.go b/pkg/schemas/jobs.go index 9f33dea2..e3f38056 100644 --- a/pkg/schemas/jobs.go +++ b/pkg/schemas/jobs.go @@ -1,19 +1,24 @@ package schemas import ( + "strings" + goGitlab "github.com/xanzy/go-gitlab" ) // Job .. type Job struct { - ID int - Name string - Stage string - Timestamp float64 - DurationSeconds float64 - Status string - ArtifactSize float64 - Runner Runner + ID int + Name string + Stage string + Timestamp float64 + DurationSeconds float64 + QueuedDurationSeconds float64 + Status string + TagList string + ArtifactSize float64 + FailureReason string + Runner Runner } // Runner .. @@ -26,24 +31,30 @@ type Jobs map[string]Job // NewJob .. 
func NewJob(gj goGitlab.Job) Job { - var artifactSize float64 + var ( + artifactSize float64 + timestamp float64 + ) + for _, artifact := range gj.Artifacts { artifactSize += float64(artifact.Size) } - var timestamp float64 if gj.CreatedAt != nil { timestamp = float64(gj.CreatedAt.Unix()) } return Job{ - ID: gj.ID, - Name: gj.Name, - Stage: gj.Stage, - Timestamp: timestamp, - DurationSeconds: gj.Duration, - Status: gj.Status, - ArtifactSize: artifactSize, + ID: gj.ID, + Name: gj.Name, + Stage: gj.Stage, + Timestamp: timestamp, + DurationSeconds: gj.Duration, + QueuedDurationSeconds: gj.QueuedDuration, + Status: gj.Status, + TagList: strings.Join(gj.TagList, ","), + ArtifactSize: artifactSize, + FailureReason: gj.FailureReason, Runner: Runner{ Description: gj.Runner.Description, diff --git a/pkg/schemas/jobs_test.go b/pkg/schemas/jobs_test.go index b799a762..8258a84c 100644 --- a/pkg/schemas/jobs_test.go +++ b/pkg/schemas/jobs_test.go @@ -10,13 +10,18 @@ import ( func TestNewJob(t *testing.T) { createdAt := time.Date(2020, 10, 1, 13, 5, 5, 0, time.UTC) + startedAt := time.Date(2020, 10, 1, 13, 5, 35, 0, time.UTC) + gitlabJob := goGitlab.Job{ - ID: 2, - Name: "foo", - CreatedAt: &createdAt, - Duration: 15, - Status: "failed", - Stage: "🚀", + ID: 2, + Name: "foo", + CreatedAt: &createdAt, + StartedAt: &startedAt, + Duration: 15, + QueuedDuration: 10, + Status: "failed", + Stage: "🚀", + TagList: []string{"test-tag"}, Runner: struct { ID int "json:\"id\"" Description string "json:\"description\"" @@ -42,13 +47,15 @@ func TestNewJob(t *testing.T) { } expectedJob := Job{ - ID: 2, - Name: "foo", - Stage: "🚀", - Timestamp: 1.601557505e+09, - DurationSeconds: 15, - Status: "failed", - ArtifactSize: 150, + ID: 2, + Name: "foo", + Stage: "🚀", + Timestamp: 1.601557505e+09, + DurationSeconds: 15, + QueuedDurationSeconds: 10, + Status: "failed", + TagList: "test-tag", + ArtifactSize: 150, Runner: Runner{ Description: "xxx", diff --git a/pkg/schemas/metric.go 
b/pkg/schemas/metric.go index e2ad54ff..bbf19752 100644 --- a/pkg/schemas/metric.go +++ b/pkg/schemas/metric.go @@ -9,7 +9,7 @@ import ( ) const ( - // MetricKindCoverage refers to the coerage of a job/pipeline + // MetricKindCoverage refers to the coerage of a job/pipeline. MetricKindCoverage MetricKind = iota // MetricKindDurationSeconds .. @@ -51,6 +51,9 @@ const ( // MetricKindJobID .. MetricKindJobID + // MetricKindJobQueuedDurationSeconds .. + MetricKindJobQueuedDurationSeconds + // MetricKindJobRunCount .. MetricKindJobRunCount @@ -60,14 +63,59 @@ const ( // MetricKindJobTimestamp .. MetricKindJobTimestamp - // MetricKindStatus .. - MetricKindStatus + // MetricKindQueuedDurationSeconds .. + MetricKindQueuedDurationSeconds // MetricKindRunCount .. MetricKindRunCount + // MetricKindStatus .. + MetricKindStatus + // MetricKindTimestamp .. MetricKindTimestamp + + // MetricKindTestReportTotalTime .. + MetricKindTestReportTotalTime + + // MetricKindTestReportTotalCount .. + MetricKindTestReportTotalCount + + // MetricKindTestReportSuccessCount .. + MetricKindTestReportSuccessCount + + // MetricKindTestReportFailedCount .. + MetricKindTestReportFailedCount + + // MetricKindTestReportSkippedCount .. + MetricKindTestReportSkippedCount + + // MetricKindTestReportErrorCount .. + MetricKindTestReportErrorCount + + // MetricKindTestSuiteTotalTime .. + MetricKindTestSuiteTotalTime + + // MetricKindTestSuiteTotalCount .. + MetricKindTestSuiteTotalCount + + // MetricKindTestSuiteSuccessCount .. + MetricKindTestSuiteSuccessCount + + // MetricKindTestSuiteFailedCount .. + MetricKindTestSuiteFailedCount + + // MetricKindTestSuiteSkippedCount .. + MetricKindTestSuiteSkippedCount + + // MetricKindTestSuiteErrorCount .. + MetricKindTestSuiteErrorCount + + // MetricKindTestCaseExecutionTime .. + MetricKindTestCaseExecutionTime + + // MetricKindTestCaseStatus .. + MetricKindTestCaseStatus ) // MetricKind .. 
@@ -91,20 +139,23 @@ func (m Metric) Key() MetricKey { key := strconv.Itoa(int(m.Kind)) switch m.Kind { - case MetricKindCoverage, MetricKindDurationSeconds, MetricKindID, MetricKindStatus, MetricKindRunCount, MetricKindTimestamp: + case MetricKindCoverage, MetricKindDurationSeconds, MetricKindID, MetricKindQueuedDurationSeconds, MetricKindRunCount, MetricKindStatus, MetricKindTimestamp, MetricKindTestReportTotalCount, MetricKindTestReportErrorCount, MetricKindTestReportFailedCount, MetricKindTestReportSkippedCount, MetricKindTestReportSuccessCount, MetricKindTestReportTotalTime: key += fmt.Sprintf("%v", []string{ m.Labels["project"], m.Labels["kind"], m.Labels["ref"], + m.Labels["source"], }) - case MetricKindJobArtifactSizeBytes, MetricKindJobDurationSeconds, MetricKindJobID, MetricKindJobRunCount, MetricKindJobStatus, MetricKindJobTimestamp: + case MetricKindJobArtifactSizeBytes, MetricKindJobDurationSeconds, MetricKindJobID, MetricKindJobQueuedDurationSeconds, MetricKindJobRunCount, MetricKindJobStatus, MetricKindJobTimestamp: key += fmt.Sprintf("%v", []string{ m.Labels["project"], m.Labels["kind"], m.Labels["ref"], m.Labels["stage"], + m.Labels["tag_list"], m.Labels["job_name"], + m.Labels["failure_reason"], }) case MetricKindEnvironmentBehindCommitsCount, MetricKindEnvironmentBehindDurationSeconds, MetricKindEnvironmentDeploymentCount, MetricKindEnvironmentDeploymentDurationSeconds, MetricKindEnvironmentDeploymentJobID, MetricKindEnvironmentDeploymentStatus, MetricKindEnvironmentDeploymentTimestamp, MetricKindEnvironmentInformation: @@ -112,11 +163,29 @@ func (m Metric) Key() MetricKey { m.Labels["project"], m.Labels["environment"], }) + + case MetricKindTestSuiteErrorCount, MetricKindTestSuiteFailedCount, MetricKindTestSuiteSkippedCount, MetricKindTestSuiteSuccessCount, MetricKindTestSuiteTotalCount, MetricKindTestSuiteTotalTime: + key += fmt.Sprintf("%v", []string{ + m.Labels["project"], + m.Labels["kind"], + m.Labels["ref"], + m.Labels["test_suite_name"], 
+ }) + + case MetricKindTestCaseExecutionTime, MetricKindTestCaseStatus: + key += fmt.Sprintf("%v", []string{ + m.Labels["project"], + m.Labels["kind"], + m.Labels["ref"], + m.Labels["test_suite_name"], + m.Labels["test_case_name"], + m.Labels["test_case_classname"], + }) } // If the metric is a "status" one, add the status label switch m.Kind { - case MetricKindJobStatus, MetricKindEnvironmentDeploymentStatus, MetricKindStatus: + case MetricKindJobStatus, MetricKindEnvironmentDeploymentStatus, MetricKindStatus, MetricKindTestCaseStatus: key += m.Labels["status"] } diff --git a/pkg/schemas/metric_test.go b/pkg/schemas/metric_test.go index c5600b80..b97aeb93 100644 --- a/pkg/schemas/metric_test.go +++ b/pkg/schemas/metric_test.go @@ -8,7 +8,7 @@ import ( ) func TestMetricKey(t *testing.T) { - assert.Equal(t, MetricKey("3273426995"), Metric{ + assert.Equal(t, MetricKey("3797596385"), Metric{ Kind: MetricKindCoverage, Labels: prometheus.Labels{ "foo": "bar", @@ -24,15 +24,6 @@ func TestMetricKey(t *testing.T) { }, }.Key()) - assert.Equal(t, MetricKey("77312310"), Metric{ - Kind: MetricKindEnvironmentInformation, - Labels: prometheus.Labels{ - "project": "foo", - "environment": "bar", - "bar": "baz", - }, - }.Key()) - assert.Equal(t, MetricKey("1288741005"), Metric{ Kind: MetricKindEnvironmentInformation, }.Key()) diff --git a/pkg/schemas/pipelines.go b/pkg/schemas/pipelines.go index 130c5abd..065fa268 100644 --- a/pkg/schemas/pipelines.go +++ b/pkg/schemas/pipelines.go @@ -1,6 +1,7 @@ package schemas import ( + "context" "strconv" log "github.com/sirupsen/logrus" @@ -9,35 +10,125 @@ import ( // Pipeline .. type Pipeline struct { - ID int - Coverage float64 - Timestamp float64 - DurationSeconds float64 - Status string - Variables string + ID int + Coverage float64 + Timestamp float64 + DurationSeconds float64 + QueuedDurationSeconds float64 + Source string + Status string + Variables string + TestReport TestReport +} + +// TestReport .. 
+type TestReport struct { + TotalTime float64 + TotalCount int + SuccessCount int + FailedCount int + SkippedCount int + ErrorCount int + TestSuites []TestSuite +} + +// TestSuite .. +type TestSuite struct { + Name string + TotalTime float64 + TotalCount int + SuccessCount int + FailedCount int + SkippedCount int + ErrorCount int + TestCases []TestCase +} + +// TestCase .. +type TestCase struct { + Name string + Classname string + ExecutionTime float64 + Status string } // NewPipeline .. -func NewPipeline(gp goGitlab.Pipeline) Pipeline { - var coverage float64 - var err error +func NewPipeline(ctx context.Context, gp goGitlab.Pipeline) Pipeline { + var ( + coverage float64 + err error + timestamp float64 + ) + if gp.Coverage != "" { coverage, err = strconv.ParseFloat(gp.Coverage, 64) if err != nil { - log.WithField("error", err.Error()).Warnf("could not parse coverage string returned from GitLab API '%s' into Float64", gp.Coverage) + log.WithContext(ctx). + WithField("error", err.Error()). + Warnf("could not parse coverage string returned from GitLab API '%s' into Float64", gp.Coverage) } } - var timestamp float64 if gp.UpdatedAt != nil { timestamp = float64(gp.UpdatedAt.Unix()) } return Pipeline{ - ID: gp.ID, - Coverage: coverage, - Timestamp: timestamp, - DurationSeconds: float64(gp.Duration), - Status: gp.Status, + ID: gp.ID, + Coverage: coverage, + Timestamp: timestamp, + DurationSeconds: float64(gp.Duration), + QueuedDurationSeconds: float64(gp.QueuedDuration), + Source: gp.Source, + Status: gp.Status, + } +} + +// NewTestReport .. 
+func NewTestReport(gtr goGitlab.PipelineTestReport) TestReport { + testSuites := []TestSuite{} + + for _, x := range gtr.TestSuites { + testSuites = append(testSuites, NewTestSuite(x)) + } + + return TestReport{ + TotalTime: gtr.TotalTime, + TotalCount: gtr.TotalCount, + SuccessCount: gtr.SuccessCount, + FailedCount: gtr.FailedCount, + SkippedCount: gtr.SkippedCount, + ErrorCount: gtr.ErrorCount, + TestSuites: testSuites, + } +} + +// NewTestSuite .. +func NewTestSuite(gts *goGitlab.PipelineTestSuites) TestSuite { + testCases := []TestCase{} + + for _, x := range gts.TestCases { + testCases = append(testCases, NewTestCase(x)) + } + + return TestSuite{ + Name: gts.Name, + TotalTime: gts.TotalTime, + TotalCount: gts.TotalCount, + SuccessCount: gts.SuccessCount, + FailedCount: gts.FailedCount, + SkippedCount: gts.SkippedCount, + ErrorCount: gts.ErrorCount, + TestCases: testCases, + } +} + +// NewTestCase .. +func NewTestCase(gtc *goGitlab.PipelineTestCases) TestCase { + return TestCase{ + Name: gtc.Name, + Classname: gtc.Classname, + ExecutionTime: gtc.ExecutionTime, + Status: gtc.Status, } } diff --git a/pkg/schemas/pipelines_test.go b/pkg/schemas/pipelines_test.go index 07f45c49..2da34dc9 100644 --- a/pkg/schemas/pipelines_test.go +++ b/pkg/schemas/pipelines_test.go @@ -1,6 +1,7 @@ package schemas import ( + "context" "testing" "time" @@ -9,23 +10,163 @@ import ( ) func TestNewPipeline(t *testing.T) { - updatedAt := time.Date(2020, 10, 1, 13, 5, 10, 0, time.UTC) + createdAt := time.Date(2020, 10, 1, 13, 4, 10, 0, time.UTC) + startedAt := time.Date(2020, 10, 1, 13, 5, 10, 0, time.UTC) + updatedAt := time.Date(2020, 10, 1, 13, 5, 50, 0, time.UTC) gitlabPipeline := goGitlab.Pipeline{ - ID: 20, - Coverage: "25.6", - UpdatedAt: &updatedAt, - Duration: 15, - Status: "pending", + ID: 21, + Coverage: "25.6", + CreatedAt: &createdAt, + StartedAt: &startedAt, + UpdatedAt: &updatedAt, + Duration: 15, + QueuedDuration: 5, + Source: "schedule", + Status: "running", } 
expectedPipeline := Pipeline{ - ID: 20, - Coverage: 25.6, - Timestamp: 1.60155751e+09, - DurationSeconds: 15, - Status: "pending", + ID: 21, + Coverage: 25.6, + Timestamp: 1.60155755e+09, + DurationSeconds: 15, + QueuedDurationSeconds: 5, + Source: "schedule", + Status: "running", } - assert.Equal(t, expectedPipeline, NewPipeline(gitlabPipeline)) + assert.Equal(t, expectedPipeline, NewPipeline(context.Background(), gitlabPipeline)) +} + +func TestNewTestReport(t *testing.T) { + gitlabTestReport := goGitlab.PipelineTestReport{ + TotalTime: 10, + TotalCount: 2, + SuccessCount: 1, + FailedCount: 1, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []*goGitlab.PipelineTestSuites{ + { + Name: "First", + TotalTime: 3, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []*goGitlab.PipelineTestCases{ + { + Name: "First", + Classname: "ClassFirst", + ExecutionTime: 4, + Status: "success", + }, + }, + }, + { + Name: "Second", + TotalTime: 2, + TotalCount: 1, + SuccessCount: 0, + FailedCount: 1, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []*goGitlab.PipelineTestCases{ + { + Name: "First", + Classname: "ClassFirst", + ExecutionTime: 4, + Status: "success", + }, + }, + }, + }, + } + + expectedTestReport := TestReport{ + TotalTime: 10, + TotalCount: 2, + SuccessCount: 1, + FailedCount: 1, + SkippedCount: 0, + ErrorCount: 0, + TestSuites: []TestSuite{ + { + Name: "First", + TotalTime: 3, + TotalCount: 1, + SuccessCount: 1, + FailedCount: 0, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []TestCase{ + { + Name: "First", + Classname: "ClassFirst", + ExecutionTime: 4, + Status: "success", + }, + }, + }, + { + Name: "Second", + TotalTime: 2, + TotalCount: 1, + SuccessCount: 0, + FailedCount: 1, + SkippedCount: 0, + ErrorCount: 0, + TestCases: []TestCase{ + { + Name: "First", + Classname: "ClassFirst", + ExecutionTime: 4, + Status: "success", + }, + }, + }, + }, + } + assert.Equal(t, expectedTestReport, 
NewTestReport(gitlabTestReport)) +} + +func TestNewTestSuite(t *testing.T) { + gitlabTestSuite := &goGitlab.PipelineTestSuites{ + Name: "Suite", + TotalTime: 4, + TotalCount: 6, + SuccessCount: 2, + FailedCount: 2, + SkippedCount: 1, + ErrorCount: 1, + TestCases: []*goGitlab.PipelineTestCases{ + { + Name: "First", + Classname: "ClassFirst", + ExecutionTime: 4, + Status: "success", + }, + }, + } + + expectedTestSuite := TestSuite{ + Name: "Suite", + TotalTime: 4, + TotalCount: 6, + SuccessCount: 2, + FailedCount: 2, + SkippedCount: 1, + ErrorCount: 1, + TestCases: []TestCase{ + { + Name: "First", + Classname: "ClassFirst", + ExecutionTime: 4, + Status: "success", + }, + }, + } + assert.Equal(t, expectedTestSuite, NewTestSuite(gitlabTestSuite)) } diff --git a/pkg/schemas/project.go b/pkg/schemas/project.go deleted file mode 100644 index 65e11ed1..00000000 --- a/pkg/schemas/project.go +++ /dev/null @@ -1,355 +0,0 @@ -package schemas - -import ( - "hash/crc32" - "strconv" -) - -var ( - defaultProjectOutputSparseStatusMetrics = true - defaultProjectPullEnvironmentsEnabled = false - defaultProjectPullEnvironmentsNameRegexp = `.*` - defaultProjectPullEnvironmentsTagsRegexp = `.*` - defaultProjectPullRefsRegexp = `^(main|master)$` - defaultProjectPullRefsMaxAgeSeconds uint = 0 - defaultProjectPullRefsFromPipelinesEnabled = false - defaultProjectPullRefsFromPipelinesDepth = 100 - defaultProjectPullRefsFromMergeRequestsEnabled = false - defaultProjectPullRefsFromMergeRequestsDepth = 1 - defaultProjectPullPipelineJobsEnabled = false - defaultProjectPullPipelineJobsFromChildPipelinesEnabled = true - defaultProjectPullPipelineJobsRunnerDescriptionEnabled = true - defaultProjectPullPipelineJobsRunnerDescriptionAggregationRegexp = `shared-runners-manager-(\d*)\.gitlab\.com` - defaultProjectPullPipelineVariablesEnabled = false - defaultProjectPullPipelineVariablesRegexp = `.*` -) - -// ProjectParameters for the fetching configuration of Projects and Wildcards -type 
ProjectParameters struct { - // From handles ProjectPullParameters configuration - Pull ProjectPull `yaml:"pull"` - - // Whether or not to export all pipeline/job statuses (being 0) or solely the one of the last job (being 1). - OutputSparseStatusMetricsValue *bool `yaml:"output_sparse_status_metrics"` -} - -// ProjectPull .. -type ProjectPull struct { - Environments ProjectPullEnvironments `yaml:"environments"` - Refs ProjectPullRefs `yaml:"refs"` - Pipeline ProjectPullPipeline `yaml:"pipeline"` -} - -// ProjectPullEnvironments .. -type ProjectPullEnvironments struct { - // Whether to pull environments/deployments or not for this project - EnabledValue *bool `yaml:"enabled"` - - // Regular expression to filter environments to fetch by their names (defaults to '^prod') - NameRegexpValue *string `yaml:"name_regexp"` - - // Regular expression to filter out commit id to consider when deployments are based upon tags (defaults to '.*') - TagsRegexpValue *string `yaml:"tags_regexp"` -} - -// ProjectPullRefs .. -type ProjectPullRefs struct { - // Regular expression to filter refs to fetch (defaults to '.*') - RegexpValue *string `yaml:"regexp"` - - // If the age of the most recent pipeline for the ref is greater than this value, the ref won't get exported - MaxAgeSecondsValue *uint `yaml:"max_age_seconds"` - - // From handles ProjectPullRefsFromParameters configuration - From ProjectPullRefsFrom `yaml:"from"` -} - -// ProjectPullRefsFrom .. -type ProjectPullRefsFrom struct { - // Pipelines defines whether or not to fetch refs from historical pipelines - Pipelines ProjectPullRefsFromPipelines `yaml:"pipelines"` - - // MergeRequests defines whether or not to fetch refs from merge requests - MergeRequests ProjectPullRefsFromMergeRequests `yaml:"merge_requests"` -} - -// ProjectPullRefsFromParameters .. -type ProjectPullRefsFromParameters struct { - EnabledValue *bool `yaml:"enabled"` - DepthValue *int `yaml:"depth"` -} - -// ProjectPullRefsFromPipelines .. 
-type ProjectPullRefsFromPipelines ProjectPullRefsFromParameters - -// ProjectPullRefsFromMergeRequests .. -type ProjectPullRefsFromMergeRequests ProjectPullRefsFromParameters - -// ProjectPullPipeline .. -type ProjectPullPipeline struct { - Jobs ProjectPullPipelineJobs `yaml:"jobs"` - Variables ProjectPullPipelineVariables `yaml:"variables"` -} - -// ProjectPullPipelineJobs .. -type ProjectPullPipelineJobs struct { - // Enabled set to true will pull pipeline jobs related metrics - EnabledValue *bool `yaml:"enabled"` - - // Pull pipeline jobs from child/downstream pipelines - FromChildPipelines ProjectPullPipelineJobsFromChildPipelines `yaml:"from_child_pipelines"` - - // Configure the export of the runner description which ran the job - RunnerDescription ProjectPullPipelineJobsRunnerDescription `yaml:"runner_description"` -} - -// ProjectPullPipelineJobsFromChildPipelines .. -type ProjectPullPipelineJobsFromChildPipelines struct { - // Enabled set to true will pull pipeline jobs from child/downstream pipelines related metrics - EnabledValue *bool `yaml:"enabled"` -} - -// ProjectPullPipelineJobsRunnerDescription .. -type ProjectPullPipelineJobsRunnerDescription struct { - // Enabled set to true will export the description of the runner which ran the job - EnabledValue *bool `yaml:"enabled"` - - // Regular expression to be able to reduce the cardinality of the exported value when necessary - AggregationRegexpValue *string `yaml:"aggregation_regexp"` -} - -// ProjectPullPipelineVariables .. -type ProjectPullPipelineVariables struct { - // Enabled set to true will attempt to retrieve variables included in the pipeline - EnabledValue *bool `yaml:"enabled"` - - // Regexp to filter pipeline variables values to fetch (defaults to '.*') - RegexpValue *string `yaml:"regexp"` -} - -// UpdateProjectDefaults .. 
-func UpdateProjectDefaults(d ProjectParameters) { - if d.Pull.Environments.EnabledValue != nil { - defaultProjectPullEnvironmentsEnabled = *d.Pull.Environments.EnabledValue - } - - if d.Pull.Environments.NameRegexpValue != nil { - defaultProjectPullEnvironmentsNameRegexp = *d.Pull.Environments.NameRegexpValue - } - - if d.Pull.Environments.TagsRegexpValue != nil { - defaultProjectPullEnvironmentsTagsRegexp = *d.Pull.Environments.TagsRegexpValue - } - - if d.Pull.Refs.RegexpValue != nil { - defaultProjectPullRefsRegexp = *d.Pull.Refs.RegexpValue - } - - if d.Pull.Refs.MaxAgeSecondsValue != nil { - defaultProjectPullRefsMaxAgeSeconds = *d.Pull.Refs.MaxAgeSecondsValue - } - - if d.Pull.Refs.From.Pipelines.EnabledValue != nil { - defaultProjectPullRefsFromPipelinesEnabled = *d.Pull.Refs.From.Pipelines.EnabledValue - } - - if d.Pull.Refs.From.Pipelines.DepthValue != nil { - defaultProjectPullRefsFromPipelinesDepth = *d.Pull.Refs.From.Pipelines.DepthValue - } - - if d.Pull.Refs.From.MergeRequests.EnabledValue != nil { - defaultProjectPullRefsFromMergeRequestsEnabled = *d.Pull.Refs.From.MergeRequests.EnabledValue - } - - if d.Pull.Refs.From.MergeRequests.DepthValue != nil { - defaultProjectPullRefsFromMergeRequestsDepth = *d.Pull.Refs.From.MergeRequests.DepthValue - } - - if d.Pull.Pipeline.Jobs.EnabledValue != nil { - defaultProjectPullPipelineJobsEnabled = *d.Pull.Pipeline.Jobs.EnabledValue - } - - if d.Pull.Pipeline.Jobs.FromChildPipelines.EnabledValue != nil { - defaultProjectPullPipelineJobsFromChildPipelinesEnabled = *d.Pull.Pipeline.Jobs.FromChildPipelines.EnabledValue - } - - if d.Pull.Pipeline.Jobs.RunnerDescription.EnabledValue != nil { - defaultProjectPullPipelineJobsRunnerDescriptionEnabled = *d.Pull.Pipeline.Jobs.RunnerDescription.EnabledValue - } - - if d.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexpValue != nil { - defaultProjectPullPipelineJobsRunnerDescriptionAggregationRegexp = *d.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexpValue - } 
- - if d.Pull.Pipeline.Variables.EnabledValue != nil { - defaultProjectPullPipelineVariablesEnabled = *d.Pull.Pipeline.Variables.EnabledValue - } - - if d.Pull.Pipeline.Variables.RegexpValue != nil { - defaultProjectPullPipelineVariablesRegexp = *d.Pull.Pipeline.Variables.RegexpValue - } -} - -// Project holds information about a GitLab project -type Project struct { - // ProjectParameters holds parameters specific to this project - ProjectParameters `yaml:",inline"` - - // Name is actually what is commonly referred as path_with_namespace on GitLab - Name string `yaml:"name"` -} - -// ProjectKey .. -type ProjectKey string - -// Key .. -func (p Project) Key() ProjectKey { - return ProjectKey(strconv.Itoa(int(crc32.ChecksumIEEE([]byte(p.Name))))) -} - -// Projects .. -type Projects map[ProjectKey]Project - -// OutputSparseStatusMetrics ... -func (p *ProjectParameters) OutputSparseStatusMetrics() bool { - if p.OutputSparseStatusMetricsValue != nil { - return *p.OutputSparseStatusMetricsValue - } - - return defaultProjectOutputSparseStatusMetrics -} - -// Enabled ... -func (p *ProjectPullEnvironments) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullEnvironmentsEnabled -} - -// NameRegexp ... -func (p *ProjectPullEnvironments) NameRegexp() string { - if p.NameRegexpValue != nil { - return *p.NameRegexpValue - } - - return defaultProjectPullEnvironmentsNameRegexp -} - -// TagsRegexp ... -func (p *ProjectPullEnvironments) TagsRegexp() string { - if p.TagsRegexpValue != nil { - return *p.TagsRegexpValue - } - - return defaultProjectPullEnvironmentsTagsRegexp -} - -// Regexp ... -func (p *ProjectPullRefs) Regexp() string { - if p.RegexpValue != nil { - return *p.RegexpValue - } - - return defaultProjectPullRefsRegexp -} - -// MaxAgeSeconds ... 
-func (p *ProjectPullRefs) MaxAgeSeconds() uint { - if p.MaxAgeSecondsValue != nil { - return *p.MaxAgeSecondsValue - } - - return defaultProjectPullRefsMaxAgeSeconds -} - -// Enabled ... -func (p *ProjectPullRefsFromPipelines) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullRefsFromPipelinesEnabled -} - -// Depth ... -func (p *ProjectPullRefsFromPipelines) Depth() int { - if p.DepthValue != nil { - return *p.DepthValue - } - - return defaultProjectPullRefsFromPipelinesDepth -} - -// Enabled ... -func (p *ProjectPullRefsFromMergeRequests) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullRefsFromMergeRequestsEnabled -} - -// Depth ... -func (p *ProjectPullRefsFromMergeRequests) Depth() int { - if p.DepthValue != nil { - return *p.DepthValue - } - - return defaultProjectPullRefsFromMergeRequestsDepth -} - -// Enabled ... -func (p *ProjectPullPipelineJobs) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullPipelineJobsEnabled -} - -// Enabled ... -func (p *ProjectPullPipelineJobsFromChildPipelines) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullPipelineJobsFromChildPipelinesEnabled -} - -// Enabled ... -func (p *ProjectPullPipelineJobsRunnerDescription) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullPipelineJobsRunnerDescriptionEnabled -} - -// AggregationRegexp ... -func (p *ProjectPullPipelineJobsRunnerDescription) AggregationRegexp() string { - if p.AggregationRegexpValue != nil { - return *p.AggregationRegexpValue - } - - return defaultProjectPullPipelineJobsRunnerDescriptionAggregationRegexp -} - -// Enabled ... 
-func (p *ProjectPullPipelineVariables) Enabled() bool { - if p.EnabledValue != nil { - return *p.EnabledValue - } - - return defaultProjectPullPipelineVariablesEnabled -} - -// Regexp ... -func (p *ProjectPullPipelineVariables) Regexp() string { - if p.RegexpValue != nil { - return *p.RegexpValue - } - - return defaultProjectPullPipelineVariablesRegexp -} diff --git a/pkg/schemas/project_test.go b/pkg/schemas/project_test.go deleted file mode 100644 index a7cea5ec..00000000 --- a/pkg/schemas/project_test.go +++ /dev/null @@ -1,218 +0,0 @@ -package schemas - -import ( - "testing" - - "github.com/openlyinc/pointy" - "github.com/stretchr/testify/assert" -) - -func TestProjectKey(t *testing.T) { - p := Project{ - Name: "foo", - } - - assert.Equal(t, ProjectKey("2356372769"), p.Key()) -} - -func NewTestProjectVariables() (cfg *Config, project *Project) { - cfg = &Config{} - - project = &Project{ - Name: "foo", - } - - return -} - -func TestOutputSparseStatusMetrics(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectOutputSparseStatusMetrics, project.OutputSparseStatusMetrics()) - - cfg.ProjectDefaults.OutputSparseStatusMetricsValue = pointy.Bool(!defaultProjectOutputSparseStatusMetrics) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectOutputSparseStatusMetrics, project.OutputSparseStatusMetrics()) - - project.OutputSparseStatusMetricsValue = pointy.Bool(defaultProjectOutputSparseStatusMetrics) - assert.Equal(t, defaultProjectOutputSparseStatusMetrics, project.OutputSparseStatusMetrics()) -} - -func TestPullEnvironmentsFromProjectsEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullEnvironmentsEnabled, project.Pull.Environments.Enabled()) - - cfg.ProjectDefaults.Pull.Environments.EnabledValue = pointy.Bool(!defaultProjectPullEnvironmentsEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullEnvironmentsEnabled, 
project.Pull.Environments.Enabled()) - - project.Pull.Environments.EnabledValue = pointy.Bool(defaultProjectPullEnvironmentsEnabled) - assert.Equal(t, defaultProjectPullEnvironmentsEnabled, project.Pull.Environments.Enabled()) -} - -func TestPullEnvironmentsFromProjectsNameRegexp(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullEnvironmentsNameRegexp, project.Pull.Environments.NameRegexp()) - - cfg.ProjectDefaults.Pull.Environments.NameRegexpValue = pointy.String("foo") - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, "foo", project.Pull.Environments.NameRegexp()) - - project.Pull.Environments.NameRegexpValue = pointy.String("bar") - assert.Equal(t, "bar", project.Pull.Environments.NameRegexp()) -} - -func TestPullEnvironmentsFromProjectsTagsRegexp(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullEnvironmentsTagsRegexp, project.Pull.Environments.TagsRegexp()) - - cfg.ProjectDefaults.Pull.Environments.TagsRegexpValue = pointy.String("foo") - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, "foo", project.Pull.Environments.TagsRegexp()) - - project.Pull.Environments.TagsRegexpValue = pointy.String("bar") - assert.Equal(t, "bar", project.Pull.Environments.TagsRegexp()) -} - -func TestPullRefsRegexp(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullRefsRegexp, project.Pull.Refs.Regexp()) - - cfg.ProjectDefaults.Pull.Refs.RegexpValue = pointy.String("foo") - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, "foo", project.Pull.Refs.Regexp()) - - project.Pull.Refs.RegexpValue = pointy.String("bar") - assert.Equal(t, "bar", project.Pull.Refs.Regexp()) -} - -func TestPullRefsMaxAgeSeconds(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullRefsMaxAgeSeconds, project.Pull.Refs.MaxAgeSeconds()) - - cfg.ProjectDefaults.Pull.Refs.MaxAgeSecondsValue = 
pointy.Uint(1) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, uint(1), project.Pull.Refs.MaxAgeSeconds()) - - project.Pull.Refs.MaxAgeSecondsValue = pointy.Uint(2) - assert.Equal(t, uint(2), project.Pull.Refs.MaxAgeSeconds()) -} - -func TestPullRefsFromPipelinesEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullRefsFromPipelinesEnabled, project.Pull.Refs.From.Pipelines.Enabled()) - - cfg.ProjectDefaults.Pull.Refs.From.Pipelines.EnabledValue = pointy.Bool(!defaultProjectPullRefsFromPipelinesEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullRefsFromPipelinesEnabled, project.Pull.Refs.From.Pipelines.Enabled()) - - project.Pull.Refs.From.Pipelines.EnabledValue = pointy.Bool(defaultProjectPullRefsFromPipelinesEnabled) - assert.Equal(t, defaultProjectPullRefsFromPipelinesEnabled, project.Pull.Refs.From.Pipelines.Enabled()) -} - -func TestPullRefsFromPipelinesDepth(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullRefsFromPipelinesDepth, project.Pull.Refs.From.Pipelines.Depth()) - - cfg.ProjectDefaults.Pull.Refs.From.Pipelines.DepthValue = pointy.Int(1) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, 1, project.Pull.Refs.From.Pipelines.Depth()) - - project.Pull.Refs.From.Pipelines.DepthValue = pointy.Int(2) - assert.Equal(t, 2, project.Pull.Refs.From.Pipelines.Depth()) -} - -func TestPullRefsFromMergeRequestsEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullRefsFromMergeRequestsEnabled, project.Pull.Refs.From.MergeRequests.Enabled()) - - cfg.ProjectDefaults.Pull.Refs.From.MergeRequests.EnabledValue = pointy.Bool(!defaultProjectPullRefsFromMergeRequestsEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullRefsFromMergeRequestsEnabled, project.Pull.Refs.From.MergeRequests.Enabled()) - - 
project.Pull.Refs.From.MergeRequests.EnabledValue = pointy.Bool(defaultProjectPullRefsFromPipelinesEnabled) - assert.Equal(t, defaultProjectPullRefsFromMergeRequestsEnabled, project.Pull.Refs.From.MergeRequests.Enabled()) -} - -func TestPullRefsFromMergeRequestsDepth(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullRefsFromMergeRequestsDepth, project.Pull.Refs.From.MergeRequests.Depth()) - - cfg.ProjectDefaults.Pull.Refs.From.MergeRequests.DepthValue = pointy.Int(1) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, 1, project.Pull.Refs.From.MergeRequests.Depth()) - - project.Pull.Refs.From.MergeRequests.DepthValue = pointy.Int(2) - assert.Equal(t, 2, project.Pull.Refs.From.MergeRequests.Depth()) -} - -func TestPullPipelineJobsEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullPipelineJobsEnabled, project.Pull.Pipeline.Jobs.Enabled()) - - cfg.ProjectDefaults.Pull.Pipeline.Jobs.EnabledValue = pointy.Bool(!defaultProjectPullPipelineJobsEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullPipelineJobsEnabled, project.Pull.Pipeline.Jobs.Enabled()) - - project.Pull.Pipeline.Jobs.EnabledValue = pointy.Bool(defaultProjectPullPipelineJobsEnabled) - assert.Equal(t, defaultProjectPullPipelineJobsEnabled, project.Pull.Pipeline.Jobs.Enabled()) -} - -func TestPullPipelineJobsFromChildPipelinesEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullPipelineJobsFromChildPipelinesEnabled, project.Pull.Pipeline.Jobs.FromChildPipelines.Enabled()) - - cfg.ProjectDefaults.Pull.Pipeline.Jobs.FromChildPipelines.EnabledValue = pointy.Bool(!defaultProjectPullPipelineJobsFromChildPipelinesEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullPipelineJobsFromChildPipelinesEnabled, project.Pull.Pipeline.Jobs.FromChildPipelines.Enabled()) - - 
project.Pull.Pipeline.Jobs.FromChildPipelines.EnabledValue = pointy.Bool(defaultProjectPullPipelineJobsFromChildPipelinesEnabled) - assert.Equal(t, defaultProjectPullPipelineJobsFromChildPipelinesEnabled, project.Pull.Pipeline.Jobs.FromChildPipelines.Enabled()) -} - -func TestPullPipelineJobsRunnerDescriptionEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullPipelineJobsRunnerDescriptionEnabled, project.Pull.Pipeline.Jobs.RunnerDescription.Enabled()) - - cfg.ProjectDefaults.Pull.Pipeline.Jobs.RunnerDescription.EnabledValue = pointy.Bool(!defaultProjectPullPipelineJobsRunnerDescriptionEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullPipelineJobsRunnerDescriptionEnabled, project.Pull.Pipeline.Jobs.RunnerDescription.Enabled()) - - project.Pull.Pipeline.Jobs.RunnerDescription.EnabledValue = pointy.Bool(defaultProjectPullPipelineJobsRunnerDescriptionEnabled) - assert.Equal(t, defaultProjectPullPipelineJobsRunnerDescriptionEnabled, project.Pull.Pipeline.Jobs.RunnerDescription.Enabled()) -} - -func TestPullPipelineJobsRunnerDescriptionAggregationRegexp(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullPipelineJobsRunnerDescriptionAggregationRegexp, project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp()) - - cfg.ProjectDefaults.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexpValue = pointy.String("foo") - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, "foo", project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp()) - - project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexpValue = pointy.String("bar") - assert.Equal(t, "bar", project.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp()) -} - -func TestPullPipelineVariablesEnabled(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullPipelineVariablesEnabled, 
project.Pull.Pipeline.Variables.Enabled()) - - cfg.ProjectDefaults.Pull.Pipeline.Variables.EnabledValue = pointy.Bool(!defaultProjectPullPipelineVariablesEnabled) - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, defaultProjectPullPipelineVariablesEnabled, project.Pull.Pipeline.Variables.Enabled()) - - project.Pull.Pipeline.Variables.EnabledValue = pointy.Bool(defaultProjectPullPipelineVariablesEnabled) - assert.Equal(t, defaultProjectPullPipelineVariablesEnabled, project.Pull.Pipeline.Variables.Enabled()) -} - -func TestPullPipelineVariablesRegexp(t *testing.T) { - cfg, project := NewTestProjectVariables() - assert.Equal(t, defaultProjectPullPipelineVariablesRegexp, project.Pull.Pipeline.Variables.Regexp()) - - cfg.ProjectDefaults.Pull.Pipeline.Variables.RegexpValue = pointy.String("foo") - UpdateProjectDefaults(cfg.ProjectDefaults) - assert.Equal(t, "foo", project.Pull.Pipeline.Variables.Regexp()) - - project.Pull.Pipeline.Variables.RegexpValue = pointy.String("bar") - assert.Equal(t, "bar", project.Pull.Pipeline.Variables.Regexp()) -} diff --git a/pkg/schemas/projects.go b/pkg/schemas/projects.go new file mode 100644 index 00000000..66e9bd67 --- /dev/null +++ b/pkg/schemas/projects.go @@ -0,0 +1,31 @@ +package schemas + +import ( + "hash/crc32" + "strconv" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" +) + +// Project .. +type Project struct { + config.Project + + Topics string +} + +// ProjectKey .. +type ProjectKey string + +// Projects .. +type Projects map[ProjectKey]Project + +// Key .. +func (p Project) Key() ProjectKey { + return ProjectKey(strconv.Itoa(int(crc32.ChecksumIEEE([]byte(p.Name))))) +} + +// NewProject .. 
+func NewProject(name string) Project { + return Project{Project: config.NewProject(name)} +} diff --git a/pkg/schemas/projects_test.go b/pkg/schemas/projects_test.go new file mode 100644 index 00000000..f75b0ff4 --- /dev/null +++ b/pkg/schemas/projects_test.go @@ -0,0 +1,11 @@ +package schemas + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestProjectKey(t *testing.T) { + assert.Equal(t, ProjectKey("2356372769"), NewProject("foo").Key()) +} diff --git a/pkg/schemas/ref.go b/pkg/schemas/ref.go index abcee40b..8f403a86 100644 --- a/pkg/schemas/ref.go +++ b/pkg/schemas/ref.go @@ -1,41 +1,38 @@ package schemas import ( + "fmt" "hash/crc32" + "regexp" "strconv" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" ) const ( - // RefKindBranch refers to a branch + mergeRequestRegexp string = `^((\d+)|refs/merge-requests/(\d+)/head)$` + + // RefKindBranch refers to a branch. RefKindBranch RefKind = "branch" - // RefKindTag refers to a tag + // RefKindTag refers to a tag. RefKindTag RefKind = "tag" - // RefKindMergeRequest refers to a tag + // RefKindMergeRequest refers to a tag. RefKindMergeRequest RefKind = "merge-request" ) -// RefKind is used to determine the kind of the ref +// RefKind is used to determine the kind of the ref. type RefKind string // Ref is what we will use a metrics entity on which we will -// perform regular pulling operations +// perform regular pulling operations. type Ref struct { Kind RefKind - ProjectName string Name string - Topics string + Project Project LatestPipeline Pipeline LatestJobs Jobs - - OutputSparseStatusMetrics bool - PullPipelineJobsEnabled bool - PullPipelineJobsFromChildPipelinesEnabled bool - PullPipelineJobsRunnerDescriptionEnabled bool - PullPipelineJobsRunnerDescriptionAggregationRegexp string - PullPipelineVariablesEnabled bool - PullPipelineVariablesRegexp string } // RefKey .. @@ -43,14 +40,14 @@ type RefKey string // Key .. 
func (ref Ref) Key() RefKey { - return RefKey(strconv.Itoa(int(crc32.ChecksumIEEE([]byte(string(ref.Kind) + ref.ProjectName + ref.Name))))) + return RefKey(strconv.Itoa(int(crc32.ChecksumIEEE([]byte(string(ref.Kind) + ref.Project.Name + ref.Name))))) } // Refs allows us to keep track of all the Ref -// we have configured/discovered +// we have configured/discovered. type Refs map[RefKey]Ref -// Count returns the amount of projects refs in the map +// Count returns the amount of projects refs in the map. func (refs Refs) Count() int { return len(refs) } @@ -59,33 +56,54 @@ func (refs Refs) Count() int { func (ref Ref) DefaultLabelsValues() map[string]string { return map[string]string{ "kind": string(ref.Kind), - "project": ref.ProjectName, + "project": ref.Project.Name, "ref": ref.Name, - "topics": ref.Topics, + "topics": ref.Project.Topics, "variables": ref.LatestPipeline.Variables, + "source": ref.LatestPipeline.Source, } } -// NewRef is an helper which returns a new Ref pointer +// NewRef is an helper which returns a new Ref. 
func NewRef( + project Project, kind RefKind, - projectName, name, topics string, - outputSparseStatusMetrics, pullPipelineJobsEnabled, pullPipelineJobsFromChildPipelinesEnabled, pullPipelineJobsRunnerDescriptionEnabled, pullPipelineVariablesEnabled bool, - pullPipelineVariablesRegexp, pullPipelineJobsRunnerDescriptionAggregationRegexp string, + name string, ) Ref { return Ref{ - Kind: kind, - ProjectName: projectName, - Name: name, - Topics: topics, - LatestJobs: make(Jobs), - - OutputSparseStatusMetrics: outputSparseStatusMetrics, - PullPipelineJobsEnabled: pullPipelineJobsEnabled, - PullPipelineJobsFromChildPipelinesEnabled: pullPipelineJobsFromChildPipelinesEnabled, - PullPipelineJobsRunnerDescriptionEnabled: pullPipelineJobsRunnerDescriptionEnabled, - PullPipelineJobsRunnerDescriptionAggregationRegexp: pullPipelineJobsRunnerDescriptionAggregationRegexp, - PullPipelineVariablesEnabled: pullPipelineVariablesEnabled, - PullPipelineVariablesRegexp: pullPipelineVariablesRegexp, + Kind: kind, + Name: name, + Project: project, + LatestJobs: make(Jobs), + } +} + +// GetRefRegexp returns the expected regexp given a ProjectPullRefs config and a RefKind. +func GetRefRegexp(ppr config.ProjectPullRefs, rk RefKind) (re *regexp.Regexp, err error) { + switch rk { + case RefKindBranch: + return regexp.Compile(ppr.Branches.Regexp) + case RefKindTag: + return regexp.Compile(ppr.Tags.Regexp) + case RefKindMergeRequest: + return regexp.Compile(mergeRequestRegexp) } + + return nil, fmt.Errorf("invalid ref kind (%v)", rk) +} + +// GetMergeRequestIIDFromRefName parse a refName to extract a merge request IID. 
+func GetMergeRequestIIDFromRefName(refName string) (string, error) { + re := regexp.MustCompile(mergeRequestRegexp) + if matches := re.FindStringSubmatch(refName); len(matches) == 4 { + if len(matches[2]) > 0 { + return matches[2], nil + } + + if len(matches[3]) > 0 { + return matches[3], nil + } + } + + return refName, fmt.Errorf("unable to extract the merge-request ID from the ref (%s)", refName) } diff --git a/pkg/schemas/ref_test.go b/pkg/schemas/ref_test.go index 2498053b..6607b938 100644 --- a/pkg/schemas/ref_test.go +++ b/pkg/schemas/ref_test.go @@ -7,13 +7,11 @@ import ( ) func TestRefKey(t *testing.T) { - ref := Ref{ - Kind: RefKindBranch, - ProjectName: "foo/bar", - Name: "baz", - } - - assert.Equal(t, RefKey("1690074537"), ref.Key()) + assert.Equal(t, RefKey("1690074537"), NewRef( + NewProject("foo/bar"), + RefKindBranch, + "baz", + ).Key()) } func TestRefsCount(t *testing.T) { @@ -24,13 +22,15 @@ func TestRefsCount(t *testing.T) { } func TestRefDefaultLabelsValues(t *testing.T) { + p := NewProject("foo/bar") + p.Topics = "amazing,project" ref := Ref{ - Kind: RefKindBranch, - ProjectName: "foo/bar", - Name: "feature", - Topics: "amazing,project", + Project: p, + Kind: RefKindBranch, + Name: "feature", LatestPipeline: Pipeline{ Variables: "blah", + Source: "schedule", }, LatestJobs: make(Jobs), } @@ -41,39 +41,51 @@ func TestRefDefaultLabelsValues(t *testing.T) { "ref": "feature", "topics": "amazing,project", "variables": "blah", + "source": "schedule", } assert.Equal(t, expectedValue, ref.DefaultLabelsValues()) } func TestNewRef(t *testing.T) { - expectedValue := Ref{ - Kind: RefKindTag, - ProjectName: "foo/bar", - Name: "v0.0.7", - Topics: "bar,baz", - LatestJobs: make(Jobs), + p := NewProject("foo/bar") + p.Topics = "bar,baz" + p.OutputSparseStatusMetrics = false + p.Pull.Pipeline.Jobs.Enabled = true + p.Pull.Pipeline.Jobs.FromChildPipelines.Enabled = false + p.Pull.Pipeline.Jobs.RunnerDescription.Enabled = false + p.Pull.Pipeline.Variables.Enabled = 
true + p.Pull.Pipeline.Variables.Regexp = `.*` + p.Pull.Pipeline.Jobs.RunnerDescription.AggregationRegexp = `.*` - OutputSparseStatusMetrics: true, - PullPipelineJobsEnabled: true, - PullPipelineJobsFromChildPipelinesEnabled: false, - PullPipelineJobsRunnerDescriptionEnabled: false, - PullPipelineVariablesEnabled: true, - PullPipelineVariablesRegexp: ".*", - PullPipelineJobsRunnerDescriptionAggregationRegexp: ".*", + expectedValue := Ref{ + Project: p, + Kind: RefKindTag, + Name: "v0.0.7", + LatestJobs: make(Jobs), } assert.Equal(t, expectedValue, NewRef( + p, RefKindTag, - "foo/bar", "v0.0.7", - "bar,baz", - true, - true, - false, - false, - true, - ".*", - ".*", )) } + +func TestGetMergeRequestIIDFromRefName(t *testing.T) { + name, err := GetMergeRequestIIDFromRefName("1234") + assert.NoError(t, err) + assert.Equal(t, "1234", name) + + name, err = GetMergeRequestIIDFromRefName("refs/merge-requests/5678/head") + assert.NoError(t, err) + assert.Equal(t, "5678", name) + + name, err = GetMergeRequestIIDFromRefName("20.0.1") + assert.Error(t, err) + assert.Equal(t, "20.0.1", name) + + name, err = GetMergeRequestIIDFromRefName("x") + assert.Error(t, err) + assert.Equal(t, "x", name) +} diff --git a/pkg/schemas/tasks.go b/pkg/schemas/tasks.go new file mode 100644 index 00000000..1be9232c --- /dev/null +++ b/pkg/schemas/tasks.go @@ -0,0 +1,51 @@ +package schemas + +// TaskType represents the type of a task. +type TaskType string + +const ( + // TaskTypePullProject .. + TaskTypePullProject TaskType = "PullProject" + + // TaskTypePullProjectsFromWildcard .. + TaskTypePullProjectsFromWildcard TaskType = "PullProjectsFromWildcard" + + // TaskTypePullProjectsFromWildcards .. + TaskTypePullProjectsFromWildcards TaskType = "PullProjectsFromWildcards" + + // TaskTypePullEnvironmentsFromProject .. + TaskTypePullEnvironmentsFromProject TaskType = "PullEnvironmentsFromProject" + + // TaskTypePullEnvironmentsFromProjects .. 
+ TaskTypePullEnvironmentsFromProjects TaskType = "PullEnvironmentsFromProjects" + + // TaskTypePullEnvironmentMetrics .. + TaskTypePullEnvironmentMetrics TaskType = "PullEnvironmentMetrics" + + // TaskTypePullMetrics .. + TaskTypePullMetrics TaskType = "PullMetrics" + + // TaskTypePullRefsFromProject .. + TaskTypePullRefsFromProject TaskType = "PullRefsFromProject" + + // TaskTypePullRefsFromProjects .. + TaskTypePullRefsFromProjects TaskType = "PullRefsFromProjects" + + // TaskTypePullRefMetrics .. + TaskTypePullRefMetrics TaskType = "PullRefMetrics" + + // TaskTypeGarbageCollectProjects .. + TaskTypeGarbageCollectProjects TaskType = "GarbageCollectProjects" + + // TaskTypeGarbageCollectEnvironments .. + TaskTypeGarbageCollectEnvironments TaskType = "GarbageCollectEnvironments" + + // TaskTypeGarbageCollectRefs .. + TaskTypeGarbageCollectRefs TaskType = "GarbageCollectRefs" + + // TaskTypeGarbageCollectMetrics .. + TaskTypeGarbageCollectMetrics TaskType = "GarbageCollectMetrics" +) + +// Tasks can be used to keep track of tasks. +type Tasks map[TaskType]map[string]interface{} diff --git a/pkg/schemas/tasks_test.go b/pkg/schemas/tasks_test.go new file mode 100644 index 00000000..faeaab64 --- /dev/null +++ b/pkg/schemas/tasks_test.go @@ -0,0 +1 @@ +package schemas diff --git a/pkg/schemas/wildcard_test.go b/pkg/schemas/wildcard_test.go deleted file mode 100644 index 1005a08e..00000000 --- a/pkg/schemas/wildcard_test.go +++ /dev/null @@ -1,15 +0,0 @@ -package schemas - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestWildcardKey(t *testing.T) { - w := Wildcard{ - Search: "foo", - } - - assert.Equal(t, WildcardKey("1065724787"), w.Key()) -} diff --git a/pkg/storage/local.go b/pkg/storage/local.go deleted file mode 100644 index de4c7d1f..00000000 --- a/pkg/storage/local.go +++ /dev/null @@ -1,274 +0,0 @@ -package storage - -import ( - "sync" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" -) - -// Local .. 
-type Local struct { - projects schemas.Projects - projectsMutex sync.RWMutex - - environments schemas.Environments - environmentsMutex sync.RWMutex - - refs schemas.Refs - refsMutex sync.RWMutex - - metrics schemas.Metrics - metricsMutex sync.RWMutex -} - -// SetProject .. -func (l *Local) SetProject(p schemas.Project) error { - l.projectsMutex.Lock() - defer l.projectsMutex.Unlock() - - l.projects[p.Key()] = p - return nil -} - -// DelProject .. -func (l *Local) DelProject(k schemas.ProjectKey) error { - l.projectsMutex.Lock() - defer l.projectsMutex.Unlock() - - delete(l.projects, k) - return nil -} - -// GetProject .. -func (l *Local) GetProject(p *schemas.Project) error { - exists, err := l.ProjectExists(p.Key()) - if err != nil { - return err - } - - if exists { - l.projectsMutex.RLock() - *p = l.projects[p.Key()] - l.projectsMutex.RUnlock() - } - - return nil -} - -// ProjectExists .. -func (l *Local) ProjectExists(k schemas.ProjectKey) (bool, error) { - l.projectsMutex.RLock() - defer l.projectsMutex.RUnlock() - - _, ok := l.projects[k] - return ok, nil -} - -// Projects .. -func (l *Local) Projects() (projects schemas.Projects, err error) { - projects = make(schemas.Projects) - l.projectsMutex.RLock() - defer l.projectsMutex.RUnlock() - - for k, v := range l.projects { - projects[k] = v - } - return -} - -// ProjectsCount .. -func (l *Local) ProjectsCount() (int64, error) { - l.projectsMutex.RLock() - defer l.projectsMutex.RUnlock() - - return int64(len(l.projects)), nil -} - -// SetEnvironment .. -func (l *Local) SetEnvironment(environment schemas.Environment) error { - l.environmentsMutex.Lock() - defer l.environmentsMutex.Unlock() - - l.environments[environment.Key()] = environment - return nil -} - -// DelEnvironment .. -func (l *Local) DelEnvironment(k schemas.EnvironmentKey) error { - l.environmentsMutex.Lock() - defer l.environmentsMutex.Unlock() - - delete(l.environments, k) - return nil -} - -// GetEnvironment .. 
-func (l *Local) GetEnvironment(environment *schemas.Environment) error { - exists, err := l.EnvironmentExists(environment.Key()) - if err != nil { - return err - } - - if exists { - l.environmentsMutex.RLock() - *environment = l.environments[environment.Key()] - l.environmentsMutex.RUnlock() - } - - return nil -} - -// EnvironmentExists .. -func (l *Local) EnvironmentExists(k schemas.EnvironmentKey) (bool, error) { - l.environmentsMutex.RLock() - defer l.environmentsMutex.RUnlock() - - _, ok := l.environments[k] - return ok, nil -} - -// Environments .. -func (l *Local) Environments() (environments schemas.Environments, err error) { - environments = make(schemas.Environments) - l.environmentsMutex.RLock() - defer l.environmentsMutex.RUnlock() - - for k, v := range l.environments { - environments[k] = v - } - return -} - -// EnvironmentsCount .. -func (l *Local) EnvironmentsCount() (int64, error) { - l.environmentsMutex.RLock() - defer l.environmentsMutex.RUnlock() - - return int64(len(l.environments)), nil -} - -// SetRef .. -func (l *Local) SetRef(ref schemas.Ref) error { - l.refsMutex.Lock() - defer l.refsMutex.Unlock() - - l.refs[ref.Key()] = ref - return nil -} - -// DelRef .. -func (l *Local) DelRef(k schemas.RefKey) error { - l.refsMutex.Lock() - defer l.refsMutex.Unlock() - - delete(l.refs, k) - return nil -} - -// GetRef .. -func (l *Local) GetRef(ref *schemas.Ref) error { - exists, err := l.RefExists(ref.Key()) - if err != nil { - return err - } - - if exists { - l.refsMutex.RLock() - *ref = l.refs[ref.Key()] - l.refsMutex.RUnlock() - } - - return nil -} - -// RefExists .. -func (l *Local) RefExists(k schemas.RefKey) (bool, error) { - l.refsMutex.RLock() - defer l.refsMutex.RUnlock() - - _, ok := l.refs[k] - return ok, nil -} - -// Refs .. -func (l *Local) Refs() (refs schemas.Refs, err error) { - refs = make(schemas.Refs) - l.refsMutex.RLock() - defer l.refsMutex.RUnlock() - - for k, v := range l.refs { - refs[k] = v - } - return -} - -// RefsCount .. 
-func (l *Local) RefsCount() (int64, error) { - l.refsMutex.RLock() - defer l.refsMutex.RUnlock() - - return int64(len(l.refs)), nil -} - -// SetMetric .. -func (l *Local) SetMetric(m schemas.Metric) error { - l.metricsMutex.Lock() - defer l.metricsMutex.Unlock() - - l.metrics[m.Key()] = m - return nil -} - -// DelMetric .. -func (l *Local) DelMetric(k schemas.MetricKey) error { - l.metricsMutex.Lock() - defer l.metricsMutex.Unlock() - - delete(l.metrics, k) - return nil -} - -// GetMetric .. -func (l *Local) GetMetric(m *schemas.Metric) error { - exists, err := l.MetricExists(m.Key()) - if err != nil { - return err - } - - if exists { - l.metricsMutex.RLock() - *m = l.metrics[m.Key()] - l.metricsMutex.RUnlock() - } - - return nil -} - -// MetricExists .. -func (l *Local) MetricExists(k schemas.MetricKey) (bool, error) { - l.metricsMutex.RLock() - defer l.metricsMutex.RUnlock() - - _, ok := l.metrics[k] - return ok, nil -} - -// Metrics .. -func (l *Local) Metrics() (metrics schemas.Metrics, err error) { - metrics = make(schemas.Metrics) - l.metricsMutex.RLock() - defer l.metricsMutex.RUnlock() - - for k, v := range l.metrics { - metrics[k] = v - } - return -} - -// MetricsCount .. 
-func (l *Local) MetricsCount() (int64, error) { - l.metricsMutex.RLock() - defer l.metricsMutex.RUnlock() - - return int64(len(l.metrics)), nil -} diff --git a/pkg/storage/local_test.go b/pkg/storage/local_test.go deleted file mode 100644 index 3e260249..00000000 --- a/pkg/storage/local_test.go +++ /dev/null @@ -1,174 +0,0 @@ -package storage - -import ( - "testing" - - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" - "github.com/prometheus/client_golang/prometheus" - "github.com/stretchr/testify/assert" -) - -func TestLocalProjectFunctions(t *testing.T) { - p := schemas.Project{ - Name: "foo/bar", - ProjectParameters: schemas.ProjectParameters{ - OutputSparseStatusMetricsValue: pointy.Bool(false), - }, - } - - l := NewLocalStorage() - l.SetProject(p) - - // Set project - projects, err := l.Projects() - assert.NoError(t, err) - assert.Contains(t, projects, p.Key()) - assert.Equal(t, p, projects[p.Key()]) - - // Project exists - exists, err := l.ProjectExists(p.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetProject should succeed - newProject := schemas.Project{ - Name: "foo/bar", - } - assert.NoError(t, l.GetProject(&newProject)) - assert.Equal(t, p, newProject) - - // Count - count, err := l.ProjectsCount() - assert.NoError(t, err) - assert.Equal(t, int64(1), count) - - // Delete project - l.DelProject(p.Key()) - projects, err = l.Projects() - assert.NoError(t, err) - assert.NotContains(t, projects, p.Key()) - - exists, err = l.ProjectExists(p.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetProject should not update the var this time - newProject = schemas.Project{ - Name: "foo/bar", - } - assert.NoError(t, l.GetProject(&newProject)) - assert.NotEqual(t, p, newProject) -} - -func TestLocalEnvironmentFunctions(t *testing.T) { - environment := schemas.Environment{ - ProjectName: "foo", - ID: 1, - } - - l := NewLocalStorage() - l.SetEnvironment(environment) - - // Set project - 
environments, err := l.Environments() - assert.NoError(t, err) - assert.Contains(t, environments, environment.Key()) - assert.Equal(t, environment, environments[environment.Key()]) - - // Environment exists - exists, err := l.EnvironmentExists(environment.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetEnvironment should succeed - newEnvironment := schemas.Environment{ - ProjectName: "foo", - ID: 1, - } - assert.NoError(t, l.GetEnvironment(&newEnvironment)) - assert.Equal(t, environment, newEnvironment) - - // Count - count, err := l.EnvironmentsCount() - assert.NoError(t, err) - assert.Equal(t, int64(1), count) - - // Delete Environment - l.DelEnvironment(environment.Key()) - environments, err = l.Environments() - assert.NoError(t, err) - assert.NotContains(t, environments, environment.Key()) - - exists, err = l.EnvironmentExists(environment.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetEnvironment should not update the var this time - newEnvironment = schemas.Environment{ - ProjectName: "foo", - ID: 1, - ExternalURL: "foo", - } - assert.NoError(t, l.GetEnvironment(&newEnvironment)) - assert.NotEqual(t, environment, newEnvironment) -} - -func TestLocalMetricFunctions(t *testing.T) { - m := schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: prometheus.Labels{ - "foo": "bar", - }, - Value: 5, - } - - l := NewLocalStorage() - l.SetMetric(m) - - // Set metric - metrics, err := l.Metrics() - assert.NoError(t, err) - assert.Contains(t, metrics, m.Key()) - assert.Equal(t, m, metrics[m.Key()]) - - // Metric exists - exists, err := l.MetricExists(m.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetMetric should succeed - newMetric := schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: prometheus.Labels{ - "foo": "bar", - }, - } - assert.NoError(t, l.GetMetric(&newMetric)) - assert.Equal(t, m, newMetric) - - // Count - count, err := l.MetricsCount() - assert.NoError(t, err) - assert.Equal(t, 
int64(1), count) - - // Delete Metric - l.DelMetric(m.Key()) - metrics, err = l.Metrics() - assert.NoError(t, err) - assert.NotContains(t, metrics, m.Key()) - - exists, err = l.MetricExists(m.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetMetric should not update the var this time - newMetric = schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: prometheus.Labels{ - "foo": "bar", - }, - } - assert.NoError(t, l.GetMetric(&newMetric)) - assert.NotEqual(t, m, newMetric) -} diff --git a/pkg/storage/redis.go b/pkg/storage/redis.go deleted file mode 100644 index 73267168..00000000 --- a/pkg/storage/redis.go +++ /dev/null @@ -1,299 +0,0 @@ -package storage - -import ( - "context" - - "github.com/go-redis/redis/v8" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/vmihailenco/msgpack/v5" -) - -const ( - redisProjectsKey string = `projects` - redisEnvironmentsKey string = `environments` - redisRefsKey string = `refs` - redisMetricsKey string = `metrics` -) - -// Redis .. -type Redis struct { - *redis.Client - - ctx context.Context -} - -// SetProject .. -func (r *Redis) SetProject(p schemas.Project) error { - marshalledProject, err := msgpack.Marshal(p) - if err != nil { - return err - } - - _, err = r.HSet(r.ctx, redisProjectsKey, string(p.Key()), marshalledProject).Result() - return err -} - -// DelProject .. -func (r *Redis) DelProject(k schemas.ProjectKey) error { - _, err := r.HDel(r.ctx, redisProjectsKey, string(k)).Result() - return err -} - -// GetProject .. -func (r *Redis) GetProject(p *schemas.Project) error { - exists, err := r.ProjectExists(p.Key()) - if err != nil { - return err - } - - if exists { - k := p.Key() - marshalledProject, err := r.HGet(r.ctx, redisProjectsKey, string(k)).Result() - if err != nil { - return err - } - - if err = msgpack.Unmarshal([]byte(marshalledProject), p); err != nil { - return err - } - } - - return nil -} - -// ProjectExists .. 
-func (r *Redis) ProjectExists(k schemas.ProjectKey) (bool, error) { - return r.HExists(r.ctx, redisProjectsKey, string(k)).Result() -} - -// Projects .. -func (r *Redis) Projects() (schemas.Projects, error) { - projects := schemas.Projects{} - marshalledProjects, err := r.HGetAll(r.ctx, redisProjectsKey).Result() - if err != nil { - return projects, err - } - - for stringProjectKey, marshalledProject := range marshalledProjects { - p := schemas.Project{} - - if err = msgpack.Unmarshal([]byte(marshalledProject), &p); err != nil { - return projects, err - } - projects[schemas.ProjectKey(stringProjectKey)] = p - } - - return projects, nil -} - -// ProjectsCount .. -func (r *Redis) ProjectsCount() (int64, error) { - return r.HLen(r.ctx, redisProjectsKey).Result() -} - -// SetEnvironment .. -func (r *Redis) SetEnvironment(e schemas.Environment) error { - marshalledEnvironment, err := msgpack.Marshal(e) - if err != nil { - return err - } - - _, err = r.HSet(r.ctx, redisEnvironmentsKey, string(e.Key()), marshalledEnvironment).Result() - return err -} - -// DelEnvironment .. -func (r *Redis) DelEnvironment(k schemas.EnvironmentKey) error { - _, err := r.HDel(r.ctx, redisEnvironmentsKey, string(k)).Result() - return err -} - -// GetEnvironment .. -func (r *Redis) GetEnvironment(e *schemas.Environment) error { - exists, err := r.EnvironmentExists(e.Key()) - if err != nil { - return err - } - - if exists { - k := e.Key() - marshalledEnvironment, err := r.HGet(r.ctx, redisEnvironmentsKey, string(k)).Result() - if err != nil { - return err - } - - if err = msgpack.Unmarshal([]byte(marshalledEnvironment), e); err != nil { - return err - } - } - - return nil -} - -// EnvironmentExists .. -func (r *Redis) EnvironmentExists(k schemas.EnvironmentKey) (bool, error) { - return r.HExists(r.ctx, redisEnvironmentsKey, string(k)).Result() -} - -// Environments .. 
-func (r *Redis) Environments() (schemas.Environments, error) { - environments := schemas.Environments{} - marshalledProjects, err := r.HGetAll(r.ctx, redisEnvironmentsKey).Result() - if err != nil { - return environments, err - } - - for stringEnvironmentKey, marshalledEnvironment := range marshalledProjects { - p := schemas.Environment{} - - if err = msgpack.Unmarshal([]byte(marshalledEnvironment), &p); err != nil { - return environments, err - } - environments[schemas.EnvironmentKey(stringEnvironmentKey)] = p - } - - return environments, nil -} - -// EnvironmentsCount .. -func (r *Redis) EnvironmentsCount() (int64, error) { - return r.HLen(r.ctx, redisEnvironmentsKey).Result() -} - -// SetRef .. -func (r *Redis) SetRef(ref schemas.Ref) error { - marshalledRef, err := msgpack.Marshal(ref) - if err != nil { - return err - } - - _, err = r.HSet(r.ctx, redisRefsKey, string(ref.Key()), marshalledRef).Result() - return err -} - -// DelRef .. -func (r *Redis) DelRef(k schemas.RefKey) error { - _, err := r.HDel(r.ctx, redisRefsKey, string(k)).Result() - return err -} - -// GetRef .. -func (r *Redis) GetRef(ref *schemas.Ref) error { - exists, err := r.RefExists(ref.Key()) - if err != nil { - return err - } - - if exists { - k := ref.Key() - marshalledRef, err := r.HGet(r.ctx, redisRefsKey, string(k)).Result() - if err != nil { - return err - } - - if err = msgpack.Unmarshal([]byte(marshalledRef), ref); err != nil { - return err - } - } - - return nil -} - -// RefExists .. -func (r *Redis) RefExists(k schemas.RefKey) (bool, error) { - return r.HExists(r.ctx, redisRefsKey, string(k)).Result() -} - -// Refs .. 
-func (r *Redis) Refs() (schemas.Refs, error) { - refs := schemas.Refs{} - marshalledProjects, err := r.HGetAll(r.ctx, redisRefsKey).Result() - if err != nil { - return refs, err - } - - for stringRefKey, marshalledRef := range marshalledProjects { - p := schemas.Ref{} - - if err = msgpack.Unmarshal([]byte(marshalledRef), &p); err != nil { - return refs, err - } - refs[schemas.RefKey(stringRefKey)] = p - } - - return refs, nil -} - -// RefsCount .. -func (r *Redis) RefsCount() (int64, error) { - return r.HLen(r.ctx, redisRefsKey).Result() -} - -// SetMetric .. -func (r *Redis) SetMetric(m schemas.Metric) error { - marshalledMetric, err := msgpack.Marshal(m) - if err != nil { - return err - } - - _, err = r.HSet(r.ctx, redisMetricsKey, string(m.Key()), marshalledMetric).Result() - return err -} - -// DelMetric .. -func (r *Redis) DelMetric(k schemas.MetricKey) error { - _, err := r.HDel(r.ctx, redisMetricsKey, string(k)).Result() - return err -} - -// MetricExists .. -func (r *Redis) MetricExists(k schemas.MetricKey) (bool, error) { - return r.HExists(r.ctx, redisMetricsKey, string(k)).Result() -} - -// GetMetric .. -func (r *Redis) GetMetric(m *schemas.Metric) error { - exists, err := r.MetricExists(m.Key()) - if err != nil { - return err - } - - if exists { - k := m.Key() - marshalledMetric, err := r.HGet(r.ctx, redisMetricsKey, string(k)).Result() - if err != nil { - return err - } - - if err = msgpack.Unmarshal([]byte(marshalledMetric), m); err != nil { - return err - } - } - - return nil -} - -// Metrics .. 
-func (r *Redis) Metrics() (schemas.Metrics, error) { - metrics := schemas.Metrics{} - marshalledMetrics, err := r.HGetAll(r.ctx, redisMetricsKey).Result() - if err != nil { - return metrics, err - } - - for stringMetricKey, marshalledMetric := range marshalledMetrics { - m := schemas.Metric{} - - if err := msgpack.Unmarshal([]byte(marshalledMetric), &m); err != nil { - return metrics, err - } - metrics[schemas.MetricKey(stringMetricKey)] = m - } - - return metrics, nil -} - -// MetricsCount .. -func (r *Redis) MetricsCount() (int64, error) { - return r.HLen(r.ctx, redisMetricsKey).Result() -} diff --git a/pkg/storage/redis_test.go b/pkg/storage/redis_test.go deleted file mode 100644 index 3192c056..00000000 --- a/pkg/storage/redis_test.go +++ /dev/null @@ -1,257 +0,0 @@ -package storage - -import ( - "testing" - - "github.com/alicebob/miniredis/v2" - "github.com/go-redis/redis/v8" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/openlyinc/pointy" - "github.com/prometheus/client_golang/prometheus" - "github.com/stretchr/testify/assert" -) - -func TestRedisProjectFunctions(t *testing.T) { - s, err := miniredis.Run() - if err != nil { - panic(err) - } - defer s.Close() - - r := NewRedisStorage(redis.NewClient(&redis.Options{Addr: s.Addr()})) - - p := schemas.Project{ - Name: "foo/bar", - ProjectParameters: schemas.ProjectParameters{ - OutputSparseStatusMetricsValue: pointy.Bool(false), - }, - } - - // Set project - r.SetProject(p) - projects, err := r.Projects() - assert.NoError(t, err) - assert.Contains(t, projects, p.Key()) - assert.Equal(t, p, projects[p.Key()]) - - // Project exists - exists, err := r.ProjectExists(p.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetProject should succeed - newProject := schemas.Project{ - Name: "foo/bar", - } - assert.NoError(t, r.GetProject(&newProject)) - assert.Equal(t, p, newProject) - - // Count - count, err := r.ProjectsCount() - assert.NoError(t, err) - assert.Equal(t, 
int64(1), count) - - // Delete project - r.DelProject(p.Key()) - projects, err = r.Projects() - assert.NoError(t, err) - assert.NotContains(t, projects, p.Key()) - - exists, err = r.ProjectExists(p.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetProject should not update the var this time - newProject = schemas.Project{ - Name: "foo/bar", - } - assert.NoError(t, r.GetProject(&newProject)) - assert.NotEqual(t, p, newProject) -} - -func TestRedisEnvironmentFunctions(t *testing.T) { - s, err := miniredis.Run() - if err != nil { - panic(err) - } - defer s.Close() - - r := NewRedisStorage(redis.NewClient(&redis.Options{Addr: s.Addr()})) - - environment := schemas.Environment{ - ProjectName: "foo", - ID: 1, - ExternalURL: "bar", - } - - // Set project - r.SetEnvironment(environment) - environments, err := r.Environments() - assert.NoError(t, err) - assert.Contains(t, environments, environment.Key()) - assert.Equal(t, environment.ProjectName, environments[environment.Key()].ProjectName) - assert.Equal(t, environment.ID, environments[environment.Key()].ID) - - // Environment exists - exists, err := r.EnvironmentExists(environment.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetEnvironment should succeed - newEnvironment := schemas.Environment{ - ProjectName: "foo", - ID: 1, - } - assert.NoError(t, r.GetEnvironment(&newEnvironment)) - assert.Equal(t, environment.ExternalURL, newEnvironment.ExternalURL) - - // Count - count, err := r.EnvironmentsCount() - assert.NoError(t, err) - assert.Equal(t, int64(1), count) - - // Delete Environment - r.DelEnvironment(environment.Key()) - environments, err = r.Environments() - assert.NoError(t, err) - assert.NotContains(t, environments, environment.Key()) - - exists, err = r.EnvironmentExists(environment.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetEnvironment should not update the var this time - newEnvironment = schemas.Environment{ - ProjectName: "foo", - ID: 1, - } - 
assert.NoError(t, r.GetEnvironment(&newEnvironment)) - assert.NotEqual(t, environment, newEnvironment) -} - -func TestRedisRefFunctions(t *testing.T) { - s, err := miniredis.Run() - if err != nil { - panic(err) - } - defer s.Close() - - r := NewRedisStorage(redis.NewClient(&redis.Options{Addr: s.Addr()})) - - ref := schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo/bar", - Name: "sweet", - Topics: "salty", - } - - // Set project - r.SetRef(ref) - projectsRefs, err := r.Refs() - assert.NoError(t, err) - assert.Contains(t, projectsRefs, ref.Key()) - assert.Equal(t, ref, projectsRefs[ref.Key()]) - - // Ref exists - exists, err := r.RefExists(ref.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetRef should succeed - newRef := schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo/bar", - Name: "sweet", - } - assert.NoError(t, r.GetRef(&newRef)) - assert.Equal(t, ref, newRef) - - // Count - count, err := r.RefsCount() - assert.NoError(t, err) - assert.Equal(t, int64(1), count) - - // Delete Ref - r.DelRef(ref.Key()) - projectsRefs, err = r.Refs() - assert.NoError(t, err) - assert.NotContains(t, projectsRefs, ref.Key()) - - exists, err = r.RefExists(ref.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetRef should not update the var this time - newRef = schemas.Ref{ - Kind: schemas.RefKindBranch, - ProjectName: "foo/bar", - Name: "sweet", - } - assert.NoError(t, r.GetRef(&newRef)) - assert.NotEqual(t, ref, newRef) -} - -func TestRedisMetricFunctions(t *testing.T) { - s, err := miniredis.Run() - if err != nil { - panic(err) - } - defer s.Close() - - r := NewRedisStorage(redis.NewClient(&redis.Options{Addr: s.Addr()})) - - m := schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: prometheus.Labels{ - "foo": "bar", - }, - Value: 5, - } - - // Set metric - r.SetMetric(m) - metrics, err := r.Metrics() - assert.NoError(t, err) - assert.Contains(t, metrics, m.Key()) - assert.Equal(t, m, metrics[m.Key()]) - - // 
Metric exists - exists, err := r.MetricExists(m.Key()) - assert.NoError(t, err) - assert.True(t, exists) - - // GetMetric should succeed - newMetric := schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: prometheus.Labels{ - "foo": "bar", - }, - } - assert.NoError(t, r.GetMetric(&newMetric)) - assert.Equal(t, m, newMetric) - - // Count - count, err := r.MetricsCount() - assert.NoError(t, err) - assert.Equal(t, int64(1), count) - - // Delete Metric - r.DelMetric(m.Key()) - metrics, err = r.Metrics() - assert.NoError(t, err) - assert.NotContains(t, metrics, m.Key()) - - exists, err = r.MetricExists(m.Key()) - assert.NoError(t, err) - assert.False(t, exists) - - // GetMetric should not update the var this time - newMetric = schemas.Metric{ - Kind: schemas.MetricKindCoverage, - Labels: prometheus.Labels{ - "foo": "bar", - }, - } - assert.NoError(t, r.GetMetric(&newMetric)) - assert.NotEqual(t, m, newMetric) -} diff --git a/pkg/storage/storage.go b/pkg/storage/storage.go deleted file mode 100644 index 1a3260a7..00000000 --- a/pkg/storage/storage.go +++ /dev/null @@ -1,57 +0,0 @@ -package storage - -import ( - "context" - - "github.com/go-redis/redis/v8" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" -) - -// Storage .. 
-type Storage interface { - SetProject(schemas.Project) error - DelProject(schemas.ProjectKey) error - GetProject(*schemas.Project) error - ProjectExists(schemas.ProjectKey) (bool, error) - Projects() (schemas.Projects, error) - ProjectsCount() (int64, error) - - SetEnvironment(schemas.Environment) error - DelEnvironment(schemas.EnvironmentKey) error - GetEnvironment(*schemas.Environment) error - EnvironmentExists(schemas.EnvironmentKey) (bool, error) - Environments() (schemas.Environments, error) - EnvironmentsCount() (int64, error) - - SetRef(schemas.Ref) error - DelRef(schemas.RefKey) error - GetRef(*schemas.Ref) error - RefExists(schemas.RefKey) (bool, error) - Refs() (schemas.Refs, error) - RefsCount() (int64, error) - - SetMetric(schemas.Metric) error - DelMetric(schemas.MetricKey) error - GetMetric(*schemas.Metric) error - MetricExists(schemas.MetricKey) (bool, error) - Metrics() (schemas.Metrics, error) - MetricsCount() (int64, error) -} - -// NewLocalStorage .. -func NewLocalStorage() Storage { - return &Local{ - projects: make(schemas.Projects), - environments: make(schemas.Environments), - refs: make(schemas.Refs), - metrics: make(schemas.Metrics), - } -} - -// NewRedisStorage .. 
-func NewRedisStorage(client *redis.Client) Storage { - return &Redis{ - Client: client, - ctx: context.TODO(), - } -} diff --git a/pkg/storage/storage_test.go b/pkg/storage/storage_test.go deleted file mode 100644 index 79851b45..00000000 --- a/pkg/storage/storage_test.go +++ /dev/null @@ -1,30 +0,0 @@ -package storage - -import ( - "context" - "testing" - - "github.com/go-redis/redis/v8" - "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" - "github.com/stretchr/testify/assert" -) - -func TestNewLocalStorage(t *testing.T) { - expectedValue := &Local{ - projects: make(schemas.Projects), - environments: make(schemas.Environments), - refs: make(schemas.Refs), - metrics: make(schemas.Metrics), - } - assert.Equal(t, expectedValue, NewLocalStorage()) -} - -func TestNewRedisStorage(t *testing.T) { - redisClient := redis.NewClient(&redis.Options{}) - expectedValue := &Redis{ - Client: redisClient, - ctx: context.TODO(), - } - - assert.Equal(t, expectedValue, NewRedisStorage(redisClient)) -} diff --git a/pkg/store/local.go b/pkg/store/local.go new file mode 100644 index 00000000..85058593 --- /dev/null +++ b/pkg/store/local.go @@ -0,0 +1,359 @@ +package store + +import ( + "context" + "sync" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +// Local .. +type Local struct { + projects schemas.Projects + projectsMutex sync.RWMutex + + environments schemas.Environments + environmentsMutex sync.RWMutex + + refs schemas.Refs + refsMutex sync.RWMutex + + metrics schemas.Metrics + metricsMutex sync.RWMutex + + tasks schemas.Tasks + tasksMutex sync.RWMutex + executedTasksCount uint64 +} + +// SetProject .. +func (l *Local) SetProject(_ context.Context, p schemas.Project) error { + l.projectsMutex.Lock() + defer l.projectsMutex.Unlock() + + l.projects[p.Key()] = p + + return nil +} + +// DelProject .. 
+func (l *Local) DelProject(_ context.Context, k schemas.ProjectKey) error { + l.projectsMutex.Lock() + defer l.projectsMutex.Unlock() + + delete(l.projects, k) + + return nil +} + +// GetProject .. +func (l *Local) GetProject(ctx context.Context, p *schemas.Project) error { + exists, _ := l.ProjectExists(ctx, p.Key()) + + if exists { + l.projectsMutex.RLock() + *p = l.projects[p.Key()] + l.projectsMutex.RUnlock() + } + + return nil +} + +// ProjectExists .. +func (l *Local) ProjectExists(_ context.Context, k schemas.ProjectKey) (bool, error) { + l.projectsMutex.RLock() + defer l.projectsMutex.RUnlock() + + _, ok := l.projects[k] + + return ok, nil +} + +// Projects .. +func (l *Local) Projects(_ context.Context) (projects schemas.Projects, err error) { + projects = make(schemas.Projects) + + l.projectsMutex.RLock() + defer l.projectsMutex.RUnlock() + + for k, v := range l.projects { + projects[k] = v + } + + return +} + +// ProjectsCount .. +func (l *Local) ProjectsCount(_ context.Context) (int64, error) { + l.projectsMutex.RLock() + defer l.projectsMutex.RUnlock() + + return int64(len(l.projects)), nil +} + +// SetEnvironment .. +func (l *Local) SetEnvironment(_ context.Context, environment schemas.Environment) error { + l.environmentsMutex.Lock() + defer l.environmentsMutex.Unlock() + + l.environments[environment.Key()] = environment + + return nil +} + +// DelEnvironment .. +func (l *Local) DelEnvironment(_ context.Context, k schemas.EnvironmentKey) error { + l.environmentsMutex.Lock() + defer l.environmentsMutex.Unlock() + + delete(l.environments, k) + + return nil +} + +// GetEnvironment .. +func (l *Local) GetEnvironment(ctx context.Context, environment *schemas.Environment) error { + exists, _ := l.EnvironmentExists(ctx, environment.Key()) + + if exists { + l.environmentsMutex.RLock() + *environment = l.environments[environment.Key()] + l.environmentsMutex.RUnlock() + } + + return nil +} + +// EnvironmentExists .. 
+func (l *Local) EnvironmentExists(_ context.Context, k schemas.EnvironmentKey) (bool, error) { + l.environmentsMutex.RLock() + defer l.environmentsMutex.RUnlock() + + _, ok := l.environments[k] + + return ok, nil +} + +// Environments .. +func (l *Local) Environments(_ context.Context) (environments schemas.Environments, err error) { + environments = make(schemas.Environments) + + l.environmentsMutex.RLock() + defer l.environmentsMutex.RUnlock() + + for k, v := range l.environments { + environments[k] = v + } + + return +} + +// EnvironmentsCount .. +func (l *Local) EnvironmentsCount(_ context.Context) (int64, error) { + l.environmentsMutex.RLock() + defer l.environmentsMutex.RUnlock() + + return int64(len(l.environments)), nil +} + +// SetRef .. +func (l *Local) SetRef(_ context.Context, ref schemas.Ref) error { + l.refsMutex.Lock() + defer l.refsMutex.Unlock() + + l.refs[ref.Key()] = ref + + return nil +} + +// DelRef .. +func (l *Local) DelRef(_ context.Context, k schemas.RefKey) error { + l.refsMutex.Lock() + defer l.refsMutex.Unlock() + + delete(l.refs, k) + + return nil +} + +// GetRef .. +func (l *Local) GetRef(ctx context.Context, ref *schemas.Ref) error { + exists, _ := l.RefExists(ctx, ref.Key()) + + if exists { + l.refsMutex.RLock() + *ref = l.refs[ref.Key()] + l.refsMutex.RUnlock() + } + + return nil +} + +// RefExists .. +func (l *Local) RefExists(_ context.Context, k schemas.RefKey) (bool, error) { + l.refsMutex.RLock() + defer l.refsMutex.RUnlock() + + _, ok := l.refs[k] + + return ok, nil +} + +// Refs .. +func (l *Local) Refs(_ context.Context) (refs schemas.Refs, err error) { + refs = make(schemas.Refs) + + l.refsMutex.RLock() + defer l.refsMutex.RUnlock() + + for k, v := range l.refs { + refs[k] = v + } + + return +} + +// RefsCount .. +func (l *Local) RefsCount(_ context.Context) (int64, error) { + l.refsMutex.RLock() + defer l.refsMutex.RUnlock() + + return int64(len(l.refs)), nil +} + +// SetMetric .. 
+func (l *Local) SetMetric(_ context.Context, m schemas.Metric) error { + l.metricsMutex.Lock() + defer l.metricsMutex.Unlock() + + l.metrics[m.Key()] = m + + return nil +} + +// DelMetric .. +func (l *Local) DelMetric(_ context.Context, k schemas.MetricKey) error { + l.metricsMutex.Lock() + defer l.metricsMutex.Unlock() + + delete(l.metrics, k) + + return nil +} + +// GetMetric .. +func (l *Local) GetMetric(ctx context.Context, m *schemas.Metric) error { + exists, _ := l.MetricExists(ctx, m.Key()) + + if exists { + l.metricsMutex.RLock() + *m = l.metrics[m.Key()] + l.metricsMutex.RUnlock() + } + + return nil +} + +// MetricExists .. +func (l *Local) MetricExists(_ context.Context, k schemas.MetricKey) (bool, error) { + l.metricsMutex.RLock() + defer l.metricsMutex.RUnlock() + + _, ok := l.metrics[k] + + return ok, nil +} + +// Metrics .. +func (l *Local) Metrics(_ context.Context) (metrics schemas.Metrics, err error) { + metrics = make(schemas.Metrics) + + l.metricsMutex.RLock() + defer l.metricsMutex.RUnlock() + + for k, v := range l.metrics { + metrics[k] = v + } + + return +} + +// MetricsCount .. +func (l *Local) MetricsCount(_ context.Context) (int64, error) { + l.metricsMutex.RLock() + defer l.metricsMutex.RUnlock() + + return int64(len(l.metrics)), nil +} + +// isTaskAlreadyQueued assess if a task is already queued or not. +func (l *Local) isTaskAlreadyQueued(tt schemas.TaskType, uniqueID string) bool { + l.tasksMutex.Lock() + defer l.tasksMutex.Unlock() + + if l.tasks == nil { + l.tasks = make(map[schemas.TaskType]map[string]interface{}) + } + + taskTypeQueue, ok := l.tasks[tt] + if !ok { + l.tasks[tt] = make(map[string]interface{}) + + return false + } + + if _, alreadyQueued := taskTypeQueue[uniqueID]; alreadyQueued { + return true + } + + return false +} + +// QueueTask registers that we are queueing the task. +// It returns true if it managed to schedule it, false if it was already scheduled. 
+func (l *Local) QueueTask(_ context.Context, tt schemas.TaskType, uniqueID, _ string) (bool, error) { + if !l.isTaskAlreadyQueued(tt, uniqueID) { + l.tasksMutex.Lock() + defer l.tasksMutex.Unlock() + + l.tasks[tt][uniqueID] = nil + + return true, nil + } + + return false, nil +} + +// UnqueueTask removes the task from the tracker. +func (l *Local) UnqueueTask(_ context.Context, tt schemas.TaskType, uniqueID string) error { + if l.isTaskAlreadyQueued(tt, uniqueID) { + l.tasksMutex.Lock() + defer l.tasksMutex.Unlock() + + delete(l.tasks[tt], uniqueID) + + l.executedTasksCount++ + } + + return nil +} + +// CurrentlyQueuedTasksCount .. +func (l *Local) CurrentlyQueuedTasksCount(_ context.Context) (count uint64, err error) { + l.tasksMutex.RLock() + defer l.tasksMutex.RUnlock() + + for _, t := range l.tasks { + count += uint64(len(t)) + } + + return +} + +// ExecutedTasksCount .. +func (l *Local) ExecutedTasksCount(_ context.Context) (uint64, error) { + l.tasksMutex.RLock() + defer l.tasksMutex.RUnlock() + + return l.executedTasksCount, nil +} diff --git a/pkg/store/local_test.go b/pkg/store/local_test.go new file mode 100644 index 00000000..aa4fcb1e --- /dev/null +++ b/pkg/store/local_test.go @@ -0,0 +1,271 @@ +package store + +import ( + "testing" + + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func TestLocalProjectFunctions(t *testing.T) { + p := schemas.NewProject("foo/bar") + p.OutputSparseStatusMetrics = false + + l := NewLocalStore() + assert.NoError(t, l.SetProject(testCtx, p)) + + // Set project + projects, err := l.Projects(testCtx) + assert.NoError(t, err) + assert.Contains(t, projects, p.Key()) + assert.Equal(t, p, projects[p.Key()]) + + // Project exists + exists, err := l.ProjectExists(testCtx, p.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetProject should succeed + newProject := schemas.NewProject("foo/bar") + 
assert.NoError(t, l.GetProject(testCtx, &newProject)) + assert.Equal(t, p, newProject) + + // Count + count, err := l.ProjectsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete project + assert.NoError(t, l.DelProject(testCtx, p.Key())) + projects, err = l.Projects(testCtx) + assert.NoError(t, err) + assert.NotContains(t, projects, p.Key()) + + exists, err = l.ProjectExists(testCtx, p.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetProject should not update the var this time + newProject = schemas.NewProject("foo/bar") + assert.NoError(t, l.GetProject(testCtx, &newProject)) + assert.NotEqual(t, p, newProject) +} + +func TestLocalEnvironmentFunctions(t *testing.T) { + environment := schemas.Environment{ + ProjectName: "foo", + ID: 1, + } + + l := NewLocalStore() + assert.NoError(t, l.SetEnvironment(testCtx, environment)) + + // Set project + environments, err := l.Environments(testCtx) + assert.NoError(t, err) + assert.Contains(t, environments, environment.Key()) + assert.Equal(t, environment, environments[environment.Key()]) + + // Environment exists + exists, err := l.EnvironmentExists(testCtx, environment.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetEnvironment should succeed + newEnvironment := schemas.Environment{ + ProjectName: "foo", + ID: 1, + } + assert.NoError(t, l.GetEnvironment(testCtx, &newEnvironment)) + assert.Equal(t, environment, newEnvironment) + + // Count + count, err := l.EnvironmentsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete Environment + assert.NoError(t, l.DelEnvironment(testCtx, environment.Key())) + environments, err = l.Environments(testCtx) + assert.NoError(t, err) + assert.NotContains(t, environments, environment.Key()) + + exists, err = l.EnvironmentExists(testCtx, environment.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetEnvironment should not update the var this time + newEnvironment = 
schemas.Environment{ + ProjectName: "foo", + ID: 1, + ExternalURL: "foo", + } + assert.NoError(t, l.GetEnvironment(testCtx, &newEnvironment)) + assert.NotEqual(t, environment, newEnvironment) +} + +func TestLocalRefFunctions(t *testing.T) { + p := schemas.NewProject("foo/bar") + p.Topics = "salty" + ref := schemas.NewRef( + p, + schemas.RefKindBranch, + "sweet", + ) + + // Set project + l := NewLocalStore() + assert.NoError(t, l.SetRef(testCtx, ref)) + + projectsRefs, err := l.Refs(testCtx) + assert.NoError(t, err) + assert.Contains(t, projectsRefs, ref.Key()) + assert.Equal(t, ref, projectsRefs[ref.Key()]) + + // Ref exists + exists, err := l.RefExists(testCtx, ref.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetRef should succeed + newRef := schemas.Ref{ + Project: schemas.NewProject("foo/bar"), + Kind: schemas.RefKindBranch, + Name: "sweet", + } + assert.NoError(t, l.GetRef(testCtx, &newRef)) + assert.Equal(t, ref, newRef) + + // Count + count, err := l.RefsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete Ref + assert.NoError(t, l.DelRef(testCtx, ref.Key())) + projectsRefs, err = l.Refs(testCtx) + assert.NoError(t, err) + assert.NotContains(t, projectsRefs, ref.Key()) + + exists, err = l.RefExists(testCtx, ref.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetRef should not update the var this time + newRef = schemas.Ref{ + Kind: schemas.RefKindBranch, + Project: schemas.NewProject("foo/bar"), + Name: "sweet", + } + assert.NoError(t, l.GetRef(testCtx, &newRef)) + assert.NotEqual(t, ref, newRef) +} + +func TestLocalMetricFunctions(t *testing.T) { + m := schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: prometheus.Labels{ + "foo": "bar", + }, + Value: 5, + } + + l := NewLocalStore() + assert.NoError(t, l.SetMetric(testCtx, m)) + + // Set metric + metrics, err := l.Metrics(testCtx) + assert.NoError(t, err) + assert.Contains(t, metrics, m.Key()) + assert.Equal(t, m, 
metrics[m.Key()]) + + // Metric exists + exists, err := l.MetricExists(testCtx, m.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetMetric should succeed + newMetric := schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: prometheus.Labels{ + "foo": "bar", + }, + } + assert.NoError(t, l.GetMetric(testCtx, &newMetric)) + assert.Equal(t, m, newMetric) + + // Count + count, err := l.MetricsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete Metric + l.DelMetric(testCtx, m.Key()) + metrics, err = l.Metrics(testCtx) + assert.NoError(t, err) + assert.NotContains(t, metrics, m.Key()) + + exists, err = l.MetricExists(testCtx, m.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetMetric should not update the var this time + newMetric = schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: prometheus.Labels{ + "foo": "bar", + }, + } + assert.NoError(t, l.GetMetric(testCtx, &newMetric)) + assert.NotEqual(t, m, newMetric) +} + +func TestLocalQueueTask(t *testing.T) { + l := NewLocalStore() + ok, err := l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + assert.True(t, ok) + assert.NoError(t, err) + + ok, err = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + assert.False(t, ok) + assert.NoError(t, err) + + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "bar", "") + ok, err = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "bar", "") + assert.False(t, ok) + assert.NoError(t, err) +} + +func TestLocalUnqueueTask(t *testing.T) { + l := NewLocalStore() + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + assert.Equal(t, uint64(0), l.(*Local).executedTasksCount) + assert.NoError(t, l.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo")) + assert.Equal(t, uint64(1), l.(*Local).executedTasksCount) +} + +func TestLocalCurrentlyQueuedTasksCount(t *testing.T) { + l := NewLocalStore() + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, 
"foo", "") + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "bar", "") + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "baz", "") + + count, _ := l.CurrentlyQueuedTasksCount(testCtx) + assert.Equal(t, uint64(3), count) + assert.NoError(t, l.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo")) + count, _ = l.CurrentlyQueuedTasksCount(testCtx) + assert.Equal(t, uint64(2), count) +} + +func TestLocalExecutedTasksCount(t *testing.T) { + l := NewLocalStore() + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + _, _ = l.QueueTask(testCtx, schemas.TaskTypePullMetrics, "bar", "") + _ = l.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo") + _ = l.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo") + + count, _ := l.ExecutedTasksCount(testCtx) + assert.Equal(t, uint64(1), count) +} diff --git a/pkg/store/redis.go b/pkg/store/redis.go new file mode 100644 index 00000000..29f33782 --- /dev/null +++ b/pkg/store/redis.go @@ -0,0 +1,420 @@ +package store + +import ( + "context" + "fmt" + "strconv" + "time" + + "github.com/redis/go-redis/v9" + "github.com/vmihailenco/msgpack/v5" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +const ( + redisProjectsKey string = `projects` + redisEnvironmentsKey string = `environments` + redisRefsKey string = `refs` + redisMetricsKey string = `metrics` + redisTaskKey string = `task` + redisTasksExecutedCountKey string = `tasksExecutedCount` + redisKeepaliveKey string = `keepalive` +) + +// Redis .. +type Redis struct { + *redis.Client +} + +// SetProject .. +func (r *Redis) SetProject(ctx context.Context, p schemas.Project) error { + marshalledProject, err := msgpack.Marshal(p) + if err != nil { + return err + } + + _, err = r.HSet(ctx, redisProjectsKey, string(p.Key()), marshalledProject).Result() + + return err +} + +// DelProject .. 
+func (r *Redis) DelProject(ctx context.Context, k schemas.ProjectKey) error { + _, err := r.HDel(ctx, redisProjectsKey, string(k)).Result() + + return err +} + +// GetProject .. +func (r *Redis) GetProject(ctx context.Context, p *schemas.Project) error { + exists, err := r.ProjectExists(ctx, p.Key()) + if err != nil { + return err + } + + if exists { + k := p.Key() + + marshalledProject, err := r.HGet(ctx, redisProjectsKey, string(k)).Result() + if err != nil { + return err + } + + if err = msgpack.Unmarshal([]byte(marshalledProject), p); err != nil { + return err + } + } + + return nil +} + +// ProjectExists .. +func (r *Redis) ProjectExists(ctx context.Context, k schemas.ProjectKey) (bool, error) { + return r.HExists(ctx, redisProjectsKey, string(k)).Result() +} + +// Projects .. +func (r *Redis) Projects(ctx context.Context) (schemas.Projects, error) { + projects := schemas.Projects{} + + marshalledProjects, err := r.HGetAll(ctx, redisProjectsKey).Result() + if err != nil { + return projects, err + } + + for stringProjectKey, marshalledProject := range marshalledProjects { + p := schemas.Project{} + + if err = msgpack.Unmarshal([]byte(marshalledProject), &p); err != nil { + return projects, err + } + + projects[schemas.ProjectKey(stringProjectKey)] = p + } + + return projects, nil +} + +// ProjectsCount .. +func (r *Redis) ProjectsCount(ctx context.Context) (int64, error) { + return r.HLen(ctx, redisProjectsKey).Result() +} + +// SetEnvironment .. +func (r *Redis) SetEnvironment(ctx context.Context, e schemas.Environment) error { + marshalledEnvironment, err := msgpack.Marshal(e) + if err != nil { + return err + } + + _, err = r.HSet(ctx, redisEnvironmentsKey, string(e.Key()), marshalledEnvironment).Result() + + return err +} + +// DelEnvironment .. +func (r *Redis) DelEnvironment(ctx context.Context, k schemas.EnvironmentKey) error { + _, err := r.HDel(ctx, redisEnvironmentsKey, string(k)).Result() + + return err +} + +// GetEnvironment .. 
+func (r *Redis) GetEnvironment(ctx context.Context, e *schemas.Environment) error { + exists, err := r.EnvironmentExists(ctx, e.Key()) + if err != nil { + return err + } + + if exists { + k := e.Key() + + marshalledEnvironment, err := r.HGet(ctx, redisEnvironmentsKey, string(k)).Result() + if err != nil { + return err + } + + if err = msgpack.Unmarshal([]byte(marshalledEnvironment), e); err != nil { + return err + } + } + + return nil +} + +// EnvironmentExists .. +func (r *Redis) EnvironmentExists(ctx context.Context, k schemas.EnvironmentKey) (bool, error) { + return r.HExists(ctx, redisEnvironmentsKey, string(k)).Result() +} + +// Environments .. +func (r *Redis) Environments(ctx context.Context) (schemas.Environments, error) { + environments := schemas.Environments{} + + marshalledProjects, err := r.HGetAll(ctx, redisEnvironmentsKey).Result() + if err != nil { + return environments, err + } + + for stringEnvironmentKey, marshalledEnvironment := range marshalledProjects { + p := schemas.Environment{} + + if err = msgpack.Unmarshal([]byte(marshalledEnvironment), &p); err != nil { + return environments, err + } + + environments[schemas.EnvironmentKey(stringEnvironmentKey)] = p + } + + return environments, nil +} + +// EnvironmentsCount .. +func (r *Redis) EnvironmentsCount(ctx context.Context) (int64, error) { + return r.HLen(ctx, redisEnvironmentsKey).Result() +} + +// SetRef .. +func (r *Redis) SetRef(ctx context.Context, ref schemas.Ref) error { + marshalledRef, err := msgpack.Marshal(ref) + if err != nil { + return err + } + + _, err = r.HSet(ctx, redisRefsKey, string(ref.Key()), marshalledRef).Result() + + return err +} + +// DelRef .. +func (r *Redis) DelRef(ctx context.Context, k schemas.RefKey) error { + _, err := r.HDel(ctx, redisRefsKey, string(k)).Result() + + return err +} + +// GetRef .. 
+func (r *Redis) GetRef(ctx context.Context, ref *schemas.Ref) error { + exists, err := r.RefExists(ctx, ref.Key()) + if err != nil { + return err + } + + if exists { + k := ref.Key() + + marshalledRef, err := r.HGet(ctx, redisRefsKey, string(k)).Result() + if err != nil { + return err + } + + if err = msgpack.Unmarshal([]byte(marshalledRef), ref); err != nil { + return err + } + } + + return nil +} + +// RefExists .. +func (r *Redis) RefExists(ctx context.Context, k schemas.RefKey) (bool, error) { + return r.HExists(ctx, redisRefsKey, string(k)).Result() +} + +// Refs .. +func (r *Redis) Refs(ctx context.Context) (schemas.Refs, error) { + refs := schemas.Refs{} + + marshalledProjects, err := r.HGetAll(ctx, redisRefsKey).Result() + if err != nil { + return refs, err + } + + for stringRefKey, marshalledRef := range marshalledProjects { + p := schemas.Ref{} + + if err = msgpack.Unmarshal([]byte(marshalledRef), &p); err != nil { + return refs, err + } + + refs[schemas.RefKey(stringRefKey)] = p + } + + return refs, nil +} + +// RefsCount .. +func (r *Redis) RefsCount(ctx context.Context) (int64, error) { + return r.HLen(ctx, redisRefsKey).Result() +} + +// SetMetric .. +func (r *Redis) SetMetric(ctx context.Context, m schemas.Metric) error { + marshalledMetric, err := msgpack.Marshal(m) + if err != nil { + return err + } + + _, err = r.HSet(ctx, redisMetricsKey, string(m.Key()), marshalledMetric).Result() + + return err +} + +// DelMetric .. +func (r *Redis) DelMetric(ctx context.Context, k schemas.MetricKey) error { + _, err := r.HDel(ctx, redisMetricsKey, string(k)).Result() + + return err +} + +// MetricExists .. +func (r *Redis) MetricExists(ctx context.Context, k schemas.MetricKey) (bool, error) { + return r.HExists(ctx, redisMetricsKey, string(k)).Result() +} + +// GetMetric .. 
+func (r *Redis) GetMetric(ctx context.Context, m *schemas.Metric) error { + exists, err := r.MetricExists(ctx, m.Key()) + if err != nil { + return err + } + + if exists { + k := m.Key() + + marshalledMetric, err := r.HGet(ctx, redisMetricsKey, string(k)).Result() + if err != nil { + return err + } + + if err = msgpack.Unmarshal([]byte(marshalledMetric), m); err != nil { + return err + } + } + + return nil +} + +// Metrics .. +func (r *Redis) Metrics(ctx context.Context) (schemas.Metrics, error) { + metrics := schemas.Metrics{} + + marshalledMetrics, err := r.HGetAll(ctx, redisMetricsKey).Result() + if err != nil { + return metrics, err + } + + for stringMetricKey, marshalledMetric := range marshalledMetrics { + m := schemas.Metric{} + + if err := msgpack.Unmarshal([]byte(marshalledMetric), &m); err != nil { + return metrics, err + } + + metrics[schemas.MetricKey(stringMetricKey)] = m + } + + return metrics, nil +} + +// MetricsCount .. +func (r *Redis) MetricsCount(ctx context.Context) (int64, error) { + return r.HLen(ctx, redisMetricsKey).Result() +} + +// SetKeepalive sets a key with an UUID corresponding to the currently running process. +func (r *Redis) SetKeepalive(ctx context.Context, uuid string, ttl time.Duration) (bool, error) { + return r.SetNX(ctx, fmt.Sprintf("%s:%s", redisKeepaliveKey, uuid), nil, ttl).Result() +} + +// KeepaliveExists returns whether a keepalive exists or not for a particular UUID. +func (r *Redis) KeepaliveExists(ctx context.Context, uuid string) (bool, error) { + exists, err := r.Exists(ctx, fmt.Sprintf("%s:%s", redisKeepaliveKey, uuid)).Result() + + return exists == 1, err +} + +func getRedisQueueKey(tt schemas.TaskType, taskUUID string) string { + return fmt.Sprintf("%s:%v:%s", redisTaskKey, tt, taskUUID) +} + +// QueueTask registers that we are queueing the task. +// It returns true if it managed to schedule it, false if it was already scheduled. 
+func (r *Redis) QueueTask(ctx context.Context, tt schemas.TaskType, taskUUID, processUUID string) (set bool, err error) { + k := getRedisQueueKey(tt, taskUUID) + + // We attempt to set the key, if it already exists, we do not overwrite it + set, err = r.SetNX(ctx, k, processUUID, 0).Result() + if err != nil || set { + return + } + + // If the key already exists, we want to check a couple of things + // First, that the associated process UUID is the same as our current one + var tpuuid string + + if tpuuid, err = r.Get(ctx, k).Result(); err != nil { + return + } + + // If it is not the case, we assess that the one being associated with the task lock + // is still alive, otherwise we override the key and schedule the task + if tpuuid != processUUID { + var uuidIsAlive bool + + if uuidIsAlive, err = r.KeepaliveExists(ctx, tpuuid); err != nil { + return + } + + if !uuidIsAlive { + if _, err = r.Set(ctx, k, processUUID, 0).Result(); err != nil { + return + } + + return true, nil + } + } + + return +} + +// UnqueueTask removes the task from the tracker. +func (r *Redis) UnqueueTask(ctx context.Context, tt schemas.TaskType, taskUUID string) (err error) { + var matched int64 + + matched, err = r.Del(ctx, getRedisQueueKey(tt, taskUUID)).Result() + if err != nil { + return + } + + if matched > 0 { + _, err = r.Incr(ctx, redisTasksExecutedCountKey).Result() + } + + return +} + +// CurrentlyQueuedTasksCount .. +func (r *Redis) CurrentlyQueuedTasksCount(ctx context.Context) (count uint64, err error) { + iter := r.Scan(ctx, 0, fmt.Sprintf("%s:*", redisTaskKey), 0).Iterator() + for iter.Next(ctx) { + count++ + } + + err = iter.Err() + + return +} + +// ExecutedTasksCount .. 
+func (r *Redis) ExecutedTasksCount(ctx context.Context) (uint64, error) { + countString, err := r.Get(ctx, redisTasksExecutedCountKey).Result() + if err != nil { + return 0, err + } + + c, err := strconv.Atoi(countString) + + return uint64(c), err +} diff --git a/pkg/store/redis_test.go b/pkg/store/redis_test.go new file mode 100644 index 00000000..3fadb93a --- /dev/null +++ b/pkg/store/redis_test.go @@ -0,0 +1,324 @@ +package store + +import ( + "testing" + "time" + + "github.com/alicebob/miniredis/v2" + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/redis/go-redis/v9" + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +func newTestRedisStore(t *testing.T) (mr *miniredis.Miniredis, r Store) { + mr, err := miniredis.Run() + if err != nil { + panic(err) + } + + t.Cleanup(func() { + mr.Close() + }) + + return mr, NewRedisStore(redis.NewClient(&redis.Options{Addr: mr.Addr()})).(*Redis) +} + +func TestRedisProjectFunctions(t *testing.T) { + _, r := newTestRedisStore(t) + + p := schemas.NewProject("foo/bar") + p.OutputSparseStatusMetrics = false + + // Set project + r.SetProject(testCtx, p) + projects, err := r.Projects(testCtx) + assert.NoError(t, err) + assert.Contains(t, projects, p.Key()) + assert.Equal(t, p, projects[p.Key()]) + + // Project exists + exists, err := r.ProjectExists(testCtx, p.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetProject should succeed + newProject := schemas.NewProject("foo/bar") + assert.NoError(t, r.GetProject(testCtx, &newProject)) + assert.Equal(t, p, newProject) + + // Count + count, err := r.ProjectsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete project + r.DelProject(testCtx, p.Key()) + projects, err = r.Projects(testCtx) + assert.NoError(t, err) + assert.NotContains(t, projects, p.Key()) + + exists, err = r.ProjectExists(testCtx, p.Key()) + assert.NoError(t, err) + 
 assert.False(t, exists) + + // GetProject should not update the var this time + newProject = schemas.NewProject("foo/bar") + assert.NoError(t, r.GetProject(testCtx, &newProject)) + assert.NotEqual(t, p, newProject) +} + +func TestRedisEnvironmentFunctions(t *testing.T) { + _, r := newTestRedisStore(t) + + environment := schemas.Environment{ + ProjectName: "foo", + ID: 1, + ExternalURL: "bar", + } + + // Set environment + r.SetEnvironment(testCtx, environment) + environments, err := r.Environments(testCtx) + assert.NoError(t, err) + assert.Contains(t, environments, environment.Key()) + assert.Equal(t, environment.ProjectName, environments[environment.Key()].ProjectName) + assert.Equal(t, environment.ID, environments[environment.Key()].ID) + + // Environment exists + exists, err := r.EnvironmentExists(testCtx, environment.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetEnvironment should succeed + newEnvironment := schemas.Environment{ + ProjectName: "foo", + ID: 1, + } + assert.NoError(t, r.GetEnvironment(testCtx, &newEnvironment)) + assert.Equal(t, environment.ExternalURL, newEnvironment.ExternalURL) + + // Count + count, err := r.EnvironmentsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete Environment + r.DelEnvironment(testCtx, environment.Key()) + environments, err = r.Environments(testCtx) + assert.NoError(t, err) + assert.NotContains(t, environments, environment.Key()) + + exists, err = r.EnvironmentExists(testCtx, environment.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetEnvironment should not update the var this time + newEnvironment = schemas.Environment{ + ProjectName: "foo", + ID: 1, + } + assert.NoError(t, r.GetEnvironment(testCtx, &newEnvironment)) + assert.NotEqual(t, environment, newEnvironment) +} + +func TestRedisRefFunctions(t *testing.T) { + _, r := newTestRedisStore(t) + + p := schemas.NewProject("foo/bar") + p.Topics = "salty" + ref := schemas.NewRef( + p, + 
 schemas.RefKindBranch, + "sweet", + ) + + // Set ref + r.SetRef(testCtx, ref) + projectsRefs, err := r.Refs(testCtx) + assert.NoError(t, err) + assert.Contains(t, projectsRefs, ref.Key()) + assert.Equal(t, ref, projectsRefs[ref.Key()]) + + // Ref exists + exists, err := r.RefExists(testCtx, ref.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetRef should succeed + newRef := schemas.Ref{ + Project: schemas.NewProject("foo/bar"), + Kind: schemas.RefKindBranch, + Name: "sweet", + } + assert.NoError(t, r.GetRef(testCtx, &newRef)) + assert.Equal(t, ref, newRef) + + // Count + count, err := r.RefsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete Ref + r.DelRef(testCtx, ref.Key()) + projectsRefs, err = r.Refs(testCtx) + assert.NoError(t, err) + assert.NotContains(t, projectsRefs, ref.Key()) + + exists, err = r.RefExists(testCtx, ref.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetRef should not update the var this time + newRef = schemas.Ref{ + Kind: schemas.RefKindBranch, + Project: schemas.NewProject("foo/bar"), + Name: "sweet", + } + assert.NoError(t, r.GetRef(testCtx, &newRef)) + assert.NotEqual(t, ref, newRef) +} + +func TestRedisMetricFunctions(t *testing.T) { + _, r := newTestRedisStore(t) + + m := schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: prometheus.Labels{ + "foo": "bar", + }, + Value: 5, + } + + // Set metric + r.SetMetric(testCtx, m) + metrics, err := r.Metrics(testCtx) + assert.NoError(t, err) + assert.Contains(t, metrics, m.Key()) + assert.Equal(t, m, metrics[m.Key()]) + + // Metric exists + exists, err := r.MetricExists(testCtx, m.Key()) + assert.NoError(t, err) + assert.True(t, exists) + + // GetMetric should succeed + newMetric := schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: prometheus.Labels{ + "foo": "bar", + }, + } + assert.NoError(t, r.GetMetric(testCtx, &newMetric)) + assert.Equal(t, m, newMetric) + + // Count + count, err := 
r.MetricsCount(testCtx) + assert.NoError(t, err) + assert.Equal(t, int64(1), count) + + // Delete Metric + r.DelMetric(testCtx, m.Key()) + metrics, err = r.Metrics(testCtx) + assert.NoError(t, err) + assert.NotContains(t, metrics, m.Key()) + + exists, err = r.MetricExists(testCtx, m.Key()) + assert.NoError(t, err) + assert.False(t, exists) + + // GetMetric should not update the var this time + newMetric = schemas.Metric{ + Kind: schemas.MetricKindCoverage, + Labels: prometheus.Labels{ + "foo": "bar", + }, + } + assert.NoError(t, r.GetMetric(testCtx, &newMetric)) + assert.NotEqual(t, m, newMetric) +} + +func TestRedisKeepalive(t *testing.T) { + mr, r := newTestRedisStore(t) + + uuidString := uuid.New().String() + resp, err := r.(*Redis).SetKeepalive(testCtx, uuidString, time.Second) + assert.True(t, resp) + assert.NoError(t, err) + + resp, err = r.(*Redis).KeepaliveExists(testCtx, uuidString) + assert.True(t, resp) + assert.NoError(t, err) + + mr.FastForward(2 * time.Second) + + resp, err = r.(*Redis).KeepaliveExists(testCtx, uuidString) + assert.False(t, resp) + assert.NoError(t, err) +} + +func TestGetRedisQueueKey(t *testing.T) { + assert.Equal(t, "task:GarbageCollectEnvironments:foo", getRedisQueueKey(schemas.TaskTypeGarbageCollectEnvironments, "foo")) +} + +func TestRedisQueueTask(t *testing.T) { + mr, r := newTestRedisStore(t) + + r.(*Redis).SetKeepalive(testCtx, "controller1", time.Second) + + ok, err := r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "controller1") + assert.True(t, ok) + assert.NoError(t, err) + + // The keepalive of controller1 not being expired, we should not requeue the task + ok, err = r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "controller2") + assert.False(t, ok) + assert.NoError(t, err) + + // The keepalive of controller1 being expired, we should requeue the task + mr.FastForward(2 * time.Second) + + ok, err = r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "controller2") + assert.True(t, ok) + 
assert.NoError(t, err) +} + +func TestRedisUnqueueTask(t *testing.T) { + _, r := newTestRedisStore(t) + + r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + count, _ := r.ExecutedTasksCount(testCtx) + assert.Equal(t, uint64(0), count) + + assert.NoError(t, r.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo")) + count, _ = r.ExecutedTasksCount(testCtx) + assert.Equal(t, uint64(1), count) +} + +func TestRedisCurrentlyQueuedTasksCount(t *testing.T) { + _, r := newTestRedisStore(t) + + r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "bar", "") + r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "baz", "") + + count, _ := r.CurrentlyQueuedTasksCount(testCtx) + assert.Equal(t, uint64(3), count) + r.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo") + count, _ = r.CurrentlyQueuedTasksCount(testCtx) + assert.Equal(t, uint64(2), count) +} + +func TestRedisExecutedTasksCount(t *testing.T) { + _, r := newTestRedisStore(t) + + r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "foo", "") + r.QueueTask(testCtx, schemas.TaskTypePullMetrics, "bar", "") + r.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo") + r.UnqueueTask(testCtx, schemas.TaskTypePullMetrics, "foo") + + count, _ := r.ExecutedTasksCount(testCtx) + assert.Equal(t, uint64(1), count) +} diff --git a/pkg/store/store.go b/pkg/store/store.go new file mode 100644 index 00000000..02a5bdf2 --- /dev/null +++ b/pkg/store/store.go @@ -0,0 +1,109 @@ +package store + +import ( + "context" + + "github.com/redis/go-redis/v9" + log "github.com/sirupsen/logrus" + "go.opentelemetry.io/otel" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +// Store .. 
+type Store interface { + SetProject(ctx context.Context, p schemas.Project) error + DelProject(ctx context.Context, pk schemas.ProjectKey) error + GetProject(ctx context.Context, p *schemas.Project) error + ProjectExists(ctx context.Context, pk schemas.ProjectKey) (bool, error) + Projects(ctx context.Context) (schemas.Projects, error) + ProjectsCount(ctx context.Context) (int64, error) + SetEnvironment(ctx context.Context, e schemas.Environment) error + DelEnvironment(ctx context.Context, ek schemas.EnvironmentKey) error + GetEnvironment(ctx context.Context, e *schemas.Environment) error + EnvironmentExists(ctx context.Context, ek schemas.EnvironmentKey) (bool, error) + Environments(ctx context.Context) (schemas.Environments, error) + EnvironmentsCount(ctx context.Context) (int64, error) + SetRef(ctx context.Context, r schemas.Ref) error + DelRef(ctx context.Context, rk schemas.RefKey) error + GetRef(ctx context.Context, r *schemas.Ref) error + RefExists(ctx context.Context, rk schemas.RefKey) (bool, error) + Refs(ctx context.Context) (schemas.Refs, error) + RefsCount(ctx context.Context) (int64, error) + SetMetric(ctx context.Context, m schemas.Metric) error + DelMetric(ctx context.Context, mk schemas.MetricKey) error + GetMetric(ctx context.Context, m *schemas.Metric) error + MetricExists(ctx context.Context, mk schemas.MetricKey) (bool, error) + Metrics(ctx context.Context) (schemas.Metrics, error) + MetricsCount(ctx context.Context) (int64, error) + + // Helpers to keep track of currently queued tasks and avoid scheduling them + // twice at the risk of ending up with loads of dangling goroutines being locked + QueueTask(ctx context.Context, tt schemas.TaskType, taskUUID, processUUID string) (bool, error) + UnqueueTask(ctx context.Context, tt schemas.TaskType, taskUUID string) error + CurrentlyQueuedTasksCount(ctx context.Context) (uint64, error) + ExecutedTasksCount(ctx context.Context) (uint64, error) +} + +// NewLocalStore .. 
+func NewLocalStore() Store { + return &Local{ + projects: make(schemas.Projects), + environments: make(schemas.Environments), + refs: make(schemas.Refs), + metrics: make(schemas.Metrics), + } +} + +// NewRedisStore .. +func NewRedisStore(client *redis.Client) Store { + return &Redis{ + Client: client, + } +} + +// New creates a new store and populates it with +// provided []schemas.Project. +func New( + ctx context.Context, + r *redis.Client, + projects config.Projects, +) (s Store) { + ctx, span := otel.Tracer("gitlab-ci-pipelines-exporter").Start(ctx, "store:New") + defer span.End() + + if r != nil { + s = NewRedisStore(r) + } else { + s = NewLocalStore() + } + + // Load all the configured projects in the store + for _, p := range projects { + sp := schemas.Project{Project: p} + + exists, err := s.ProjectExists(ctx, sp.Key()) + if err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": p.Name, + }). + WithError(err). + Error("reading project from the store") + } + + if !exists { + if err = s.SetProject(ctx, sp); err != nil { + log.WithContext(ctx). + WithFields(log.Fields{ + "project-name": p.Name, + }). + WithError(err). 
+ Error("writing project in the store") + } + } + } + + return +} diff --git a/pkg/store/store_test.go b/pkg/store/store_test.go new file mode 100644 index 00000000..61ff7014 --- /dev/null +++ b/pkg/store/store_test.go @@ -0,0 +1,56 @@ +package store + +import ( + "context" + "testing" + + "github.com/redis/go-redis/v9" + "github.com/stretchr/testify/assert" + + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/config" + "github.com/mvisonneau/gitlab-ci-pipelines-exporter/pkg/schemas" +) + +var testCtx = context.Background() + +func TestNewLocalStore(t *testing.T) { + expectedValue := &Local{ + projects: make(schemas.Projects), + environments: make(schemas.Environments), + refs: make(schemas.Refs), + metrics: make(schemas.Metrics), + } + assert.Equal(t, expectedValue, NewLocalStore()) +} + +func TestNewRedisStore(t *testing.T) { + redisClient := redis.NewClient(&redis.Options{}) + expectedValue := &Redis{ + Client: redisClient, + } + + assert.Equal(t, expectedValue, NewRedisStore(redisClient)) +} + +func TestNew(t *testing.T) { + localStore := New(testCtx, nil, config.Projects{}) + assert.IsType(t, &Local{}, localStore) + + redisClient := redis.NewClient(&redis.Options{}) + redisStore := New(testCtx, redisClient, config.Projects{}) + assert.IsType(t, &Redis{}, redisStore) + + localStore = New(testCtx, nil, config.Projects{ + { + Name: "foo", + }, + { + Name: "foo", + }, + { + Name: "bar", + }, + }) + count, _ := localStore.ProjectsCount(testCtx) + assert.Equal(t, int64(2), count) +} diff --git a/renovate.json5 b/renovate.json5 new file mode 100644 index 00000000..b8d7fafa --- /dev/null +++ b/renovate.json5 @@ -0,0 +1,28 @@ +{ + $schema: "https://docs.renovatebot.com/renovate-schema.json", + extends: ["config:best-practices"], + + postUpdateOptions: [ + "gomodTidy", // Run go mod tidy after Go module updates. 
 + ], + + customManagers: [ + // Update Makefile's go dependencies + { + customType: "regex", + fileMatch: ["^Makefile$"], + matchStrings: ["go run (?<depName>.*?)@(?<currentValue>.*?) "], + datasourceTemplate: "go", + }, + ], + + packageRules: [ + // Group all minor, patch and digest updates into a single PR + { + groupName: "all patch and minor", + matchPackageNames: ["*"], + matchUpdateTypes: ["minor", "patch", "digest"], + automerge: true, + }, + ], +}