Skip to content

Commit

Permalink
Fix tests (#679)
Browse files Browse the repository at this point in the history
<!--
Thanks for taking precious time for making a PR.

Before creating a pull request, please make sure:
- Your PR solves one problem for which an issue exists and a solution has
been discussed
- You have read the guide for contributing
- See
https://github.com/beatlabs/patron/blob/master/README.md#how-to-contribute
- You signed all your commits (otherwise we won't be able to merge the
PR)
  - See https://github.com/beatlabs/patron/blob/master/SIGNYOURWORK.md
- You added unit tests for the new functionality
- You mention in the PR description which issue it is addressing, e.g.
"Resolves #123"
-->

## Which problem is this PR solving?

Tests are fragile and time out.

## Short description of the changes

- Private GH runner to get more juice so that the tests do not fail
- Merged e2e with CI
  • Loading branch information
mantzas authored Mar 17, 2024
1 parent 256a8cb commit 9e81221
Show file tree
Hide file tree
Showing 14 changed files with 106 additions and 174 deletions.
23 changes: 16 additions & 7 deletions .github/workflows/go.yml → .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,15 @@ on:
jobs:
lint:
name: Lint and fmt check
runs-on: ubuntu-latest
runs-on: self-hosted
steps:
- name: Check out source code
uses: actions/checkout@v4

- name: Set up Go
uses: actions/setup-go@v5
with:
go-version-file: 'go.mod'
go-version-file: "go.mod"

- name: Linting and fmt check
run: make lint
Expand All @@ -26,7 +26,7 @@ jobs:
strategy:
matrix:
go-image-version: ["1.21"]
runs-on: ubuntu-latest
runs-on: self-hosted
steps:
- name: Set up Go ${{ matrix.go-image-version }}
uses: actions/setup-go@v5
Expand All @@ -39,11 +39,10 @@ jobs:
- name: Start dependencies
run: make deps-start

- name: Wait dependencies
run: sleep 60

- name: Running CI
run: make ci
run: |
sleep 30
make ci
- name: Convert coverage file to lcov
run: |
Expand All @@ -55,3 +54,13 @@ jobs:
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: ./coverage.lcov

- name: e2e
run: |
go build -o service ./service/
nohup ./service/service &
go run client/main.go
working-directory: ./examples

- name: Stop dependencies
run: make deps-stop
37 changes: 0 additions & 37 deletions .github/workflows/e2e.yml

This file was deleted.

2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ cover: fmtcheck
go tool cover -func=coverage.txt && \
rm coverage.txt

ci: deps-start
ci:
go test ./... -race -cover -mod=vendor -coverprofile=coverage.txt -covermode=atomic -tags=integration && \
mv coverage.txt coverage.txt.tmp && \
cat coverage.txt.tmp | grep -v "/cmd/patron/" > coverage.txt
Expand Down
27 changes: 25 additions & 2 deletions client/amqp/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ import (
"context"
"testing"

testamqp "github.com/beatlabs/patron/test/amqp"
"github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/ext"
"github.com/opentracing/opentracing-go/mocktracer"
Expand All @@ -27,7 +26,7 @@ func TestRun(t *testing.T) {
opentracing.SetGlobalTracer(mtr)
t.Cleanup(func() { mtr.Reset() })

require.NoError(t, testamqp.CreateQueue(endpoint, queue))
require.NoError(t, createQueue(endpoint, queue))

pub, err := New(endpoint)
require.NoError(t, err)
Expand Down Expand Up @@ -72,3 +71,27 @@ func TestRun(t *testing.T) {
assert.NoError(t, channel.Close())
assert.NoError(t, conn.Close())
}

// createQueue ensures a clean queue named queue exists on the AMQP broker
// reachable at endpoint. Any pre-existing queue with the same name is
// deleted first so the test starts from an empty, freshly declared queue.
//
// Unlike the previous version, the connection and channel opened here are
// closed before returning, so the helper does not leak broker resources
// across test runs.
func createQueue(endpoint, queue string) error {
	conn, err := amqp.Dial(endpoint)
	if err != nil {
		return err
	}
	// Closing the connection also tears down the channel; the test body
	// opens its own connection for the actual assertions.
	defer func() { _ = conn.Close() }()

	channel, err := conn.Channel()
	if err != nil {
		return err
	}
	defer func() { _ = channel.Close() }()

	// Remove any leftover queue from a previous run
	// (ifUnused=false, ifEmpty=false, noWait=false).
	if _, err = channel.QueueDelete(queue, false, false, false); err != nil {
		return err
	}

	// Declare a durable, non-autodelete, non-exclusive queue with no
	// extra arguments, matching the original declaration.
	_, err = channel.QueueDeclare(queue, true, false, false, false, nil)
	return err
}
2 changes: 1 addition & 1 deletion client/kafka/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ const (
clientTopic = "clientTopic"
)

var brokers = []string{"127.0.0.1:9093"}
var brokers = []string{"127.0.0.1:9092"}

func TestNewAsyncProducer_Success(t *testing.T) {
saramaCfg, err := DefaultProducerSaramaConfig("test-producer", true)
Expand Down
12 changes: 9 additions & 3 deletions component/async/kafka/group/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ const (
successTopic1 = "successTopic1"
failAllRetriesTopic1 = "failAllRetriesTopic1"
failAndRetryTopic1 = "failAndRetryTopic1"
broker = "127.0.0.1:9093"
broker = "127.0.0.1:9092"
)

func TestGroupConsume(t *testing.T) {
Expand Down Expand Up @@ -72,7 +72,10 @@ func TestGroupConsume(t *testing.T) {

messages := make([]*sarama.ProducerMessage, 0, len(sent))
for _, val := range sent {
messages = append(messages, testkafka.CreateProducerMessage(groupTopic1, val))
messages = append(messages, &sarama.ProducerMessage{
Topic: groupTopic1,
Value: sarama.StringEncoder(val),
})
}

err := testkafka.SendMessages(broker, messages...)
Expand Down Expand Up @@ -118,7 +121,10 @@ func TestGroupConsume_ClaimMessageError(t *testing.T) {

time.Sleep(5 * time.Second)

err = testkafka.SendMessages(broker, testkafka.CreateProducerMessage(groupTopic2, "321"))
err = testkafka.SendMessages(broker, &sarama.ProducerMessage{
Topic: groupTopic2,
Value: sarama.StringEncoder("321"),
})
require.NoError(t, err)

select {
Expand Down
32 changes: 25 additions & 7 deletions component/async/kafka/simple/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ const (
simpleTopic5 = "simpleTopic5"
simpleTopic6 = "simpleTopic6"
simpleTopic7 = "simpleTopic7"
broker = "127.0.0.1:9093"
broker = "127.0.0.1:9092"
)

func TestSimpleConsume(t *testing.T) {
Expand Down Expand Up @@ -65,7 +65,10 @@ func TestSimpleConsume(t *testing.T) {

messages := make([]*sarama.ProducerMessage, 0, len(sent))
for _, val := range sent {
messages = append(messages, testkafka.CreateProducerMessage(simpleTopic1, val))
messages = append(messages, &sarama.ProducerMessage{
Topic: simpleTopic1,
Value: sarama.StringEncoder(val),
})
}

err := testkafka.SendMessages(broker, messages...)
Expand Down Expand Up @@ -117,7 +120,10 @@ func TestSimpleConsume_ClaimMessageError(t *testing.T) {

time.Sleep(5 * time.Second)

err := testkafka.SendMessages(broker, testkafka.CreateProducerMessage(simpleTopic2, "123"))
err := testkafka.SendMessages(broker, &sarama.ProducerMessage{
Topic: simpleTopic2,
Value: sarama.StringEncoder("123"),
})
require.NoError(t, err)

select {
Expand All @@ -141,7 +147,10 @@ func TestSimpleConsume_WithDurationOffset(t *testing.T) {

messages := make([]*sarama.ProducerMessage, 0)
for _, val := range sent {
messages = append(messages, testkafka.CreateProducerMessage(simpleTopic3, val))
messages = append(messages, &sarama.ProducerMessage{
Topic: simpleTopic3,
Value: sarama.StringEncoder(val),
})
}

err := testkafka.SendMessages(broker, messages...)
Expand Down Expand Up @@ -206,7 +215,10 @@ func TestSimpleConsume_WithTimestampOffset(t *testing.T) {
messages := make([]*sarama.ProducerMessage, 0)
for i, tm := range times {
val := sent[i]
msg := testkafka.CreateProducerMessage(simpleTopic6, val)
msg := &sarama.ProducerMessage{
Topic: simpleTopic6,
Value: sarama.StringEncoder(val),
}
msg.Timestamp = tm
messages = append(messages, msg)
}
Expand Down Expand Up @@ -263,7 +275,10 @@ func TestSimpleConsume_WithNotificationOnceReachingLatestOffset(t *testing.T) {
messages := make([]*sarama.ProducerMessage, 0)
numberOfMessages := 10
for i := 0; i < numberOfMessages; i++ {
messages = append(messages, testkafka.CreateProducerMessage(simpleTopic4, "foo"))
messages = append(messages, &sarama.ProducerMessage{
Topic: simpleTopic4,
Value: sarama.StringEncoder("foo"),
})
}

err := testkafka.SendMessages(broker, messages...)
Expand Down Expand Up @@ -379,7 +394,10 @@ func TestSimpleConsume_WithNotificationOnceReachingLatestOffset_WithTimestampOff
messages := make([]*sarama.ProducerMessage, 0)
for i, tm := range times {
val := sent[i]
msg := testkafka.CreateProducerMessage(simpleTopic7, val)
msg := &sarama.ProducerMessage{
Topic: simpleTopic7,
Value: sarama.StringEncoder(val),
}
msg.Timestamp = tm
messages = append(messages, msg)
}
Expand Down
2 changes: 1 addition & 1 deletion component/kafka/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ const (
successTopic2 = "successTopic2"
failAllRetriesTopic2 = "failAllRetriesTopic2"
failAndRetryTopic2 = "failAndRetryTopic2"
broker = "127.0.0.1:9093"
broker = "127.0.0.1:9092"
groupSuffix = "-group"
)

Expand Down
47 changes: 26 additions & 21 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -1,27 +1,32 @@
version: "3.8"
services:
zookeeper:
image: bitnami/zookeeper:latest
ports:
- "2181:2181"
environment:
- ALLOW_ANONYMOUS_LOGIN=yes
kafka:
image: bitnami/kafka:2
image: confluentinc/cp-kafka:7.6.0
hostname: broker
container_name: broker
ports:
- "9092:9092"
- "9093:9093"
- "9101:9101"
environment:
- KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
- ALLOW_PLAINTEXT_LISTENER=yes
- KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,EXTERNAL:PLAINTEXT
- KAFKA_CFG_LISTENERS=CLIENT://:9092,EXTERNAL://:9093
- KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:9092,EXTERNAL://localhost:9093
- KAFKA_CFG_INTER_BROKER_LISTENER_NAME=CLIENT
depends_on:
- zookeeper
KAFKA_NODE_ID: 1
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092'
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_JMX_PORT: 9101
KAFKA_JMX_HOSTNAME: localhost
KAFKA_PROCESS_ROLES: 'broker,controller'
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
KAFKA_LISTENERS: 'PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092'
KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
# Replace CLUSTER_ID with a unique base64 UUID using "bin/kafka-storage.sh random-uuid"
# See https://docs.confluent.io/kafka/operations-tools/kafka-tools.html#kafka-storage-sh
CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
rabbitmq:
image: docker.io/bitnami/rabbitmq:latest
image: docker.io/bitnami/rabbitmq:3.12
ports:
- '4369:4369'
- '5551:5551'
Expand Down Expand Up @@ -60,7 +65,7 @@ services:
volumes:
- "redis_data:/bitnami/redis/data"
localstack:
image: localstack/localstack:latest
image: localstack/localstack:3
ports:
- "127.0.0.1:4566:4566" # LocalStack Gateway
- "127.0.0.1:4510-4559:4510-4559" # external services port range
Expand All @@ -71,7 +76,7 @@ services:
- "${TMPDIR:-/tmp}/localstack:/var/lib/localstack"
- "/var/run/docker.sock:/var/run/docker.sock"
hivemq:
image: hivemq/hivemq4:latest
image: hivemq/hivemq4:4.26.0
restart: always
ports:
- target: 1883
Expand All @@ -87,7 +92,7 @@ services:
HIVEMQ_CONTROL_CENTER_PASSWORD: "123456"
HIVEMQ_CLUSTER_TRANSPORT_TYPE: "TCP"
mongo:
image: mongo:5
image: mongo:7
restart: always
ports:
- "27017:27017"
Expand Down
2 changes: 1 addition & 1 deletion examples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ The client implements all Patron clients for the components used by the service.
First we need to start the dependencies of the example by running:

```bash
docker-compose -f examples/docker-compose.yml up -d
docker-compose -f docker-compose.yml up -d
```

Next we run the service:
Expand Down
Loading

0 comments on commit 9e81221

Please sign in to comment.