
Commit

Add testing steps
8naama authored Sep 22, 2024
2 parents 38626d9 + 6e08ab4 commit 724b855
Showing 3 changed files with 85 additions and 0 deletions.
1 change: 1 addition & 0 deletions .github/workflows/create-release.yml
@@ -13,6 +13,7 @@ on:
- 'README.md'
- 'version'
- 'CONTRIBUTING.md'
- 'Makefile'

jobs:
update_version:
55 changes: 55 additions & 0 deletions Makefile
@@ -0,0 +1,55 @@
############################################################
# This Makefile generates binary files for testing purposes.
############################################################

# Initialize paths
CURR_PATH := $(CURDIR)
ASSETS_PATH := $(CURR_PATH)/assets
TMP_PATH := $(CURR_PATH)/tmp

# Starting point
.PHONY: all
all: create_assets windows linux mac

# Create the assets directory
.PHONY: create_assets
create_assets:
	mkdir -p $(ASSETS_PATH)

# Build Windows binaries
.PHONY: windows
windows: create_assets
	mkdir -p $(TMP_PATH)
	cd $(TMP_PATH) && \
	cp -r $(CURR_PATH)/scripts/windows/. $(CURR_PATH)/version . && \
	zip -r $(ASSETS_PATH)/agent_windows.zip . && \
	rm -rf $(TMP_PATH)/*
	cd $(TMP_PATH) && \
	cp -r $(CURR_PATH)/datasources/windows/. $(CURR_PATH)/resources . && \
	zip -r $(ASSETS_PATH)/windows_kubernetes_aks.zip kubernetes/aks resources && \
	zip -r $(ASSETS_PATH)/windows_kubernetes_eks.zip kubernetes/eks resources && \
	zip -r $(ASSETS_PATH)/windows_kubernetes_gke.zip kubernetes/gke resources && \
	zip -r $(ASSETS_PATH)/windows_kubernetes_digitalocean.zip kubernetes/digitalocean resources && \
	zip -r $(ASSETS_PATH)/windows_localhost_windows.zip localhost/windows resources
	rm -rf $(TMP_PATH)

# Build Linux binaries
.PHONY: linux
linux: create_assets
	tar -czvf $(ASSETS_PATH)/agent_linux.tar.gz -C scripts/linux . -C $(CURR_PATH) version
	tar -czvf $(ASSETS_PATH)/linux_kubernetes_aks.tar.gz -C datasources/linux kubernetes/aks -C $(CURR_PATH) resources -C $(CURR_PATH) resources-linux
	tar -czvf $(ASSETS_PATH)/linux_kubernetes_eks.tar.gz -C datasources/linux kubernetes/eks -C $(CURR_PATH) resources -C $(CURR_PATH) resources-linux
	tar -czvf $(ASSETS_PATH)/linux_kubernetes_gke.tar.gz -C datasources/linux kubernetes/gke -C $(CURR_PATH) resources -C $(CURR_PATH) resources-linux
	tar -czvf $(ASSETS_PATH)/linux_kubernetes_digitalocean.tar.gz -C datasources/linux kubernetes/digitalocean -C $(CURR_PATH) resources -C $(CURR_PATH) resources-linux
	tar -czvf $(ASSETS_PATH)/linux_aws_ec2.tar.gz -C datasources/linux aws/ec2 -C $(CURR_PATH) resources -C $(CURR_PATH) resources-linux
	tar -czvf $(ASSETS_PATH)/linux_localhost_linux.tar.gz -C datasources/linux localhost/linux -C $(CURR_PATH) resources -C $(CURR_PATH) resources-linux

# Build Mac binaries
.PHONY: mac
mac: create_assets
	tar -czvf $(ASSETS_PATH)/agent_mac.tar.gz -C scripts/mac . -C $(CURR_PATH) version
	tar -czvf $(ASSETS_PATH)/mac_kubernetes_aks.tar.gz -C datasources/mac kubernetes/aks -C $(CURR_PATH) resources -C $(CURR_PATH) resources-mac
	tar -czvf $(ASSETS_PATH)/mac_kubernetes_eks.tar.gz -C datasources/mac kubernetes/eks -C $(CURR_PATH) resources -C $(CURR_PATH) resources-mac
	tar -czvf $(ASSETS_PATH)/mac_kubernetes_gke.tar.gz -C datasources/mac kubernetes/gke -C $(CURR_PATH) resources -C $(CURR_PATH) resources-mac
	tar -czvf $(ASSETS_PATH)/mac_kubernetes_digitalocean.tar.gz -C datasources/mac kubernetes/digitalocean -C $(CURR_PATH) resources -C $(CURR_PATH) resources-mac
	tar -czvf $(ASSETS_PATH)/mac_localhost_mac.tar.gz -C datasources/mac localhost/mac -C $(CURR_PATH) resources -C $(CURR_PATH) resources-mac
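
The `all` target chains `create_assets`, `windows`, `linux`, and `mac`, but each platform target can also be rebuilt on its own. A minimal usage sketch, assuming `make`, `tar`, and `zip` are installed and the commands are run from the repository root:

```shell
# Rebuild only the Linux archives into ./assets
make linux

# Start from a clean slate and regenerate everything
rm -rf assets tmp && make
```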
29 changes: 29 additions & 0 deletions README.md
@@ -12,6 +12,7 @@
- [Linux](#linux-1)
- [MacOS](#macos-1)
- [Windows](#windows-1)
- [Test and Build locally](#test-and-build-locally)

This repo contains all the scripts needed to ship logs, metrics and traces to Logz.io for supported datasources using the Logz.io agent.

@@ -148,3 +149,31 @@ bash <(curl -sSL https://github.com/logzio/logzio-agent-manifest/releases/downlo
powershell { iex "& { $(irm https://github.com/logzio/logzio-agent-manifest/releases/download/latest/run_prerequisites_windows.ps1) } --path=LOGZIO_REPO_DATASOURCE_PATH" }
```

## Test and Build locally
If you're contributing code to this repository, please refer to the [Contribution Guidelines](CONTRIBUTING.md) for more details on the process.

Before submitting a Pull Request (PR), ensure that you test your changes locally by following these steps:
1. **Generate binary files:**

Run the following command from the repository root to generate the binary files (the `.tar.gz` and `.zip` archives) in the `assets` folder.
```shell
make
```

2. **Create an Agent:**

Log in to your Logz.io account and create an Agent using the relevant integration that you want to test. This will provide you with a command similar to the following:
```shell
sudo mkdir -p /opt/logzio-agent; sudo chown -R $USER: /opt/logzio-agent; mkdir -p /tmp/logzio; curl -fsSL 'https://github.com/logzio/logzio-agent-manifest/releases/latest/download/agent_mac.tar.gz' -o /tmp/logzio/agent_mac.tar.gz; tar -zxf /tmp/logzio/agent_mac.tar.gz --directory /tmp/logzio; bash /tmp/logzio/agent.bash --url=https://app.logz.io --id=<<SOME_ID>>
```
3. **Update the command:**
Replace the `curl -fsSL` command with `cp <<LOCAL_PATH_TO_FILE>> /tmp/logzio/`, where `<<LOCAL_PATH_TO_FILE>>` is the path to the binary generated in step 1. Example:
```shell
sudo mkdir -p /opt/logzio-agent; sudo chown -R $USER: /opt/logzio-agent; mkdir -p /tmp/logzio; cp <<LOCAL_PATH_TO_FILE>> /tmp/logzio/; tar -zxf /tmp/logzio/agent_mac.tar.gz --directory /tmp/logzio; bash /tmp/logzio/agent.bash --url=https://app.logz.io --id=<<SOME_ID>>
```
4. **Test your changes:**
Run the modified command locally to test your changes and ensure everything works as expected (a combined example is sketched below).
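
For instance, when testing a change to the macOS agent script, the complete local flow could look like the sketch below. It assumes a macOS machine with `make`, `tar`, and `zip` available; `<<SOME_ID>>` remains the agent ID placeholder from step 2, and `agent_mac.tar.gz` is the archive produced by the Makefile's `mac` target.
```shell
# 1. Generate the test archives locally (output lands in ./assets)
make
ls assets/   # agent_mac.tar.gz, mac_localhost_mac.tar.gz, ...

# 2-4. Run the command from step 2, copying the local archive
#      instead of downloading the released one
sudo mkdir -p /opt/logzio-agent
sudo chown -R $USER: /opt/logzio-agent
mkdir -p /tmp/logzio
cp assets/agent_mac.tar.gz /tmp/logzio/
tar -zxf /tmp/logzio/agent_mac.tar.gz --directory /tmp/logzio
bash /tmp/logzio/agent.bash --url=https://app.logz.io --id=<<SOME_ID>>
```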
