Add LogEvents interceptor, update test schemas
jclausen committed Sep 25, 2024
1 parent 51552fb commit f9fc2a1
Showing 19 changed files with 297 additions and 62 deletions.
1 change: 0 additions & 1 deletion .github/workflows/tests.yml
@@ -64,7 +64,6 @@ jobs:
env:
ENVIRONMENT: development
run: |
cd test-harness
box server start serverConfigFile="server-${{ matrix.cfengine }}.json" --noSaveSettings --debug
cd ../
curl http://127.0.0.1:60299
1 change: 1 addition & 0 deletions .gitignore
@@ -1,5 +1,6 @@
.vscode

.engine/**
.artifacts/**
.tmp/**

6 changes: 3 additions & 3 deletions .markdownlint.json
@@ -8,7 +8,7 @@
"no-multiple-blanks": {
"maximum": 2
},
"no-duplicate-header" : {
"siblings_only" : true
}
"no-duplicate-header" : false,
"no-duplicate-heading" : false,
"no-inline-html" : false
}
42 changes: 20 additions & 22 deletions ModuleConfig.cfc
@@ -33,7 +33,7 @@ component {
"applicationName" : getSystemSetting( "LOGSTASH_APPLICATION_NAME", server.coldfusion.productname eq "Lucee" ? getApplicationSettings().name : getApplicationMetadata().name ),
// Whether to enable the API endpoints to receive log messages
"enableAPI" : getSystemSetting( "LOGSTASH_ENABLE_API", true ),
// Whether to automatically enabled log appenders
// Whether to automatically enable the logstash logging appender
"enableAppenders" : getSystemSetting( "LOGSTASH_ENABLE_APPENDERS", true ),
// The type of transmission mode for this module - `direct` or `api`
"transmission" : getSystemSetting( "LOGSTASH_TRANSMISSION_METHOD", "direct" ),
@@ -68,7 +68,9 @@ component {
"indexPrefix" : getSystemSetting( "LOGSTASH_INDEX_PREFIX", "" ),
"migrateIndices" : getSystemSetting( "LOGSTASH_MIGRATE_V2", false ),
// Whether to throw an error when a log document fails to save
"throwOnError" : true
"throwOnError" : true,
// An array of detached appenders which can be used with the `writeToAppender` interception point or directly through the elasticsearch module AppenderService
"detachedAppenders" : []
};

// Try to look up the release based on a box.json
@@ -92,27 +94,15 @@

interceptors = [
//API Security Interceptor
{ class="logstash.interceptors.APISecurity" }
{ class="logstash.interceptors.APISecurity" },
{ class="logstash.interceptors.LogEvents" }
];

if( settings.enableAPI ){
routes = [
// Module Entry Point
{
pattern = "/api/logs",
handler = "API",
action = {
"HEAD" : "onInvalidHTTPMethod",
"OPTIONS" : "onInvalidHTTPMethod",
"GET" : "onInvalidHTTPMethod",
"POST" : "create",
"DELETE" : "onInvalidHTTPMethod",
"PUT" : "create",
"PATCH" : "onInvalidHTTPMethod"
}
}
];
}
interceptorSettings = {
customInterceptionPoints : [
"writeToAppender"
]
};

}

@@ -154,9 +144,17 @@ component {
// Register the appender with the root logger and turn the logger on.
var root = logBox.getRootLogger();
root.addAppender( appenders[ 'logstash_appender' ] );

}

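// Register any configured detached appenders with the cbElasticsearch AppenderService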
settings.detachedAppenders.each( ( appender ) => {
wirebox.getInstance( "AppenderService@cbelasticsearch" )
.createDetachedAppender(
appender.name,
appender.properties ?: {},
appender.class ?: "cbelasticsearch.models.logging.LogstashAppender"
);
} );

// If the API is enabled, map the event appender
if( settings.enableAPI ){
binder.map( "EventAppender@logstash" )
15 changes: 14 additions & 1 deletion build/docs/Configuration.md
@@ -56,7 +56,20 @@ moduleSettings = {
"indexPrefix" : getSystemSetting( "LOGSTASH_INDEX_PREFIX", "" ),
"migrateIndices" : getSystemSetting( "LOGSTASH_MIGRATE_V2", false ),
// Whether to throw an error when a log document fails to save
"throwOnError" : true
"throwOnError" : true,
// An array of detached appenders which can be used with the `writeToAppender` interception point or directly through the elasticsearch module AppenderService
"detachedAppenders" : [
{
"name" : "myCustomAppender",
"properties" : {
"retentionDays" : 1,
// The name of the application which will be transmitted with the log data and used for grouping
"applicationName" : "Custom Detached Appender Logs",
// The max shard size at which the hot phase will rollover data
"rolloverSize" : "1gb"
}
}
]
}
}
```
19 changes: 19 additions & 0 deletions build/docs/Logging.md
@@ -101,6 +101,25 @@ Fields typed `keyword` are not searchable, but are exact match fields. This all

_Note: There are two timestamp fields which contain the same data: `timestamp` and `@timestamp`. The latter is simply provided for easy automation with the default configuration for Logstash logs in Kibana. [Read more on the ELK stack here](https://www.elastic.co/what-is/elk-stack)._

## Detached Appender Logging

If you [configure the `detachedAppenders` array](https://logstash.ortusbooks.com/getting-started/configuration) in your module config, you can send ad-hoc log messages directly to those data streams in Elasticsearch. You may either [log through the `AppenderService` directly](https://cbelasticsearch.ortusbooks.com/logging) or use the convenient interception point the Logstash module provides for logging to your detached appenders.

```java
announce(
"writeToAppender",
{
// The detached appender name
"appender" : "myCustomAppender",
"severity" : "info",
"message" : "Here is my custom log message",
"extraInfo" : {
"foo" : "bar"
}
}
);
```
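
The same result can be achieved without the interception point by calling the `AppenderService` yourself. Below is a minimal sketch, assuming the hypothetical `myCustomAppender` detached appender from the configuration example and a context where `getInstance()` is available (a handler or interceptor, for example); the `logToAppender()` argument order mirrors the call made by the `LogEvents` interceptor added in this commit.

```java
// Minimal sketch: `myCustomAppender` is the hypothetical appender from the configuration docs.
// logToAppender( appender, message, severity, extraInfo ) matches the usage in interceptors/LogEvents.cfc.
var appenderService = getInstance( "AppenderService@cbelasticsearch" );
appenderService.logToAppender(
    "myCustomAppender",                // detached appender name
    "Here is my custom log message",   // log message
    "info",                            // severity
    { "foo" : "bar" }                  // extraInfo struct
);
```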

## API Usage

You may transmit any data directly to the logstash API, as long as it follows the mapped schema above. You may even specify the index prefix to use for the transmission (the actual index name will have the rotational appender timestamps applied, according to your configured rotation frequency). This gives you the flexibility to store additional logs, which may or may not come from your CFML application.
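
As a rough illustration, a transmission might look like the sketch below. The `/api/logs` route and the `POST`/`PUT` verbs come from this module's router; the `/logstash` mount point and the payload keys are assumptions made for illustration, since the API handler itself is not part of this change set.

```java
// Hedged sketch: the /logstash mount point and the payload keys are assumptions;
// only the /api/logs route and the POST/PUT verbs are confirmed by config/Router.cfc.
cfhttp( method="POST", url="http://127.0.0.1:60299/logstash/api/logs", result="apiResult" ) {
    cfhttpparam( type="header", name="Content-Type", value="application/json" );
    cfhttpparam(
        type  = "body",
        value = serializeJSON( {
            "message"   : "A log entry from outside the CFML application",
            "severity"  : "warn",
            "extraInfo" : { "source" : "external-service" }
        } )
    );
}
writeDump( deserializeJSON( apiResult.fileContent ) );
```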
33 changes: 32 additions & 1 deletion changelog.md
@@ -6,64 +6,95 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

----

## [Unreleased]

### Changed

* Remove JSONToRC module dependency as Coldbox handles this by default

## [3.0.4] => 2023-11-19

### Changed

* Bump cbElasticsearch version to 3.2

## [3.0.3] => 2023-06-01

### Changed

* Bump cbElasticsearch version to 3.1

## [3.0.2] => 2023-03-28

### Changed

* Bump cbElasticsearch version to 3.0.2

## [3.0.1] => 2023-03-12

### Changed

* Bump cbElasticsearch version to 3.0.1

## [3.0.0] => 2023-03-03

### Changed

* Updates `cbElasticsearch` dependency for v3
* Module settings and environment variable changes to support v3 of Logstash appender

## [2.0.1] => 2023-01-30

### Fixed

* Revert to old log index pattern until data stream support can be implemented

## [2.0.0] => 2023-01-29

### Changed

* Bump cbElasticsearch version minimum to 2.4.0
* Bump Coldbox to v6
* Changes default log index pattern to `logs-` to reflect v8 changes in Kibana/Logstash defaults

## [1.2.1] => 2022-09-21

### Added

* Added support for `LOGSTASH_APPLICATION_NAME` environment variable

### Changed

* Changed build process to use Github Actions
* Migrated README content to GitBook

## [1.2.0] => 2022-08-12

### Fixed

* Ensured SSL protocol on download location of package

## [1.1.1] => 2020-12-10

### Fixed

* Remove build artifact from final package

## [1.1.0] => 2020-11-03

### Added

* Added additional settings for number of index shards and replicas

### Changed

* Modifies appender preflight to use base appender preflight
* Modifies default logstash index prefix to use Kibana/ES conventions
* Bumps `cbElasticsearch` dependency to `v2.1.0`

## [1.0.0] => 2020-09-11

### Added
* Initial release of module

* Initial release of module
21 changes: 21 additions & 0 deletions config/Router.cfc
@@ -0,0 +1,21 @@
component {

function configure(){
var apiEnabled = controller.getModuleSettings( "logstash", "enableAPI", true );

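// Only register the log intake route when the API is enabled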
if( apiEnabled ){
route( "/api/logs" )
.withAction( {
"HEAD" : "onInvalidHTTPMethod",
"OPTIONS" : "onInvalidHTTPMethod",
"GET" : "onInvalidHTTPMethod",
"POST" : "create",
"DELETE" : "onInvalidHTTPMethod",
"PUT" : "create",
"PATCH" : "onInvalidHTTPMethod"
} )
.toHandler( "API" );
}
}

}
25 changes: 25 additions & 0 deletions interceptors/LogEvents.cfc
@@ -0,0 +1,25 @@
component {

property name="appenderService" inject="AppenderService@cbelasticsearch";


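/**
 * Listens for the custom `writeToAppender` interception point and relays the
 * provided message to the named detached appender via the cbElasticsearch AppenderService.
 */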
function writeToAppender( event, rc, prc, interceptData ){
if( !interceptData.keyExists( "appender" ) ){
throw( type="InvalidArgument", message="The 'appender' key is required in the intercept data. Could not continue." );
} else if( !interceptData.keyExists( "message" ) ){
throw( type="InvalidArgument", message="The 'message' key is required in the intercept data. Could not continue." );
}

param interceptData.extraInfo = {};
param interceptData.severity = "INFO";

appenderService.logToAppender(
interceptData.appender,
interceptData.message,
interceptData.severity,
interceptData.extraInfo
);

}

}
5 changes: 3 additions & 2 deletions test-harness/server-adobe@2018.json
@@ -10,7 +10,8 @@
},
"rewrites":{
"enable":"true"
}
},
"webroot":"test-harness"
},
"openBrowser":"false"
}
}
15 changes: 8 additions & 7 deletions test-harness/server-adobe@2021.json
@@ -1,5 +1,5 @@
{
"name":"logstash-adobe@2018",
"name":"logstash-adobe@2021",
"app":{
"serverHomeDirectory":".engine/adobe2021",
"cfengine":"adobe@2021"
@@ -11,13 +11,14 @@
"rewrites":{
"enable":"true"
},
"webroot":"test-harness",
"aliases":{
"/moduleroot/stachebox/includes":"../includes",
"/stachebox/includes":"../includes"
"/moduleroot/logstash":"../",
"/root":"./test-harness"
}
},
"openBrowser":"false",
"scripts" : {
"onServerInstall" : "cfpm install zip,mysql,document,feed"
}
}
"scripts":{
"onServerInstall":"cfpm install zip,debugger"
}
}
11 changes: 6 additions & 5 deletions test-harness/server-adobe@2023.json
@@ -1,7 +1,7 @@
{
"name":"logstash-adobe@2023",
"app":{
"serverHomeDirectory":"./.engine/adobe2023",
"serverHomeDirectory":".engine/adobe2023",
"cfengine":"adobe@2023"
},
"web":{
@@ -11,13 +11,14 @@
"rewrites":{
"enable":"true"
},
"webroot":"test-harness",
"aliases":{
"/moduleroot/stachebox/includes":"../includes",
"/stachebox/includes":"../includes"
"/moduleroot/logstash":"../",
"/root":"./test-harness"
}
},
"openBrowser":"false",
"scripts":{
"onServerInstall":"cfpm install zip,document,feed"
"onServerInstall":"cfpm install zip,debugger"
}
}
}
7 changes: 6 additions & 1 deletion test-harness/server-lucee@5.json
@@ -10,7 +10,12 @@
},
"rewrites":{
"enable":"true"
},
"webroot":"test-harness",
"aliases":{
"/moduleroot/logstash":"./",
"/root":"./test-harness"
}
},
"openBrowser":"false"
}
}