From f9fc2a1df6830045c04e162f8cbec212aaca20b9 Mon Sep 17 00:00:00 2001
From: jclausen
Date: Wed, 25 Sep 2024 12:49:40 -0400
Subject: [PATCH] Add LogEvents interceptor, update test schemas

---
 .github/workflows/tests.yml                     |  1 -
 .gitignore                                      |  1 +
 .markdownlint.json                              |  6 +-
 ModuleConfig.cfc                                | 42 +++++-----
 build/docs/Configuration.md                     | 15 +++-
 build/docs/Logging.md                           | 19 +++++
 changelog.md                                    | 33 +++++++-
 config/Router.cfc                               | 21 +++++
 interceptors/LogEvents.cfc                      | 25 ++++++
 ...-adobe@2018.json => server-adobe@2018.json   |  5 +-
 ...-adobe@2021.json => server-adobe@2021.json   | 15 ++--
 ...-adobe@2023.json => server-adobe@2023.json   | 11 +--
 ...server-lucee@5.json => server-lucee@5.json   |  7 +-
 server-lucee@6.json                             | 21 +++++
 test-harness/box.json                           |  9 +-
 test-harness/tests/Application.cfc              | 27 ++++--
 .../tests/specs/integration/APITest.cfc         | 13 ++-
 .../tests/specs/interceptor/LogEventsTest.cfc   | 84 +++++++++++++++++++
 .../tests/specs/unit/APIAppenderTest.cfc        |  4 +-
 19 files changed, 297 insertions(+), 62 deletions(-)
 create mode 100644 config/Router.cfc
 create mode 100644 interceptors/LogEvents.cfc
 rename test-harness/server-adobe@2018.json => server-adobe@2018.json (86%)
 rename test-harness/server-adobe@2021.json => server-adobe@2021.json (56%)
 rename test-harness/server-adobe@2023.json => server-adobe@2023.json (57%)
 rename test-harness/server-lucee@5.json => server-lucee@5.json (66%)
 create mode 100644 server-lucee@6.json
 create mode 100644 test-harness/tests/specs/interceptor/LogEventsTest.cfc

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index ce06f1f..541334a 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -64,7 +64,6 @@ jobs:
         env:
           ENVIRONMENT: development
         run: |
-          cd test-harness
           box server start serverConfigFile="server-${{ matrix.cfengine }}.json" --noSaveSettings --debug
           cd ../
           curl http://127.0.0.1:60299
diff --git a/.gitignore b/.gitignore
index d1a7b2d..15e6bb0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,6 @@
 .vscode
+.engine/**

 .artifacts/**
 .tmp/**
diff --git a/.markdownlint.json b/.markdownlint.json
index 31705fa..36204cd 100644
--- a/.markdownlint.json
+++ b/.markdownlint.json
@@ -8,7 +8,7 @@
     "no-multiple-blanks": {
         "maximum": 2
    },
-    "no-duplicate-header" : {
-        "siblings_only" : true
-    }
+    "no-duplicate-header" : false,
+    "no-duplicate-heading" : false,
+    "no-inline-html" : false
 }
\ No newline at end of file
diff --git a/ModuleConfig.cfc b/ModuleConfig.cfc
index 7250eb9..956b6a0 100644
--- a/ModuleConfig.cfc
+++ b/ModuleConfig.cfc
@@ -33,7 +33,7 @@
     "applicationName" : getSystemSetting( "LOGSTASH_APPLICATION_NAME", server.coldfusion.productname eq "Lucee" ? getApplicationSettings().name : getApplicationMetadata().name ),
     // Whether to enable the API endpoints to receive log messages
     "enableAPI" : getSystemSetting( "LOGSTASH_ENABLE_API", true ),
-    // Whether to automatically enabled log appenders
+    // Whether to automatically enable the logstash logging appender
     "enableAppenders" : getSystemSetting( "LOGSTASH_ENABLE_APPENDERS", true ),
     // The type of transmission mode for this module - `direct` or `api`
     "transmission" : getSystemSetting( "LOGSTASH_TRANSMISSION_METHOD", "direct" ),
@@ -68,7 +68,9 @@
     "indexPrefix" : getSystemSetting( "LOGSTASH_INDEX_PREFIX", "" ),
     "migrateIndices" : getSystemSetting( "LOGSTASH_MIGRATE_V2", false ),
     // Whether to throw an error when a log document fails to save
-    "throwOnError" : true
+    "throwOnError" : true,
+    // An array of detached appenders which can be used with the `writeToAppender` interception point or directly through the elasticsearch module AppenderService
+    "detachedAppenders" : []
 };

 // Try to look up the release based on a box.json
@@ -92,27 +94,15 @@

 interceptors = [
     //API Security Interceptor
-    { class="logstash.interceptors.APISecurity" }
+    { class="logstash.interceptors.APISecurity" },
+    { class="logstash.interceptors.LogEvents" }
 ];

-if( settings.enableAPI ){
-    routes = [
-        // Module Entry Point
-        {
-            pattern = "/api/logs",
-            handler = "API",
-            action = {
-                "HEAD" : "onInvalidHTTPMethod",
-                "OPTIONS" : "onInvalidHTTPMethod",
-                "GET" : "onInvalidHTTPMethod",
-                "POST" : "create",
-                "DELETE" : "onInvalidHTTPMethod",
-                "PUT" : "create",
-                "PATCH" : "onInvalidHTTPMethod"
-            }
-        }
-    ];
-}
+interceptorSettings = {
+    customInterceptionPoints : [
+        "writeToAppender"
+    ]
+};

 }
@@ -154,9 +144,17 @@
     // Register the appender with the root logger, and turn the logger on.
     var root = logBox.getRootLogger();
     root.addAppender( appenders[ 'logstash_appender' ] );
-}
+}
+
+settings.detachedAppenders.each( ( appender ) => {
+    wirebox.getInstance( "AppenderService@cbelasticsearch" )
+        .createDetachedAppender(
+            appender.name,
+            appender.properties ?: {},
+            appender.class ?: "cbelasticsearch.models.logging.LogstashAppender"
+        );
+} );
+
 // If the api
 if( settings.enableAPI ){
     binder.map( "EventAppender@logstash" )
diff --git a/build/docs/Configuration.md b/build/docs/Configuration.md
index 9da0439..a5fff24 100644
--- a/build/docs/Configuration.md
+++ b/build/docs/Configuration.md
@@ -56,7 +56,20 @@ moduleSettings = {
         "indexPrefix" : getSystemSetting( "LOGSTASH_INDEX_PREFIX", "" ),
         "migrateIndices" : getSystemSetting( "LOGSTASH_MIGRATE_V2", false ),
         // Whether to throw an error when a log document fails to save
-        "throwOnError" : true
+        "throwOnError" : true,
+        // An array of detached appenders which can be used with the `writeToAppender` interception point or directly through the elasticsearch module AppenderService
+        "detachedAppenders" : [
+            {
+                "name" : "myCustomAppender",
+                "properties" : {
+                    "retentionDays" : 1,
+                    // The name of the application which will be transmitted with the log data and used for grouping
+                    "applicationName" : "Custom Detached Appender Logs",
+                    // The max shard size at which the hot phase will rollover data
+                    "rolloverSize" : "1gb"
+                }
+            }
+        ]
     }
 }
 ```
diff --git a/build/docs/Logging.md b/build/docs/Logging.md
index c033c0e..721d95a 100644
--- a/build/docs/Logging.md
+++ b/build/docs/Logging.md
@@ -101,6 +101,25 @@ Fields typed `keyword` are not searchable, but are exact match fields. This all

 _Note: There are two timestamp fields which contain the same data: `timestamp` and `@timestamp`. The latter is simply provided for easy automation with the default configuration for Logstash logs in Kibana. [Read more on the ELK stack here](https://www.elastic.co/what-is/elk-stack)._

+## Detached Appender Logging
+
+If you [configure the `detachedAppenders` array](https://logstash.ortusbooks.com/getting-started/configuration) in your module config, you can send ad-hoc log messages directly to those data streams in Elasticsearch. You may either [log through the `AppenderService` directly](https://cbelasticsearch.ortusbooks.com/logging) or use the convenient interception point the Logstash module provides to log directly to your detached appenders.
+
+```java
+announce(
+    "writeToAppender",
+    {
+        // The detached appender name
+        "appender" : "myCustomAppender",
+        "severity" : "info",
+        "message" : "Here is my custom log message",
+        "extraInfo" : {
+            "foo" : "bar"
+        }
+    }
+);
+```
+
 ## API Usage

 You may transmit any data directly to the logstash API, as long as it follows the mapped schema above. You may even specify a name of the index prefix to be used in the transmission ( the actual index name will have the rotational appender timestamps applied, according to your configured rotation frequency). This provides you flexibility in storing additional log files, which may or may not be from your CFML application.
diff --git a/changelog.md b/changelog.md
index 6435e23..6cebaa9 100644
--- a/changelog.md
+++ b/changelog.md
@@ -6,59 +6,89 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

 ----
+
 ## [Unreleased]
+
 ### Changed
+
 * Remove JSONToRC module dependency as Coldbox handles this by default

 ## [3.0.4] => 2023-11-19
+
 ### Changed
+
 * Bump cbElasticsearch version to 3.2
+
 ## [3.0.3] => 2023-06-01
+
 ### Changed
+
 * Bump cbElasticsearch version to 3.1

 ## [3.0.2] => 2023-03-28
+
 ### Changed
+
 * Bump cbElasticsearch version to 3.0.2

 ## [3.0.1] => 2023-03-12
+
 ### Changed
+
 * Bump cbElasticsearch version to 3.0.1

 ## [3.0.0] => 2023-03-03
+
 ### Changed
+
 * Updates `cbElasticsearch` dependency for v3
 * Module settings and environment variable changes to support v3 of Logstash appender

 ## [2.0.1] => 2023-01-30

 ### Fixed
+
 * Revert to old log index pattern until data stream support can be implemented

 ## [2.0.0] => 2023-01-29
+
 ### Changed
+
 * Bump cbElasticsearch version minimum to 2.4.0
 * Bump Coldbox to v6
 * Changes default log index pattern to `logs-` to reflect v8 changes in Kibana/Logstash defaults
+
 ## [1.2.1] => 2022-09-21
+
 ### Added
+
 * Added support for `LOGSTASH_APPLICATION_NAME` environment variable
+
 ### Changed
+
 * Changed build process to use Github Actions
 * Migrated README content to GitBook
+
 ## [1.2.0] => 2022-08-12
+
 ### Fixed
+
 * Ensured SSL protocol on download location of package

 ## [1.1.1] => 2020-12-10
+
 ### Fixed
+
 * Remove build artifact from final package

 ## [1.1.0] => 2020-11-03
+
 ### Added
+
 * Added additional settings for number of index shards and replicas

 ### Changed
+
 * Modifies appender preflight to use base appender preflight
 * Modifies default logstash index prefix to use Kibana/ES conventions
 * Bumps `cbElasticsearch` dependency to `v2.1.0`
@@ -66,4 +96,5 @@
 ## [1.0.0] => 2020-09-11

 ### Added
-* Initial release of module
\ No newline at end of file
+
+* Initial release of module
diff --git a/config/Router.cfc b/config/Router.cfc
new file mode 100644
index 0000000..30e8993
--- /dev/null
+++ b/config/Router.cfc
@@ -0,0 +1,21 @@
+component {
+
+    function configure(){
+        var apiEnabled = controller.getModuleSettings( "logstash", "enableAPI", true );
+
+        if( apiEnabled ){
+            route( "/api/logs" )
+                .withAction( {
+                    "HEAD" : "onInvalidHTTPMethod",
+                    "OPTIONS" : "onInvalidHTTPMethod",
+                    "GET" : "onInvalidHTTPMethod",
+                    "POST" : "create",
+                    "DELETE" : "onInvalidHTTPMethod",
+                    "PUT" : "create",
+                    "PATCH" : "onInvalidHTTPMethod"
+                } )
+                .toHandler( "API" );
+        }
+    }
+
+}
diff --git a/interceptors/LogEvents.cfc b/interceptors/LogEvents.cfc
new file mode 100644
index 0000000..4824e77
--- /dev/null
+++ b/interceptors/LogEvents.cfc
@@ -0,0 +1,25 @@
+component {
+
+    property name="appenderService" inject="AppenderService@cbelasticsearch";
+
+
+    function writeToAppender( event, rc, prc, interceptData ){
+        if( !interceptData.keyExists( "appender" ) ){
+            throw( type="InvalidArgument", message="The 'appender' key is required in the intercept data. Could not continue." );
+        } else if( !interceptData.keyExists( "message" ) ){
+            throw( type="InvalidArgument", message="The 'message' key is required in the intercept data. Could not continue." );
+        }
+
+        param interceptData.extraInfo = {};
+        param interceptData.severity = "INFO";
+
+        appenderService.logToAppender(
+            interceptData.appender,
+            interceptData.message,
+            interceptData.severity,
+            interceptData.extraInfo
+        );
+
+    }
+
+}
\ No newline at end of file
diff --git a/test-harness/server-adobe@2018.json b/server-adobe@2018.json
similarity index 86%
rename from test-harness/server-adobe@2018.json
rename to server-adobe@2018.json
index 4a129fb..4a89b92 100644
--- a/test-harness/server-adobe@2018.json
+++ b/server-adobe@2018.json
@@ -10,7 +10,8 @@
         },
         "rewrites":{
             "enable":"true"
-        }
+        },
+        "webroot":"test-harness"
     },
     "openBrowser":"false"
-}
\ No newline at end of file
+}
diff --git a/test-harness/server-adobe@2021.json b/server-adobe@2021.json
similarity index 56%
rename from test-harness/server-adobe@2021.json
rename to server-adobe@2021.json
index 4afd25b..20cc13c 100644
--- a/test-harness/server-adobe@2021.json
+++ b/server-adobe@2021.json
@@ -1,5 +1,5 @@
 {
-    "name":"logstash-adobe@2018",
+    "name":"logstash-adobe@2021",
     "app":{
         "serverHomeDirectory":".engine/adobe2021",
         "cfengine":"adobe@2021"
@@ -11,13 +11,14 @@
         "rewrites":{
             "enable":"true"
         },
+        "webroot":"test-harness",
         "aliases":{
-            "/moduleroot/stachebox/includes":"../includes",
-            "/stachebox/includes":"../includes"
+            "/moduleroot/logstash":"../",
+            "/root":"./test-harness"
         }
     },
     "openBrowser":"false",
-    "scripts" : {
-        "onServerInstall" : "cfpm install zip,mysql,document,feed"
-    }
-}
\ No newline at end of file
+    "scripts":{
+        "onServerInstall":"cfpm install zip,debugger"
+    }
+}
diff --git a/test-harness/server-adobe@2023.json b/server-adobe@2023.json
similarity index 57%
rename from test-harness/server-adobe@2023.json
rename to server-adobe@2023.json
index 914e0bd..a2e0af7 100644
--- a/test-harness/server-adobe@2023.json
+++ b/server-adobe@2023.json
@@ -1,7 +1,7 @@
 {
     "name":"logstash-adobe@2023",
     "app":{
-        "serverHomeDirectory":"./.engine/adobe2023",
+        "serverHomeDirectory":".engine/adobe2023",
         "cfengine":"adobe@2023"
     },
     "web":{
@@ -11,13 +11,14 @@
         "rewrites":{
             "enable":"true"
         },
+        "webroot":"test-harness",
         "aliases":{
-            "/moduleroot/stachebox/includes":"../includes",
-            "/stachebox/includes":"../includes"
+            "/moduleroot/logstash":"../",
+            "/root":"./test-harness"
        }
     },
     "openBrowser":"false",
     "scripts":{
-        "onServerInstall":"cfpm install zip,document,feed"
+        "onServerInstall":"cfpm install zip,debugger"
     }
-}
\ No newline at end of file
+}
diff --git a/test-harness/server-lucee@5.json b/server-lucee@5.json
similarity index 66%
rename from test-harness/server-lucee@5.json
rename to server-lucee@5.json
index 94ff70d..936388b 100644
--- a/test-harness/server-lucee@5.json
+++ b/server-lucee@5.json
@@ -10,7 +10,12 @@
         },
         "rewrites":{
             "enable":"true"
+        },
+        "webroot":"test-harness",
+        "aliases":{
+            "/moduleroot/logstash":"./",
+            "/root":"./test-harness"
         }
     },
     "openBrowser":"false"
-}
\ No newline at end of file
+}
diff --git a/server-lucee@6.json b/server-lucee@6.json
new file mode 100644
index 0000000..79eccc8
--- /dev/null
+++ b/server-lucee@6.json
@@ -0,0 +1,21 @@
+{
+    "name":"logstash-lucee@6",
+    "app":{
+        "serverHomeDirectory":".engine/lucee6",
+        "cfengine":"lucee@be"
+    },
+    "web":{
+        "http":{
+            "port":"60299"
+        },
+        "rewrites":{
+            "enable":"true"
+        },
+        "webroot":"test-harness",
+        "aliases":{
+            "/moduleroot/logstash":"./",
+            "/root":"./test-harness"
+        }
+    },
+    "openBrowser":"false"
+}
diff --git a/test-harness/box.json b/test-harness/box.json
index 4288fc4..29cc3ab 100644
--- a/test-harness/box.json
+++ b/test-harness/box.json
@@ -4,18 +4,19 @@
     "slug":"tester",
     "private":true,
     "description":"",
-    "dependencies":{
-    },
+    "dependencies":{},
     "devDependencies":{
         "testbox":"*",
-        "coldbox":"^6"
+        "coldbox":"7",
+        "route-visualizer":"^2.2.0+1"
     },
     "installPaths":{
         "coldbox":"coldbox/",
         "testbox":"testbox/",
         "cbelasticsearch":"modules/cbelasticsearch/",
         "cbrestbasehandler":"modules/cbrestbasehandler/",
-        "JSONToRC":"modules/JSONToRC/"
+        "JSONToRC":"modules/JSONToRC/",
+        "route-visualizer":"modules/route-visualizer/"
     },
     "testbox":{
         "runner":"http://localhost:60299/tests/runner.cfm"
diff --git a/test-harness/tests/Application.cfc b/test-harness/tests/Application.cfc
index b273c55..28c63af 100644
--- a/test-harness/tests/Application.cfc
+++ b/test-harness/tests/Application.cfc
@@ -27,18 +27,35 @@
     moduleRootPath = reReplaceNoCase( this.mappings[ "/root" ], "#request.module_name#(\\|/)test-harness(\\|/)", "" );
     this.mappings[ "/moduleroot" ] = moduleRootPath;
     this.mappings[ "/#request.MODULE_NAME#" ] = moduleRootPath & "#request.MODULE_NAME#";
-    this.mappings[ "/cbelasticsearch" ] = this.mappings[ "/root" ] & "modules/cbelasticsearch";
+    this.mappings[ "/cbelasticsearch" ] = this.mappings[ "/#request.MODULE_NAME#" ] & "/modules/cbelasticsearch";
     this.mappings[ "/hyper" ] = this.mappings[ "/cbelasticsearch" ] & "modules/hyper";

     // request start
-    public boolean function onRequestStart( String targetPage ){
+    function onRequestStart( required targetPage ){
+        // Set a high timeout for long running tests
+        setting requestTimeout="9999";
+        // New ColdBox Virtual Application Starter
+        request.coldBoxVirtualApp = new coldbox.system.testing.VirtualApp( appMapping = "/root" );
+
+        // ORM Reload for fresh results
+        if ( structKeyExists( url, "fwreinit" ) ) {
+            if ( structKeyExists( server, "lucee" ) ) {
+                pagePoolClear();
+            }
+            request.coldBoxVirtualApp.shutdown();
+        }
+
+        // If hitting the runner or specs, prep our virtual app
+        if ( getBaseTemplatePath().replace( expandPath( "/tests" ), "" ).reFindNoCase( "(runner|specs)" ) ) {
+            request.coldBoxVirtualApp.startup();
+        }
+
         return true;
     }

-    function onRequestEnd(){
-        structDelete( application, "wirebox" );
-        structDelete( application, "cbController" );
+    public function onRequestEnd(){
+        request.coldBoxVirtualApp.shutdown();
     }
 }
\ No newline at end of file
diff --git a/test-harness/tests/specs/integration/APITest.cfc b/test-harness/tests/specs/integration/APITest.cfc
index 4b78057..27cd4cd 100755
--- a/test-harness/tests/specs/integration/APITest.cfc
+++ b/test-harness/tests/specs/integration/APITest.cfc
@@ -47,8 +47,7 @@ component extends="coldbox.system.testing.BaseTestCase"{
             "stacktrace" : errorEntry.stacktrace,
             "extrainfo" : errorEntry.stacktrace,
             "snapshot" : {},
-            "userinfo" : serializeJSON( { "username" : "tester" } ),
-            "event" : serializeJSON( { "foo" : "bar" } )
+            "event" : { "foo" : "bar" }
         };
     }

@@ -82,7 +81,7 @@

         var event = execute(
             route="/logstash/api/logs",
-            eventArgs=testEvent,
+            eventArguments=testEvent,
             renderResults=false
         );

@@ -102,7 +101,7 @@

         var event = execute(
             route="/logstash/api/logs",
-            eventArgs=testEvent,
+            eventArguments=testEvent,
             renderResults=false
         );

@@ -121,7 +120,7 @@

         var event = execute(
             route="/logstash/api/logs",
-            eventArgs=testEvent,
+            eventArguments=testEvent,
             renderResults=false
         );

@@ -138,10 +137,9 @@
         var testEvent = newEventArgs( "POST" );
         testEvent.rc.entry = logEntry;
-
         var event = execute(
             route="/logstash/api/logs",
-            eventArgs=testEvent,
+            eventArguments=testEvent,
             renderResults=false
         );

@@ -153,7 +151,6 @@

         expect( prc.response.getData() ).toBeStruct()
             .toHaveKey( "accepted" );

-
     } );
diff --git a/test-harness/tests/specs/interceptor/LogEventsTest.cfc b/test-harness/tests/specs/interceptor/LogEventsTest.cfc
new file mode 100644
index 0000000..de2ed31
--- /dev/null
+++ b/test-harness/tests/specs/interceptor/LogEventsTest.cfc
@@ -0,0 +1,84 @@
+component extends="coldbox.system.testing.BaseTestCase" {
+
+    function beforeAll(){
+        this.loadColdbox = true;
+        super.beforeAll();
+        setup();
+        variables.esClient = getWirebox().getInstance( "Client@cbelasticsearch" );
+        variables.interceptor = new logstash.interceptors.LogEvents();
+        getWirebox().autowire( variables.interceptor );
+
+        variables.appenderName = "logstashLogEventsTest";
+        variables.appenderService = getWirebox().getInstance( "AppenderService@cbelasticsearch" );
+        appenderService.createDetachedAppender(
+            appenderName,
+            {
+                // The data stream name to use for this appender's logs
+                "dataStreamPattern" : "logs-coldbox-#lcase( appenderName )#*",
+                "dataStream" : "logs-coldbox-#lcase( appenderName )#",
+                "ILMPolicyName" : "cbelasticsearch-logs-#lcase( appenderName )#",
+                "indexTemplateName" : "cbelasticsearch-logs-#lcase( appenderName )#",
+                "componentTemplateName" : "cbelasticsearch-logs-#lcase( appenderName )#",
+                "pipelineName" : "cbelasticsearch-logs-#lcase( appenderName )#",
+                "indexTemplatePriority" : 151,
+                "retentionDays" : 1,
+                // The name of the application which will be transmitted with the log data and used for grouping
+                "applicationName" : "Logstash Detached Interception Appender Logs",
+                // The max shard size at which the hot phase will rollover data
+                "rolloverSize" : "1gb"
+            }
+        );
+
+    }
+
+    function afterAll(){
+        var appender = appenderService.getAppender( variables.appenderName );
+        if( !isNull( appender ) ){
+            if( esClient.dataStreamExists( appender.getProperty( "dataStream" ) ) ){
+                esClient.deleteDataStream( appender.getProperty( "dataStream" ) );
+            }
+            if( esClient.indexTemplateExists( appender.getProperty( "indexTemplateName" ) ) ){
+                esClient.deleteIndexTemplate( appender.getProperty( "indexTemplateName" ) );
+            }
+
+            if( esClient.componentTemplateExists( appender.getProperty( "componentTemplateName" ) ) ){
+                esClient.deleteComponentTemplate( appender.getProperty( "componentTemplateName" ) );
+            }
+
+            if( esClient.ILMPolicyExists( appender.getProperty( "ILMPolicyName" ) ) ){
+                esClient.deleteILMPolicy( appender.getProperty( "ILMPolicyName" ) );
+            }
+        }
+
+        super.afterAll();
+    }
+
+    function run(){
+        describe( "Perform actions on detached appender", function(){
+            it( "Tests the ability to log a message through the interception point", function(){
+                var appender = appenderService.getAppender( variables.appenderName );
+                var dataStreamCount = getDataStreamCount( appender.getProperty( "dataStreamPattern" ) );
+                var event = getMockRequestContext();
+                var rc = event.getCollection();
+                var prc = event.getPrivateCollection();
+                variables.interceptor.writeToAppender(
+                    event,
+                    rc,
+                    prc,
+                    {
+                        "appender" : variables.appenderName,
+                        "message" : "Test message"
+                    }
+                );
+                sleep( 1000 );
+                expect( getDataStreamCount( appender.getProperty( "dataStreamPattern" ) ) ).toBe( dataStreamCount + 1 );
+            } );
+
+        } );
+    }
+
+    function getDataStreamCount( required string dataStreamPattern ){
+        return getWirebox().getInstance( "SearchBuilder@cbelasticsearch" ).setIndex( dataStreamPattern ).setQuery( { "match_all" : {} } ).count();
+    }
+
+}
diff --git a/test-harness/tests/specs/unit/APIAppenderTest.cfc b/test-harness/tests/specs/unit/APIAppenderTest.cfc
index 10aea42..059f0e8 100755
--- a/test-harness/tests/specs/unit/APIAppenderTest.cfc
+++ b/test-harness/tests/specs/unit/APIAppenderTest.cfc
@@ -16,7 +16,7 @@
     {
         "applicationName" : "testspecs",
         "dataStream" : "logstash-api-appender-tests",
-        "dataStreamPattern" : "logstash-api-appender-tests",
+        "dataStreamPattern" : "logstash-api-appender-tests*",
         "componentTemplateName" : "logstash-api-appender-component",
         "indexTemplateName" : "logstash-api-appender-tests",
         "ILMPolicyName" : "logstash-api-appender-tests",
@@ -87,7 +87,7 @@
         // create a test error
         variables.model.logMessage( variables.loge );

-        sleep( 5 );
+        sleep( 1000 );

        var documents = getWirebox().getInstance( "SearchBuilder@cbElasticsearch" ).new( variables.model.getProperty( "dataStream" ) ).setQuery( { "match_all" : {} }).execute().getHits();
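
For reference, a minimal sketch of how a host application might exercise the `detachedAppenders` setting and the `writeToAppender` interception point introduced in this patch. The appender name (`auditTrail`), its property values, and the message payload below are hypothetical; only the `appender` and `message` keys are required by `interceptors/LogEvents.cfc`, with `severity` defaulting to `INFO` and `extraInfo` to an empty struct.

```java
// config/ColdBox.cfc (host application) -- hypothetical appender name and properties
moduleSettings = {
    "logstash" : {
        // Registers a detached appender at module load via the new `detachedAppenders` setting
        "detachedAppenders" : [
            {
                "name"       : "auditTrail",
                "properties" : {
                    "applicationName" : "Audit Trail Logs",
                    "retentionDays"   : 30
                }
            }
        ]
    }
};

// From any handler or service: announce the interception point handled by interceptors/LogEvents.cfc
announce(
    "writeToAppender",
    {
        "appender"  : "auditTrail",          // required - the detached appender name
        "message"   : "Order created",       // required - the log message
        "severity"  : "info",                // optional, defaults to INFO
        "extraInfo" : { "orderId" : 12345 }  // optional structured payload
    }
);
```

The same write can be performed without the interception point by resolving `AppenderService@cbelasticsearch` and calling `logToAppender()` directly, which is all the interceptor does after validating the intercept data.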