All Apps and Add-ons

Trend Micro Deep Security: Why is this app grouping Amazon Web Services SNS messages into a single event?

MatthewH007
Path Finder

I am forwarding logs via Amazon Web Services SNS and using the 'splunk-logger' Lambda function to get the logs into Splunk Cloud. My problem is that when Deep Security sends these SNS messages, it groups multiple events together and Splunk sees them as 1 event instead of X number of separate events.

Example Event:
{Message {Event_1} {Event_2} {Event_3}}

I would like these to be seen as separate events since most of the time they have different hosts, details, etc. The one good thing is at least they are of the same type (Ex: Log Inspection, App Control, etc.).

My thought is to modify the Lambda script so that when the message is received, it will parse through it and separate the events properly and then each event will be sent individually to Splunk. This looks to require heavy modification to the current blueprint (I am also not a JavaScript expert by any means).

Has anyone else run into this issue with AWS SNS messaging and Deep Security? If there is an easier way, I would love to do that instead of trying to modify the 'splunk-logger' blueprint that has been created by the Splunk team.

Note: I have contacted their support and they were no help. They told me this is just the way Deep Security sends the logs via SNS and they are not working on a solution to send events individually.

0 Karma
1 Solution

MatthewH007
Path Finder

I ended up solving this issue. I had to create an AWS Lambda script to take care of the log forwarding, and the events now populate one at a time in Splunk instead of being clustered together. The following code works great for me. Keep in mind that you need to set the required environment variables.

import requests
import re
import json
import os

def lambda_handler(event, context):
    """Split a bundled Deep Security SNS notification into individual
    events and forward each one to Splunk HTTP Event Collector (HEC).

    Required environment variables:
      SPLUNK_HEC_URL   -- full HEC endpoint URL
      SPLUNK_HEC_TOKEN -- HEC authorization token

    Raises:
      requests.HTTPError: if Splunk HEC rejects any event.
      KeyError: if a required environment variable is missing.
    """
    # Serialize the whole Lambda event so the embedded, escaped JSON
    # event records can be located with a regex.
    data = json.dumps(event)

    # Searches for the delimiter between each event and separates each:
    # every Deep Security event appears as an escaped JSON object of the
    # form {\"...\"} inside the SNS message body.
    event_ids = re.findall(r'{\\\".+?\\\"}', data)

    # Needed to authenticate against Splunk HEC.
    headers = {'Authorization': 'Splunk ' + os.environ['SPLUNK_HEC_TOKEN']}
    hec_url = os.environ['SPLUNK_HEC_URL']

    # Forward each event separately so Splunk indexes them one at a time
    # instead of as a single clustered event.
    for event_id in event_ids:
        response = requests.post(hec_url, headers=headers,
                                 json={"event": event_id}, verify=True)
        # Surface HEC failures instead of silently dropping events.
        response.raise_for_status()

View solution in original post

0 Karma

MatthewH007
Path Finder

I ended up solving this issue. I had to create an AWS Lambda script to take care of the log forwarding, and the events now populate one at a time in Splunk instead of being clustered together. The following code works great for me. Keep in mind that you need to set the required environment variables.

import requests
import re
import json
import os

def lambda_handler(event, context):
    """Forward each Deep Security event embedded in the SNS payload to
    Splunk HTTP Event Collector as its own individual event."""
    # Serialize the incoming Lambda event to a JSON string.
    serialized = json.dumps(event)

    # Locate the delimiter between events and separate them: each event
    # appears as an escaped JSON object of the form {\"...\"}.
    fragments = re.findall(r'{\\\".+?\\\"}', serialized)

    # Authorization header required by Splunk HEC.
    auth_headers = {'Authorization': 'Splunk ' + os.environ['SPLUNK_HEC_TOKEN']}

    # Post the fragments one by one so Splunk sees separate events.
    for fragment in fragments:
        response = requests.post(os.environ['SPLUNK_HEC_URL'],
                                 headers=auth_headers,
                                 json={"event": fragment},
                                 verify=True)
0 Karma

MatthewH007
Path Finder

Here is the index.js code:

'use strict';

// Splunk HTTP Event Collector endpoint and token, supplied through the
// Lambda function's environment variables.
const loggerConfig = {
    url: process.env.SPLUNK_HEC_URL,
    token: process.env.SPLUNK_HEC_TOKEN,
};

const SplunkLogger = require('./lib/mysplunklogger');

// Module-level logger instance, reused across warm Lambda invocations.
const logger = new SplunkLogger(loggerConfig);

exports.handler = (event, context, callback) => {
    console.log('Received event:', JSON.stringify(event, null, 2));


    // Log JSON objects to Splunk
    logger.log(event);

// Send all the events in a single batch to Splunk
    logger.flushAsync((error, response) => {
        if (error) {
            callback(error);
        } else {
            console.log(`Response from Splunk:\n${response}`);
            callback(null, event.key1); // Echo back the first key value
        }
    });
};
0 Karma

MatthewH007
Path Finder

Here is the mysplunklogger.js code.

'use strict';

const url = require('url');

// Minimal Splunk HEC logger: buffers serialized event payloads and ships
// them to the configured HEC endpoint in a single POST (see flushAsync).
const Logger = function Logger(config) {
    this.url = config.url;
    this.token = config.token;

    // Enrich events with Lambda metadata and tag their source by default.
    this.addMetadata = true;
    this.setSource = true;

    this.parsedUrl = url.parse(this.url);
    // Select the matching transport module ('http' or 'https') by
    // stripping the trailing ':' from the parsed protocol.
    // eslint-disable-next-line import/no-dynamic-require
    this.requester = require(this.parsedUrl.protocol.slice(0, -1));

    // Base request options; consumers may override or extend these.
    this.requestOptions = {
        hostname: this.parsedUrl.hostname,
        path: this.parsedUrl.path,
        port: this.parsedUrl.port,
        method: 'POST',
        headers: {
            Authorization: `Splunk ${this.token}`,
        },
        // NOTE(review): this disables TLS certificate validation, which is
        // only acceptable for self-signed HEC endpoints — consider making
        // it configurable instead of hard-coded.
        rejectUnauthorized: false,
    };

    // Pending serialized payloads awaiting flushAsync().
    this.payloads = [];
};

// Simple logging API for Lambda functions
// Queue a message as a Splunk event stamped with the current time.
Logger.prototype.log = function log(message, context) {
    const now = Date.now();
    this.logWithTime(now, message, context);
};

// Queue `message` as a Splunk event with an explicit timestamp (epoch ms).
// When a Lambda `context` is supplied, optionally enriches object events
// with the request id and tags the event source with the function name.
// Throws if `message` is an array (must be a string or plain JSON object).
Logger.prototype.logWithTime = function logWithTime(time, message, context) {
    if (Array.isArray(message)) {
        throw new Error('message argument must be a string or a JSON object.');
    }
    const payload = { event: message };

    if (typeof context !== 'undefined') {
        // Enrich only object events — strings cannot carry extra fields.
        if (this.addMetadata && message === Object(message)) {
            payload.event = JSON.parse(JSON.stringify(message)); // deep copy
            payload.event.awsRequestId = context.awsRequestId;
        }
        if (this.setSource) {
            payload.source = `lambda:${context.functionName}`;
        }
    }

    // Splunk HEC expects the timestamp in epoch seconds.
    payload.time = new Date(time).getTime() / 1000;

    this.logEvent(payload);
};

// Serialize a payload and append it to the pending batch.
Logger.prototype.logEvent = function logEvent(payload) {
    const serialized = JSON.stringify(payload);
    this.payloads.push(serialized);
};

// Send all buffered payloads to Splunk HEC in a single POST request.
// Invokes `callback(error, responseBody)` exactly once: after the full
// HTTP response has been received, or on a request-level error.
Logger.prototype.flushAsync = function flushAsync(callback) {
    callback = callback || (() => {}); // eslint-disable-line no-param-reassign

    console.log('Sending event(s)');
    const req = this.requester.request(this.requestOptions, (res) => {
        res.setEncoding('utf8');

        console.log('Response received');
        // Buffer the response body: 'data' may fire several times (or not
        // at all for an empty body), so the callback is deferred to 'end'
        // and fired exactly once. The original called back on every chunk.
        let body = '';
        res.on('data', (chunk) => {
            body += chunk;
        });
        res.on('end', () => {
            let error = null;
            if (res.statusCode !== 200) {
                error = new Error(`error: statusCode=${res.statusCode}\n\n${body}`);
                console.error(error);
            }
            // Clear the batch so the logger can be reused by a warm Lambda.
            this.payloads.length = 0;
            callback(error, body);
        });
    });

    req.on('error', (error) => {
        callback(error);
    });

    req.end(this.payloads.join(''), 'utf8');
};

module.exports = Logger;
0 Karma
Get Updates on the Splunk Community!

.conf24 | Registration Open!

Hello, hello! I come bearing good news: Registration for .conf24 is now open!   conf is Splunk’s rad annual ...

Splunk is officially part of Cisco

Revolutionizing how our customers build resilience across their entire digital footprint.   Splunk ...

Splunk APM & RUM | Planned Maintenance March 26 - March 28, 2024

There will be planned maintenance for Splunk APM and RUM between March 26, 2024 and March 28, 2024 as ...