
Commit 6ffda23

Merge pull request #33 from SumoLogic/monty
Cloudtrail s3 to Sumo processing lambda
2 parents dcaec4a + b31d173 commit 6ffda23

2 files changed: +143, -0 lines changed

cloudtrail_s3/README.md

Lines changed: 64 additions & 0 deletions
@@ -0,0 +1,64 @@
===========================================
Cloudtrail S3 to Sumo Logic
===========================================

Files
-----
* *cloudtrail_s3_to_sumo.js*: node.js function that reads files from an S3 bucket and sends them to a Sumo Logic hosted HTTP collector. Files in the source bucket are gzipped. The function receives S3 notifications when new files are uploaded to the source bucket, reads and unzips them, breaks the records apart, and finally sends the data to the target Sumo endpoint.

## Lambda Setup

For the Sumo collector configuration, do not enable multiline processing or
one message per request -- the idea is to send as many messages as possible in
one request to Sumo and let Sumo break them apart as needed.

In the AWS console, use a code entry type of 'Edit code inline' and paste in the
code. In the Environment variables section, set the following key to the URL provided by the Sumo collector configuration:

SUMO_ENDPOINT

In the configuration, specify index.handler as the Handler. Specify a Role that has
sufficient privileges to read from the *source* bucket and to invoke a Lambda
function. The code provided has been tested with the node 6.10 runtime, a Memory setting of 128MB, and a Timeout of 10s.

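Equivalently, the function can be created from the AWS CLI. The sketch below is illustrative only: the function name, role ARN, account ID, and endpoint URL are placeholders, and it assumes the code has been saved as index.js and zipped into cloudtrail_s3_to_sumo.zip:

<pre>
aws lambda create-function \
  --function-name cloudtrail_s3_to_sumo \
  --runtime nodejs6.10 \
  --handler index.handler \
  --role arn:aws:iam::123456789012:role/cloudtrail-s3-to-sumo \
  --zip-file fileb://cloudtrail_s3_to_sumo.zip \
  --memory-size 128 \
  --timeout 10 \
  --environment "Variables={SUMO_ENDPOINT=https://collectors.sumologic.com/receiver/v1/http/<unique-code>}"
</pre>
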
Set the trigger to the S3 bucket's ObjectCreated (All) events.

One can use the AWSLambdaBasicExecution and the AWSS3ReadOnlyAccess roles, although it is *strongly* recommended to customize them to restrict access to the relevant resources in production (a scoped-down example follows the two policies below):

<pre>
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "logs:CreateLogGroup",
        "logs:CreateLogStream",
        "logs:PutLogEvents"
      ],
      "Resource": "arn:aws:logs:*:*:*"
    }
  ]
}
</pre>

AND

<pre>
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "s3:Get*",
        "s3:List*"
      ],
      "Resource": "*"
    }
  ]
}
</pre>

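As an example of the recommended tightening, the S3 policy can be scoped to the specific CloudTrail bucket; the bucket name below is a placeholder:

<pre>
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": [
        "s3:GetObject",
        "s3:ListBucket"
      ],
      "Resource": [
        "arn:aws:s3:::my-cloudtrail-bucket",
        "arn:aws:s3:::my-cloudtrail-bucket/*"
      ]
    }
  ]
}
</pre>
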
Once the function is created, you can tie it to the source S3 bucket. From the S3 Management Console, select the bucket, go to its Properties, select Events, and add a Notification. From there, provide a name for the notification, select *ObjectCreated (All)* as the Events, and select *Lambda* as the *Send To* option. Finally, select the Lambda function created above and Save.

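If you prefer to wire up the notification from the AWS CLI instead of the console, a sketch along these lines should work; the bucket name, region, account ID, and function name are placeholders, and note that outside the console you must also grant S3 permission to invoke the function (for example with aws lambda add-permission):

<pre>
aws s3api put-bucket-notification-configuration \
  --bucket my-cloudtrail-bucket \
  --notification-configuration '{
    "LambdaFunctionConfigurations": [
      {
        "LambdaFunctionArn": "arn:aws:lambda:us-east-1:123456789012:function:cloudtrail_s3_to_sumo",
        "Events": ["s3:ObjectCreated:*"]
      }
    ]
  }'
</pre>
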
KNOWN ISSUE:
Occasionally, the function will fail with either a TypeError or a socket error. AWS has built-in retries that launch the function again with the same parameters (bucket/filename), so there shouldn't be any data loss, but the function log will show those errors. Using Sumo to log this Lambda's runs is also highly recommended.
cloudtrail_s3/cloudtrail_s3_to_sumo.js

Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//                              CloudTrail S3 bucket log to SumoLogic                                               //
//                       https://github.com/SumoLogic/sumologic-aws-lambda                                          //
//                                                                                                                   //
//   YOU MUST CREATE A SUMO LOGIC ENDPOINT CALLED SUMO_ENDPOINT AND PASTE IT INTO THE ENVIRONMENT VARIABLES BELOW   //
//        https://help.sumologic.com/Send_Data/Sources/02Sources_for_Hosted_Collectors/HTTP_Source                  //
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

// SumoLogic endpoint to post logs to
var SumoURL = process.env.SUMO_ENDPOINT;

var AWS = require('aws-sdk');
var s3 = new AWS.S3();
var https = require('https');
var zlib = require('zlib');
var url = require('url');

function s3LogsToSumo(bucket, objKey, context) {
    var urlObject = url.parse(SumoURL);
    var options = {
        'hostname': urlObject.hostname,
        'path': urlObject.pathname,
        'method': 'POST'
    };
    options.headers = {
        'X-Sumo-Name': objKey,
    };
    var req = https.request(options, function(res) {
        var body = '';
        console.log('Status:', res.statusCode);
        res.setEncoding('utf8');
        res.on('data', function(chunk) { body += chunk; });
        res.on('end', function() {
            console.log('Successfully processed HTTPS response');
            context.succeed();
        });
    });
    var finalData = '';

    // CloudTrail digest files contain no log records, so skip them.
    if (objKey.match(/CloudTrail-Digest/)) {
        console.log("digest files are ignored");
        context.succeed();
        return;
    }

    // Stream the gzipped object from S3 and decompress it on the fly.
    var s3Stream = s3.getObject({Bucket: bucket, Key: objKey}).createReadStream();
    s3Stream.on('error', function() {
        console.log(
            'Error getting object "' + objKey + '" from bucket "' + bucket + '". ' +
            'Make sure they exist and your bucket is in the same region as this function.');
        context.fail();
    });
    var gunzip = zlib.createGunzip();
    s3Stream.pipe(gunzip);
    gunzip.on('data', function(data) {
        finalData += data.toString();
    }).on('end', function(end) {
        // Read the unzipped CloudTrail logs
        var records = JSON.parse(finalData);
        console.log(records.Records.length + " cloudtrail records in this file");
        // Post each CloudTrail record on its own line so Sumo can break them apart.
        for (var i = 0, len = records.Records.length; i < len; i++) {
            req.write(JSON.stringify(records.Records[i]) + '\n');
        }
        req.end();
    }).on('error', function(error) {
        context.fail(error);
    });
}

exports.handler = function(event, context) {
    // Validate that the SUMO_ENDPOINT URL has been set
    var urlObject = url.parse(SumoURL);
    if (urlObject.protocol != 'https:' || urlObject.host === null || urlObject.path === null) {
        context.fail('Invalid SUMO_ENDPOINT environment variable: ' + SumoURL);
        return;
    }
    var bucket = event.Records[0].s3.bucket.name;
    var objKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, ' '));
    console.log('Bucket: ' + bucket + ' ObjectKey: ' + objKey);
    s3LogsToSumo(bucket, objKey, context);
};
