Skip to content

Commit 1badce4

Browse files
committed
Logger and statistics callback
1 parent 5eaa4e2 commit 1badce4

File tree

4 files changed

+109
-9
lines changed

4 files changed

+109
-9
lines changed

examples/performance/performance-consolidated.js

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,8 @@ function printPercentiles(percentiles, type) {
172172
const consumerRate = await runConsumer(parameters, topic,
173173
warmupMessages, messageCount,
174174
false, partitionsConsumedConcurrently, stats,
175-
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere);
175+
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere,
176+
messageSize, limitRPS);
176177
endTrackingMemory('consumer-each-message', `consumer-memory-message-${mode}.json`);
177178
console.log("=== Consumer Rate MB/s (eachMessage): ", consumerRate);
178179
console.log("=== Consumer Rate msg/s (eachMessage): ", stats.messageRate);
@@ -197,7 +198,8 @@ function printPercentiles(percentiles, type) {
197198
const consumerRate = await runConsumer(parameters, topic,
198199
warmupMessages, messageCount,
199200
true, partitionsConsumedConcurrently, stats,
200-
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere);
201+
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere,
202+
messageSize, limitRPS);
201203
endTrackingMemory('consumer-each-batch', `consumer-memory-batch-${mode}.json`);
202204
console.log("=== Consumer Rate MB/s (eachBatch): ", consumerRate);
203205
console.log("=== Consumer Rate msg/s (eachBatch): ", stats.messageRate);

examples/performance/performance-primitives-common.js

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
const { hrtime } = require('process');
22
const { randomBytes } = require('crypto');
3+
const fs = require('fs');
4+
const { logLevel } = require('../../').KafkaJS;
35
const PERCENTILES = [50, 75, 90, 95, 99, 99.9, 99.99, 100];
46

57
const TERMINATE_TIMEOUT_MS = process.env.TERMINATE_TIMEOUT_MS ? +process.env.TERMINATE_TIMEOUT_MS : 600000;
@@ -17,6 +19,41 @@ else {
1719
}
1820
}
1921

22+
class PerformanceLogger {
23+
constructor(fileName) {
24+
this.logLevel = logLevel.INFO;
25+
this.writeStream = fs.createWriteStream(fileName);
26+
}
27+
28+
setLogLevel(logLevel) {
29+
this.logLevel = logLevel;
30+
}
31+
32+
info(message, extra) {
33+
if (this.logLevel >= logLevel.INFO)
34+
this.writeStream.write(`INFO: ${message} ${JSON.stringify(extra)}\n`);
35+
}
36+
37+
error(message, extra) {
38+
if (this.logLevel >= logLevel.ERROR)
39+
this.writeStream.write(`ERROR: ${message} ${JSON.stringify(extra)}\n`);
40+
}
41+
42+
warn(message, extra) {
43+
if (this.logLevel >= logLevel.WARN)
44+
this.writeStream.write(`WARN: ${message} ${JSON.stringify(extra)}\n`);
45+
}
46+
47+
debug(message, extra) {
48+
if (this.logLevel >= logLevel.DEBUG)
49+
this.writeStream.write(`DEBUG: ${message} ${JSON.stringify(extra)}\n`);
50+
}
51+
52+
namespace() {
53+
return this;
54+
}
55+
}
56+
2057
function installHandlers(useTerminateTimeout) {
2158
const handlers = {
2259
terminationRequested: false,
@@ -569,4 +606,5 @@ module.exports = {
569606
runLagMonitoring,
570607
genericProduceToTopic,
571608
getAutoCommit,
609+
PerformanceLogger,
572610
};

examples/performance/performance-primitives-kafkajs.js

Lines changed: 26 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ const {
66
runProducer: runProducerCommon,
77
genericProduceToTopic,
88
getAutoCommit,
9+
PerformanceLogger,
910
} = require('./performance-primitives-common');
1011

1112
const {
@@ -21,6 +22,8 @@ module.exports = {
2122
};
2223

2324
const IS_HIGHER_LATENCY_CLUSTER = process.env.IS_HIGHER_LATENCY_CLUSTER === 'true';
25+
const DEBUG = process.env.DEBUG;
26+
const ENABLE_LOGGING = DEBUG !== undefined;
2427

2528
function baseConfiguration(parameters) {
2629
let ret = {
@@ -41,6 +44,11 @@ function baseConfiguration(parameters) {
4144
}
4245
};
4346
}
47+
if (parameters.logToFile) {
48+
ret.kafkaJS = {
49+
'logger': new PerformanceLogger(parameters.logToFile),
50+
};
51+
}
4452
return ret;
4553
}
4654

@@ -104,6 +112,9 @@ function newCompatibleProducer(parameters, compression) {
104112
}
105113

106114
async function runProducer(parameters, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS) {
115+
if (ENABLE_LOGGING && !parameters.logToFile) {
116+
parameters.logToFile = './kafkajs-producer.log';
117+
}
107118
return runProducerCommon(newCompatibleProducer(parameters, compression), topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS);
108119
}
109120

@@ -167,18 +178,30 @@ function newCompatibleConsumer(parameters, eachBatch) {
167178
}
168179

169180

170-
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere) {
181+
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere,
182+
messageSize, limitRPS) {
171183
let actionOnMessages = null;
172184
let producer;
173185
if (produceToTopic) {
174186
const newCompatibleProducerFunction = useCKJSProducerEverywhere ?
175187
newCompatibleProducerCKJS : newCompatibleProducer;
176-
producer = newCompatibleProducerFunction(parameters, produceCompression);
188+
const producerParameters = {
189+
...parameters,
190+
};
191+
if (ENABLE_LOGGING)
192+
producerParameters.logToFile = './kafkajs-consumer-producer.log';
193+
producer = newCompatibleProducerFunction(producerParameters, produceCompression);
177194
await producer.connect();
178195
actionOnMessages = (messages) =>
179196
genericProduceToTopic(producer, produceToTopic, messages);
180197
}
181-
const ret = await runConsumerCommon(newCompatibleConsumer(parameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
198+
const consumerParameters = {
199+
...parameters,
200+
};
201+
if (ENABLE_LOGGING)
202+
consumerParameters.logToFile = eachBatch ? './kafkajs-consumer-batch.log' :
203+
'./kafkajs-consumer-message.log';
204+
const ret = await runConsumerCommon(newCompatibleConsumer(consumerParameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
182205
if (producer) {
183206
await producer.disconnect();
184207
}

examples/performance/performance-primitives.js

Lines changed: 41 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ const {
77
runLagMonitoring: runLagMonitoringCommon,
88
genericProduceToTopic,
99
getAutoCommit,
10+
PerformanceLogger,
1011
} = require('./performance-primitives-common');
1112

1213
module.exports = {
@@ -22,6 +23,9 @@ module.exports = {
2223

2324
const CONSUMER_MAX_BATCH_SIZE = process.env.CONSUMER_MAX_BATCH_SIZE ? +process.env.CONSUMER_MAX_BATCH_SIZE : null;
2425
const IS_HIGHER_LATENCY_CLUSTER = process.env.IS_HIGHER_LATENCY_CLUSTER === 'true';
26+
const DEBUG = process.env.DEBUG;
27+
const STATISTICS_INTERVAL_MS = process.env.STATISTICS_INTERVAL_MS ? +process.env.STATISTICS_INTERVAL_MS : null;
28+
const ENABLE_LOGGING = DEBUG !== undefined || STATISTICS_INTERVAL_MS !== null;
2529

2630
function baseConfiguration(parameters) {
2731
let ret = {
@@ -39,6 +43,20 @@ function baseConfiguration(parameters) {
3943
'sasl.password': parameters.saslPassword,
4044
};
4145
}
46+
if (DEBUG) {
47+
ret['debug'] = DEBUG;
48+
}
49+
if (parameters.logToFile) {
50+
ret.kafkaJS = {
51+
'logger': new PerformanceLogger(parameters.logToFile),
52+
};
53+
}
54+
if (STATISTICS_INTERVAL_MS !== null) {
55+
ret['statistics.interval.ms'] = STATISTICS_INTERVAL_MS;
56+
ret['stats_cb'] = function (event) {
57+
this.logger().info(event.message);
58+
};
59+
}
4260
return ret;
4361
}
4462

@@ -116,6 +134,9 @@ function newCompatibleProducer(parameters, compression) {
116134
}
117135

118136
async function runProducer(parameters, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS) {
137+
if (ENABLE_LOGGING && !parameters.logToFile) {
138+
parameters.logToFile = './confluent-producer.log';
139+
}
119140
return runProducerCommon(newCompatibleProducer(parameters, compression), topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS);
120141
}
121142

@@ -151,7 +172,8 @@ class CompatibleConsumer {
151172
}
152173
}
153174

154-
function newCompatibleConsumer(parameters, eachBatch) {
175+
function newCompatibleConsumer(parameters, eachBatch, messageSize, limitRPS) {
176+
const minFetchBytes = messageSize * limitRPS;
155177
const kafka = new Kafka(baseConfiguration(parameters));
156178
const autoCommit = getAutoCommit();
157179
const autoCommitOpts = autoCommit > 0 ?
@@ -173,6 +195,7 @@ function newCompatibleConsumer(parameters, eachBatch) {
173195
'group.id': groupId,
174196
'auto.offset.reset': 'earliest',
175197
'fetch.queue.backoff.ms': '100',
198+
'fetch.min.bytes': minFetchBytes.toString(),
176199
...autoCommitOpts,
177200
...jsOpts,
178201
...higherLatencyClusterOpts,
@@ -181,16 +204,30 @@ function newCompatibleConsumer(parameters, eachBatch) {
181204
}
182205

183206

184-
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere) {
207+
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere,
208+
messageSize, limitRPS) {
185209
let actionOnMessages = null;
186210
let producer;
187211
if (produceToTopic) {
188-
producer = newCompatibleProducer(parameters, produceCompression);
212+
const producerParameters = {
213+
...parameters,
214+
};
215+
if (ENABLE_LOGGING)
216+
producerParameters.logToFile = './confluent-consumer-producer.log';
217+
producer = newCompatibleProducer(producerParameters, produceCompression);
189218
await producer.connect();
190219
actionOnMessages = (messages) =>
191220
genericProduceToTopic(producer, produceToTopic, messages);
192221
}
193-
const ret = await runConsumerCommon(newCompatibleConsumer(parameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
222+
const consumerParameters = {
223+
...parameters,
224+
};
225+
if (ENABLE_LOGGING)
226+
consumerParameters.logToFile = eachBatch ? './confluent-consumer-batch.log' :
227+
'./confluent-consumer-message.log';
228+
const ret = await runConsumerCommon(
229+
newCompatibleConsumer(consumerParameters, eachBatch, messageSize, limitRPS),
230+
topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
194231
if (producer) {
195232
await producer.disconnect();
196233
}

0 commit comments

Comments (0)