Commit d78d6e6

Logger and statistics callback
1 parent 5eaa4e2 commit d78d6e6

File tree

examples/performance/performance-consolidated.js
examples/performance/performance-primitives-common.js
examples/performance/performance-primitives-kafkajs.js
examples/performance/performance-primitives.js

4 files changed: +122 −12 lines

examples/performance/performance-consolidated.js

Lines changed: 4 additions & 2 deletions

@@ -172,7 +172,8 @@ function printPercentiles(percentiles, type) {
         const consumerRate = await runConsumer(parameters, topic,
             warmupMessages, messageCount,
             false, partitionsConsumedConcurrently, stats,
-            produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere);
+            produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere,
+            messageSize, limitRPS);
         endTrackingMemory('consumer-each-message', `consumer-memory-message-${mode}.json`);
         console.log("=== Consumer Rate MB/s (eachMessage): ", consumerRate);
         console.log("=== Consumer Rate msg/s (eachMessage): ", stats.messageRate);
@@ -197,7 +198,8 @@ function printPercentiles(percentiles, type) {
         const consumerRate = await runConsumer(parameters, topic,
             warmupMessages, messageCount,
             true, partitionsConsumedConcurrently, stats,
-            produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere);
+            produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere,
+            messageSize, limitRPS);
         endTrackingMemory('consumer-each-batch', `consumer-memory-batch-${mode}.json`);
         console.log("=== Consumer Rate MB/s (eachBatch): ", consumerRate);
         console.log("=== Consumer Rate msg/s (eachBatch): ", stats.messageRate);

examples/performance/performance-primitives-common.js

Lines changed: 38 additions & 0 deletions

@@ -1,5 +1,7 @@
 const { hrtime } = require('process');
 const { randomBytes } = require('crypto');
+const fs = require('fs');
+const { logLevel } = require('../../').KafkaJS;
 const PERCENTILES = [50, 75, 90, 95, 99, 99.9, 99.99, 100];

 const TERMINATE_TIMEOUT_MS = process.env.TERMINATE_TIMEOUT_MS ? +process.env.TERMINATE_TIMEOUT_MS : 600000;
@@ -17,6 +19,41 @@ else {
     }
 }

+class PerformanceLogger {
+    constructor(fileName) {
+        this.logLevel = logLevel.INFO;
+        this.writeStream = fs.createWriteStream(fileName);
+    }
+
+    setLogLevel(logLevel) {
+        this.logLevel = logLevel;
+    }
+
+    info(message, extra) {
+        if (this.logLevel >= logLevel.INFO)
+            this.writeStream.write(`INFO: ${message} ${JSON.stringify(extra)}\n`);
+    }
+
+    error(message, extra) {
+        if (this.logLevel >= logLevel.ERROR)
+            this.writeStream.write(`ERROR: ${message} ${JSON.stringify(extra)}\n`);
+    }
+
+    warn(message, extra) {
+        if (this.logLevel >= logLevel.WARN)
+            this.writeStream.write(`WARN: ${message} ${JSON.stringify(extra)}\n`);
+    }
+
+    debug(message, extra) {
+        if (this.logLevel >= logLevel.DEBUG)
+            this.writeStream.write(`DEBUG: ${message} ${JSON.stringify(extra)}\n`);
+    }
+
+    namespace() {
+        return this;
+    }
+}
+
 function installHandlers(useTerminateTimeout) {
     const handlers = {
         terminationRequested: false,
@@ -569,4 +606,5 @@ module.exports = {
     runLagMonitoring,
     genericProduceToTopic,
     getAutoCommit,
+    PerformanceLogger,
 };
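
A quick usage sketch for the new logger (illustrative only: it assumes the snippet runs from examples/performance so the relative requires resolve, and that ./perf-example.log is writable; the messages and extra fields are made up):

    const { logLevel } = require('../../').KafkaJS;
    const { PerformanceLogger } = require('./performance-primitives-common');

    // Log lines go to a file instead of stdout, so they do not mix with the
    // benchmark's own console output.
    const logger = new PerformanceLogger('./perf-example.log');
    logger.setLogLevel(logLevel.DEBUG);                // allow all levels up to DEBUG
    logger.info('run started', { mode: 'produce' });
    logger.debug('fetch details', { bytes: 1048576 });
    // namespace() returns the logger itself, which is enough for clients that
    // ask for a namespaced child logger.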

examples/performance/performance-primitives-kafkajs.js

Lines changed: 29 additions & 4 deletions

@@ -6,6 +6,7 @@ const {
     runProducer: runProducerCommon,
     genericProduceToTopic,
     getAutoCommit,
+    PerformanceLogger,
 } = require('./performance-primitives-common');

 const {
@@ -21,6 +22,8 @@ module.exports = {
 };

 const IS_HIGHER_LATENCY_CLUSTER = process.env.IS_HIGHER_LATENCY_CLUSTER === 'true';
+const DEBUG = process.env.DEBUG;
+const ENABLE_LOGGING = DEBUG !== undefined;

 function baseConfiguration(parameters) {
     let ret = {
@@ -41,11 +44,18 @@ function baseConfiguration(parameters) {
             }
         };
     }
+    if (parameters.logToFile) {
+        ret.logger = new PerformanceLogger(parameters.logToFile);
+    }
     return ret;
 }

 async function runCreateTopics(parameters, topic, topic2, numPartitions) {
-    const kafka = new Kafka(baseConfiguration(parameters));
+    const adminParameters = {
+        ...parameters,
+        disableLogging: true,
+    };
+    const kafka = new Kafka(baseConfiguration(adminParameters));

     const admin = kafka.admin();
     await admin.connect();
@@ -104,6 +114,9 @@ function newCompatibleProducer(parameters, compression) {
 }

 async function runProducer(parameters, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS) {
+    if (ENABLE_LOGGING && !parameters.logToFile) {
+        parameters.logToFile = './kafkajs-producer.log';
+    }
     return runProducerCommon(newCompatibleProducer(parameters, compression), topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS);
 }

@@ -167,18 +180,30 @@ function newCompatibleConsumer(parameters, eachBatch) {
 }


-async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere) {
+async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere,
+    messageSize, limitRPS) {
     let actionOnMessages = null;
     let producer;
     if (produceToTopic) {
         const newCompatibleProducerFunction = useCKJSProducerEverywhere ?
             newCompatibleProducerCKJS : newCompatibleProducer;
-        producer = newCompatibleProducerFunction(parameters, produceCompression);
+        const producerParameters = {
+            ...parameters,
+        };
+        if (ENABLE_LOGGING)
+            producerParameters.logToFile = './kafkajs-consumer-producer.log';
+        producer = newCompatibleProducerFunction(producerParameters, produceCompression);
         await producer.connect();
         actionOnMessages = (messages) =>
             genericProduceToTopic(producer, produceToTopic, messages);
     }
-    const ret = await runConsumerCommon(newCompatibleConsumer(parameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
+    const consumerParameters = {
+        ...parameters,
+    };
+    if (ENABLE_LOGGING)
+        consumerParameters.logToFile = eachBatch ? './kafkajs-consumer-batch.log' :
+            './kafkajs-consumer-message.log';
+    const ret = await runConsumerCommon(newCompatibleConsumer(consumerParameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
     if (producer) {
         await producer.disconnect();
     }
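
With these changes, setting DEBUG in the environment routes each KafkaJS role to its own log file (./kafkajs-producer.log, ./kafkajs-consumer-message.log, ./kafkajs-consumer-batch.log, and ./kafkajs-consumer-producer.log for the consumer's helper producer). A caller can also opt in without DEBUG by setting logToFile on the parameters object; a sketch, where the connection fields and the runProducer argument values are placeholders rather than values from this commit:

    const { runProducer } = require('./performance-primitives-kafkajs');

    // Hypothetical parameters object; only logToFile is the new field.
    const parameters = {
        brokers: 'localhost:9092',        // placeholder connection settings
        logToFile: './my-producer.log',   // send client logs to this file
    };
    // Argument order per the signature above: (parameters, topic, batchSize,
    // warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS)
    // await runProducer(parameters, 'perf-topic', 100, 0, 100000, 256, null, 0, 0);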

examples/performance/performance-primitives.js

Lines changed: 51 additions & 6 deletions

@@ -7,6 +7,7 @@ const {
     runLagMonitoring: runLagMonitoringCommon,
     genericProduceToTopic,
     getAutoCommit,
+    PerformanceLogger,
 } = require('./performance-primitives-common');

 module.exports = {
@@ -22,6 +23,9 @@ module.exports = {

 const CONSUMER_MAX_BATCH_SIZE = process.env.CONSUMER_MAX_BATCH_SIZE ? +process.env.CONSUMER_MAX_BATCH_SIZE : null;
 const IS_HIGHER_LATENCY_CLUSTER = process.env.IS_HIGHER_LATENCY_CLUSTER === 'true';
+const DEBUG = process.env.DEBUG;
+const STATISTICS_INTERVAL_MS = process.env.STATISTICS_INTERVAL_MS ? +process.env.STATISTICS_INTERVAL_MS : null;
+const ENABLE_LOGGING = DEBUG !== undefined || STATISTICS_INTERVAL_MS !== null;

 function baseConfiguration(parameters) {
     let ret = {
@@ -39,11 +43,29 @@ function baseConfiguration(parameters) {
             'sasl.password': parameters.saslPassword,
         };
     }
+    if (DEBUG && !parameters.disableLogging) {
+        ret['debug'] = DEBUG;
+    }
+    if (parameters.logToFile) {
+        ret.kafkaJS = {
+            'logger': new PerformanceLogger(parameters.logToFile),
+        };
+    }
+    if (STATISTICS_INTERVAL_MS !== null) {
+        ret['statistics.interval.ms'] = STATISTICS_INTERVAL_MS;
+        ret['stats_cb'] = function (event) {
+            this.logger().info(event.message);
+        };
+    }
     return ret;
 }

 async function runCreateTopics(parameters, topic, topic2, numPartitions) {
-    const kafka = new Kafka(baseConfiguration(parameters));
+    const adminParameters = {
+        ...parameters,
+        disableLogging: true,
+    };
+    const kafka = new Kafka(baseConfiguration(adminParameters));

     const admin = kafka.admin();
     await admin.connect();
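
For context, statistics.interval.ms is librdkafka's statistics knob (0, the default, disables it), and the stats_cb hook used above receives each statistics event, whose message is forwarded verbatim to the logger. A minimal sketch of the resulting configuration fragment, assuming event.message carries the statistics JSON string; the interval value and the surrounding object are illustrative:

    // Illustrative configuration fragment, mirroring the change above:
    const statsConfig = {
        'statistics.interval.ms': 15000,   // emit statistics every 15 s; 0 disables
        'stats_cb': function (event) {
            // The client's logger (the file-backed PerformanceLogger when
            // logToFile is set) writes each statistics payload as one INFO line.
            this.logger().info(event.message);
        },
    };
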
@@ -73,7 +95,11 @@ async function runCreateTopics(parameters, topic, topic2, numPartitions) {
 }

 function runLagMonitoring(parameters, topic) {
-    const kafka = new Kafka(baseConfiguration(parameters));
+    const monitoringParameters = {
+        ...parameters,
+        disableLogging: true,
+    };
+    const kafka = new Kafka(baseConfiguration(monitoringParameters));
     const admin = kafka.admin();

     return runLagMonitoringCommon(admin, topic);
@@ -116,6 +142,9 @@ function newCompatibleProducer(parameters, compression) {
 }

 async function runProducer(parameters, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS) {
+    if (ENABLE_LOGGING && !parameters.logToFile) {
+        parameters.logToFile = './confluent-producer.log';
+    }
     return runProducerCommon(newCompatibleProducer(parameters, compression), topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS);
 }

@@ -151,7 +180,8 @@ class CompatibleConsumer {
     }
 }

-function newCompatibleConsumer(parameters, eachBatch) {
+function newCompatibleConsumer(parameters, eachBatch, messageSize, limitRPS) {
+    const minFetchBytes = messageSize * limitRPS;
     const kafka = new Kafka(baseConfiguration(parameters));
     const autoCommit = getAutoCommit();
     const autoCommitOpts = autoCommit > 0 ?
@@ -173,6 +203,7 @@ function newCompatibleConsumer(parameters, eachBatch) {
         'group.id': groupId,
         'auto.offset.reset': 'earliest',
         'fetch.queue.backoff.ms': '100',
+        'fetch.min.bytes': minFetchBytes.toString(),
         ...autoCommitOpts,
         ...jsOpts,
         ...higherLatencyClusterOpts,
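
The new fetch.min.bytes value is sized from the benchmark's own rate limit: messageSize * limitRPS is roughly one second's worth of payload at the capped rate, so the broker batches fetches instead of returning many small responses (it still replies earlier once its maximum fetch wait elapses). A worked example with illustrative numbers, not values from this commit:

    const messageSize = 256;                        // bytes per message (illustrative)
    const limitRPS = 10000;                         // messages per second cap (illustrative)
    const minFetchBytes = messageSize * limitRPS;   // 2,560,000 bytes ≈ 2.44 MiB
    // -> 'fetch.min.bytes': '2560000'
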
@@ -181,16 +212,30 @@
 }


-async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere) {
+async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere,
+    messageSize, limitRPS) {
     let actionOnMessages = null;
     let producer;
     if (produceToTopic) {
-        producer = newCompatibleProducer(parameters, produceCompression);
+        const producerParameters = {
+            ...parameters,
+        };
+        if (ENABLE_LOGGING)
+            producerParameters.logToFile = './confluent-consumer-producer.log';
+        producer = newCompatibleProducer(producerParameters, produceCompression);
         await producer.connect();
         actionOnMessages = (messages) =>
             genericProduceToTopic(producer, produceToTopic, messages);
     }
-    const ret = await runConsumerCommon(newCompatibleConsumer(parameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
+    const consumerParameters = {
+        ...parameters,
+    };
+    if (ENABLE_LOGGING)
+        consumerParameters.logToFile = eachBatch ? './confluent-consumer-batch.log' :
+            './confluent-consumer-message.log';
+    const ret = await runConsumerCommon(
+        newCompatibleConsumer(consumerParameters, eachBatch, messageSize, limitRPS),
+        topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
     if (producer) {
         await producer.disconnect();
     }
