Skip to content

Commit edaad22

Browse files
committed
Logger and statistics callback
1 parent 5eaa4e2 commit edaad22

File tree

4 files changed

+119
-11
lines changed

4 files changed

+119
-11
lines changed

examples/performance/performance-consolidated.js

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,8 @@ function printPercentiles(percentiles, type) {
172172
const consumerRate = await runConsumer(parameters, topic,
173173
warmupMessages, messageCount,
174174
false, partitionsConsumedConcurrently, stats,
175-
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere);
175+
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere,
176+
messageSize, limitRPS);
176177
endTrackingMemory('consumer-each-message', `consumer-memory-message-${mode}.json`);
177178
console.log("=== Consumer Rate MB/s (eachMessage): ", consumerRate);
178179
console.log("=== Consumer Rate msg/s (eachMessage): ", stats.messageRate);
@@ -197,7 +198,8 @@ function printPercentiles(percentiles, type) {
197198
const consumerRate = await runConsumer(parameters, topic,
198199
warmupMessages, messageCount,
199200
true, partitionsConsumedConcurrently, stats,
200-
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere);
201+
produceToSecondTopic ? topic2 : null, compression, useCKJSProducerEverywhere,
202+
messageSize, limitRPS);
201203
endTrackingMemory('consumer-each-batch', `consumer-memory-batch-${mode}.json`);
202204
console.log("=== Consumer Rate MB/s (eachBatch): ", consumerRate);
203205
console.log("=== Consumer Rate msg/s (eachBatch): ", stats.messageRate);

examples/performance/performance-primitives-common.js

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
const { hrtime } = require('process');
22
const { randomBytes } = require('crypto');
3+
const fs = require('fs');
4+
const { logLevel } = require('../../').KafkaJS;
35
const PERCENTILES = [50, 75, 90, 95, 99, 99.9, 99.99, 100];
46

57
const TERMINATE_TIMEOUT_MS = process.env.TERMINATE_TIMEOUT_MS ? +process.env.TERMINATE_TIMEOUT_MS : 600000;
@@ -17,6 +19,41 @@ else {
1719
}
1820
}
1921

22+
class PerformanceLogger {
23+
constructor(fileName) {
24+
this.logLevel = logLevel.INFO;
25+
this.writeStream = fs.createWriteStream(fileName);
26+
}
27+
28+
setLogLevel(logLevel) {
29+
this.logLevel = logLevel;
30+
}
31+
32+
info(message, extra) {
33+
if (this.logLevel >= logLevel.INFO)
34+
this.writeStream.write(`INFO: ${message} ${JSON.stringify(extra)}\n`);
35+
}
36+
37+
error(message, extra) {
38+
if (this.logLevel >= logLevel.ERROR)
39+
this.writeStream.write(`ERROR: ${message} ${JSON.stringify(extra)}\n`);
40+
}
41+
42+
warn(message, extra) {
43+
if (this.logLevel >= logLevel.WARN)
44+
this.writeStream.write(`WARN: ${message} ${JSON.stringify(extra)}\n`);
45+
}
46+
47+
debug(message, extra) {
48+
if (this.logLevel >= logLevel.DEBUG)
49+
this.writeStream.write(`DEBUG: ${message} ${JSON.stringify(extra)}\n`);
50+
}
51+
52+
namespace() {
53+
return this;
54+
}
55+
}
56+
2057
function installHandlers(useTerminateTimeout) {
2158
const handlers = {
2259
terminationRequested: false,
@@ -569,4 +606,5 @@ module.exports = {
569606
runLagMonitoring,
570607
genericProduceToTopic,
571608
getAutoCommit,
609+
PerformanceLogger,
572610
};

examples/performance/performance-primitives-kafkajs.js

Lines changed: 30 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ const {
66
runProducer: runProducerCommon,
77
genericProduceToTopic,
88
getAutoCommit,
9+
PerformanceLogger,
910
} = require('./performance-primitives-common');
1011

1112
const {
@@ -21,6 +22,8 @@ module.exports = {
2122
};
2223

2324
const IS_HIGHER_LATENCY_CLUSTER = process.env.IS_HIGHER_LATENCY_CLUSTER === 'true';
25+
const DEBUG = process.env.DEBUG;
26+
const ENABLE_LOGGING = DEBUG !== undefined;
2427

2528
function baseConfiguration(parameters) {
2629
let ret = {
@@ -41,11 +44,19 @@ function baseConfiguration(parameters) {
4144
}
4245
};
4346
}
47+
if (parameters.logToFile) {
48+
ret.logger = new PerformanceLogger(parameters.logToFile);
49+
}
4450
return ret;
4551
}
4652

4753
async function runCreateTopics(parameters, topic, topic2, numPartitions) {
48-
const kafka = new Kafka(baseConfiguration(parameters));
54+
const adminParameters = {
55+
...parameters,
56+
};
57+
if (ENABLE_LOGGING)
58+
adminParameters.logToFile = './kafkajs-admin.log';
59+
const kafka = new Kafka(baseConfiguration(adminParameters));
4960

5061
const admin = kafka.admin();
5162
await admin.connect();
@@ -104,6 +115,9 @@ function newCompatibleProducer(parameters, compression) {
104115
}
105116

106117
async function runProducer(parameters, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS) {
118+
if (ENABLE_LOGGING && !parameters.logToFile) {
119+
parameters.logToFile = './kafkajs-producer.log';
120+
}
107121
return runProducerCommon(newCompatibleProducer(parameters, compression), topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS);
108122
}
109123

@@ -167,18 +181,30 @@ function newCompatibleConsumer(parameters, eachBatch) {
167181
}
168182

169183

170-
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere) {
184+
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere,
185+
messageSize, limitRPS) {
171186
let actionOnMessages = null;
172187
let producer;
173188
if (produceToTopic) {
174189
const newCompatibleProducerFunction = useCKJSProducerEverywhere ?
175190
newCompatibleProducerCKJS : newCompatibleProducer;
176-
producer = newCompatibleProducerFunction(parameters, produceCompression);
191+
const producerParameters = {
192+
...parameters,
193+
};
194+
if (ENABLE_LOGGING)
195+
producerParameters.logToFile = './kafkajs-consumer-producer.log';
196+
producer = newCompatibleProducerFunction(producerParameters, produceCompression);
177197
await producer.connect();
178198
actionOnMessages = (messages) =>
179199
genericProduceToTopic(producer, produceToTopic, messages);
180200
}
181-
const ret = await runConsumerCommon(newCompatibleConsumer(parameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
201+
const consumerParameters = {
202+
...parameters,
203+
};
204+
if (ENABLE_LOGGING)
205+
consumerParameters.logToFile = eachBatch ? './kafkajs-consumer-batch.log' :
206+
'./kafkajs-consumer-message.log';
207+
const ret = await runConsumerCommon(newCompatibleConsumer(consumerParameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
182208
if (producer) {
183209
await producer.disconnect();
184210
}

examples/performance/performance-primitives.js

Lines changed: 47 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ const {
77
runLagMonitoring: runLagMonitoringCommon,
88
genericProduceToTopic,
99
getAutoCommit,
10+
PerformanceLogger,
1011
} = require('./performance-primitives-common');
1112

1213
module.exports = {
@@ -22,6 +23,9 @@ module.exports = {
2223

2324
const CONSUMER_MAX_BATCH_SIZE = process.env.CONSUMER_MAX_BATCH_SIZE ? +process.env.CONSUMER_MAX_BATCH_SIZE : null;
2425
const IS_HIGHER_LATENCY_CLUSTER = process.env.IS_HIGHER_LATENCY_CLUSTER === 'true';
26+
const DEBUG = process.env.DEBUG;
27+
const STATISTICS_INTERVAL_MS = process.env.STATISTICS_INTERVAL_MS ? +process.env.STATISTICS_INTERVAL_MS : null;
28+
const ENABLE_LOGGING = DEBUG !== undefined || STATISTICS_INTERVAL_MS !== null;
2529

2630
function baseConfiguration(parameters) {
2731
let ret = {
@@ -39,11 +43,30 @@ function baseConfiguration(parameters) {
3943
'sasl.password': parameters.saslPassword,
4044
};
4145
}
46+
if (DEBUG) {
47+
ret['debug'] = DEBUG;
48+
}
49+
if (parameters.logToFile) {
50+
ret.kafkaJS = {
51+
'logger': new PerformanceLogger(parameters.logToFile),
52+
};
53+
}
54+
if (STATISTICS_INTERVAL_MS !== null) {
55+
ret['statistics.interval.ms'] = STATISTICS_INTERVAL_MS;
56+
ret['stats_cb'] = function (event) {
57+
this.logger().info(event.message);
58+
};
59+
}
4260
return ret;
4361
}
4462

4563
async function runCreateTopics(parameters, topic, topic2, numPartitions) {
46-
const kafka = new Kafka(baseConfiguration(parameters));
64+
const adminParameters = {
65+
...parameters,
66+
};
67+
if (ENABLE_LOGGING)
68+
adminParameters.logToFile = './confluent-admin.log';
69+
const kafka = new Kafka(baseConfiguration(adminParameters));
4770

4871
const admin = kafka.admin();
4972
await admin.connect();
@@ -116,6 +139,9 @@ function newCompatibleProducer(parameters, compression) {
116139
}
117140

118141
async function runProducer(parameters, topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS) {
142+
if (ENABLE_LOGGING && !parameters.logToFile) {
143+
parameters.logToFile = './confluent-producer.log';
144+
}
119145
return runProducerCommon(newCompatibleProducer(parameters, compression), topic, batchSize, warmupMessages, totalMessageCnt, msgSize, compression, randomness, limitRPS);
120146
}
121147

@@ -151,7 +177,8 @@ class CompatibleConsumer {
151177
}
152178
}
153179

154-
function newCompatibleConsumer(parameters, eachBatch) {
180+
function newCompatibleConsumer(parameters, eachBatch, messageSize, limitRPS) {
181+
const minFetchBytes = messageSize * limitRPS;
155182
const kafka = new Kafka(baseConfiguration(parameters));
156183
const autoCommit = getAutoCommit();
157184
const autoCommitOpts = autoCommit > 0 ?
@@ -173,6 +200,7 @@ function newCompatibleConsumer(parameters, eachBatch) {
173200
'group.id': groupId,
174201
'auto.offset.reset': 'earliest',
175202
'fetch.queue.backoff.ms': '100',
203+
'fetch.min.bytes': minFetchBytes.toString(),
176204
...autoCommitOpts,
177205
...jsOpts,
178206
...higherLatencyClusterOpts,
@@ -181,16 +209,30 @@ function newCompatibleConsumer(parameters, eachBatch) {
181209
}
182210

183211

184-
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere) {
212+
async function runConsumer(parameters, topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, produceToTopic, produceCompression, useCKJSProducerEverywhere,
213+
messageSize, limitRPS) {
185214
let actionOnMessages = null;
186215
let producer;
187216
if (produceToTopic) {
188-
producer = newCompatibleProducer(parameters, produceCompression);
217+
const producerParameters = {
218+
...parameters,
219+
};
220+
if (ENABLE_LOGGING)
221+
producerParameters.logToFile = './confluent-consumer-producer.log';
222+
producer = newCompatibleProducer(producerParameters, produceCompression);
189223
await producer.connect();
190224
actionOnMessages = (messages) =>
191225
genericProduceToTopic(producer, produceToTopic, messages);
192226
}
193-
const ret = await runConsumerCommon(newCompatibleConsumer(parameters, eachBatch), topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
227+
const consumerParameters = {
228+
...parameters,
229+
};
230+
if (ENABLE_LOGGING)
231+
consumerParameters.logToFile = eachBatch ? './confluent-consumer-batch.log' :
232+
'./confluent-consumer-message.log';
233+
const ret = await runConsumerCommon(
234+
newCompatibleConsumer(consumerParameters, eachBatch, messageSize, limitRPS),
235+
topic, warmupMessages, totalMessageCnt, eachBatch, partitionsConsumedConcurrently, stats, actionOnMessages);
194236
if (producer) {
195237
await producer.disconnect();
196238
}

0 commit comments

Comments (0)