
Commit 33a08cc

feat(logs): Drop log events once buffer hits hard limit (#4889)
* Drop log events once buffer hits hard limit
* changelog
* test hard limit
* record client report
* format
1 parent 46eff2a commit 33a08cc

4 files changed: +119 -0 lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -13,6 +13,8 @@
 - For feature flag evaluations tracked on spans:
   - Only 10 evaluations are tracked per span, existing flags are updated but new ones exceeding the limit are ignored
   - Spans do not inherit evaluations from their parent
+- Drop log events once buffer hits hard limit ([#4889](https://github.com/getsentry/sentry-java/pull/4889))
+  - If we have 1000 log events queued up, we drop any new logs coming in to prevent OOM
 - Remove vendored code and upgrade to async profiler 4.2 ([#4856](https://github.com/getsentry/sentry-java/pull/4856))
   - This adds support for JDK 23+
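The changelog entry above is the user-facing summary: once the internal log buffer holds 1000 pending events, any additional log event is dropped instead of being queued, so a slow or offline transport cannot grow the queue without bound. Below is a minimal sketch of that drop-on-hard-limit pattern; the class and member names are illustrative assumptions, not the actual sentry-java implementation (the real change is in LoggerBatchProcessor further down).

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;

// Minimal sketch of a drop-on-hard-limit buffer; names are illustrative.
final class BoundedLogBuffer<T> {
  private static final int MAX_QUEUE_SIZE = 1000;

  private final Queue<T> queue = new ConcurrentLinkedQueue<>();
  private final AtomicInteger pending = new AtomicInteger();

  // Returns false when the event is dropped because the buffer is at the hard limit.
  boolean add(T event) {
    if (pending.get() >= MAX_QUEUE_SIZE) {
      // Drop the new event rather than growing the queue, preventing OOM when
      // events arrive faster than they can be flushed. (Best-effort check:
      // concurrent adds may briefly overshoot, as with any check-then-act.)
      return false;
    }
    pending.incrementAndGet();
    queue.offer(event);
    return true;
  }

  // Consumer side: take one event and release its slot.
  T poll() {
    T event = queue.poll();
    if (event != null) {
      pending.decrementAndGet();
    }
    return event;
  }
}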

sentry/api/sentry.api

Lines changed: 1 addition & 0 deletions
@@ -5034,6 +5034,7 @@ public final class io/sentry/logger/LoggerApi : io/sentry/logger/ILoggerApi {
 public final class io/sentry/logger/LoggerBatchProcessor : io/sentry/logger/ILoggerBatchProcessor {
 	public static final field FLUSH_AFTER_MS I
 	public static final field MAX_BATCH_SIZE I
+	public static final field MAX_QUEUE_SIZE I
 	public fun <init> (Lio/sentry/SentryOptions;Lio/sentry/ISentryClient;)V
 	public fun add (Lio/sentry/SentryLogEvent;)V
 	public fun close (Z)V

sentry/src/main/java/io/sentry/logger/LoggerBatchProcessor.java

Lines changed: 15 additions & 0 deletions
@@ -1,5 +1,6 @@
 package io.sentry.logger;
 
+import io.sentry.DataCategory;
 import io.sentry.ISentryClient;
 import io.sentry.ISentryExecutorService;
 import io.sentry.ISentryLifecycleToken;
@@ -8,8 +9,10 @@
 import io.sentry.SentryLogEvent;
 import io.sentry.SentryLogEvents;
 import io.sentry.SentryOptions;
+import io.sentry.clientreport.DiscardReason;
 import io.sentry.transport.ReusableCountLatch;
 import io.sentry.util.AutoClosableReentrantLock;
+import io.sentry.util.JsonSerializationUtils;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Queue;
@@ -24,6 +27,7 @@ public final class LoggerBatchProcessor implements ILoggerBatchProcessor {
 
   public static final int FLUSH_AFTER_MS = 5000;
   public static final int MAX_BATCH_SIZE = 100;
+  public static final int MAX_QUEUE_SIZE = 1000;
 
   private final @NotNull SentryOptions options;
   private final @NotNull ISentryClient client;
@@ -46,6 +50,17 @@ public LoggerBatchProcessor(
 
   @Override
   public void add(final @NotNull SentryLogEvent logEvent) {
+    if (pendingCount.getCount() >= MAX_QUEUE_SIZE) {
+      options
+          .getClientReportRecorder()
+          .recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.LogItem);
+      final long lostBytes =
+          JsonSerializationUtils.byteSizeOf(options.getSerializer(), options.getLogger(), logEvent);
+      options
+          .getClientReportRecorder()
+          .recordLostEvent(DiscardReason.QUEUE_OVERFLOW, DataCategory.Attachment, lostBytes);
+      return;
+    }
     pendingCount.increment();
     queue.offer(logEvent);
     maybeSchedule(false, false);
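In the add() hunk above, a dropped log produces two client-report entries: the first records the lost log item itself, and the second records its serialized size under the attachment data category, so byte-based accounting also reflects the loss. Below is a condensed sketch of that bookkeeping against a simplified recorder interface; the interface and the string constants are illustrative assumptions, not the real io.sentry.clientreport API.

// Simplified sketch of the drop-time bookkeeping shown in the diff above;
// the recorder interface and string constants are illustrative assumptions.
interface LostEventRecorder {
  void recordLostEvent(String reason, String category, long quantity);
}

final class DropAccounting {
  static void recordDroppedLog(LostEventRecorder recorder, long serializedBytes) {
    // One entry counts the dropped log item itself ...
    recorder.recordLostEvent("queue_overflow", "log_item", 1);
    // ... and a second attributes the serialized payload size to the
    // attachment category, mirroring the two recordLostEvent calls above.
    recorder.recordLostEvent("queue_overflow", "attachment", serializedBytes);
  }
}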
LoggerBatchProcessorTest.kt (new file)

Lines changed: 101 additions & 0 deletions
@@ -0,0 +1,101 @@
+package io.sentry.logger
+
+import io.sentry.DataCategory
+import io.sentry.ISentryClient
+import io.sentry.SentryLogEvent
+import io.sentry.SentryLogEvents
+import io.sentry.SentryLogLevel
+import io.sentry.SentryNanotimeDate
+import io.sentry.SentryOptions
+import io.sentry.clientreport.ClientReportTestHelper
+import io.sentry.clientreport.DiscardReason
+import io.sentry.clientreport.DiscardedEvent
+import io.sentry.protocol.SentryId
+import io.sentry.test.DeferredExecutorService
+import io.sentry.test.injectForField
+import io.sentry.util.JsonSerializationUtils
+import kotlin.test.Test
+import kotlin.test.assertEquals
+import kotlin.test.assertFalse
+import kotlin.test.assertTrue
+import org.mockito.kotlin.argumentCaptor
+import org.mockito.kotlin.atLeast
+import org.mockito.kotlin.mock
+import org.mockito.kotlin.verify
+
+class LoggerBatchProcessorTest {
+  @Test
+  fun `drops log events after reaching MAX_QUEUE_SIZE limit`() {
+    // given
+    val mockClient = mock<ISentryClient>()
+    val mockExecutor = DeferredExecutorService()
+    val options = SentryOptions()
+    val processor = LoggerBatchProcessor(options, mockClient)
+    processor.injectForField("executorService", mockExecutor)
+
+    for (i in 1..1001) {
+      val logEvent =
+        SentryLogEvent(SentryId(), SentryNanotimeDate(), "log message $i", SentryLogLevel.INFO)
+      processor.add(logEvent)
+    }
+
+    // run twice since a non full batch would be scheduled at the end
+    mockExecutor.runAll()
+    mockExecutor.runAll()
+
+    // assert that the transport received 1000 log events
+    val captor = argumentCaptor<SentryLogEvents>()
+    verify(mockClient, atLeast(1)).captureBatchedLogEvents(captor.capture())
+
+    val allCapturedEvents = mutableListOf<SentryLogEvent>()
+    captor.allValues.forEach { logEvents -> allCapturedEvents.addAll(logEvents.items) }
+
+    assertEquals(1000, allCapturedEvents.size)
+
+    // assert that log 1001 did not make it but log 1000 did get sent
+    val log1000Found = allCapturedEvents.any { it.body == "log message 1000" }
+    val log1001Found = allCapturedEvents.any { it.body == "log message 1001" }
+
+    assertTrue(log1000Found, "Log 1000 should have been sent")
+    assertFalse(log1001Found, "Log 1001 should not have been sent")
+  }
+
+  @Test
+  fun `records client report when log event is dropped due to queue overflow`() {
+    // given
+    val mockClient = mock<ISentryClient>()
+    val mockExecutor = DeferredExecutorService()
+    val options = SentryOptions()
+    val processor = LoggerBatchProcessor(options, mockClient)
+    processor.injectForField("executorService", mockExecutor)
+
+    // fill the queue to MAX_QUEUE_SIZE
+    for (i in 1..1000) {
+      val logEvent =
+        SentryLogEvent(SentryId(), SentryNanotimeDate(), "log message $i", SentryLogLevel.INFO)
+      processor.add(logEvent)
+    }
+
+    // add one more log event that should be dropped
+    val droppedLogEvent =
+      SentryLogEvent(SentryId(), SentryNanotimeDate(), "dropped log", SentryLogLevel.INFO)
+    processor.add(droppedLogEvent)
+
+    // calculate expected bytes for the dropped log event
+    val expectedBytes =
+      JsonSerializationUtils.byteSizeOf(options.serializer, options.logger, droppedLogEvent)
+
+    // verify that a client report was recorded for the dropped log item and bytes
+    val expectedEvents =
+      mutableListOf(
+        DiscardedEvent(DiscardReason.QUEUE_OVERFLOW.reason, DataCategory.LogItem.category, 1),
+        DiscardedEvent(
+          DiscardReason.QUEUE_OVERFLOW.reason,
+          DataCategory.Attachment.category,
+          expectedBytes,
+        ),
+      )
+
+    ClientReportTestHelper.assertClientReport(options.clientReportRecorder, expectedEvents)
+  }
+}
