This repository was archived by the owner on May 30, 2024. It is now read-only.

Commit b3a3634

discard and log error about data items that are too big to store (#26)

* discard and log error about data items that are too big to store
* update size limit calculation to use UTF8 byte length & add index overhead
1 parent 0c606a0 commit b3a3634

File tree

* README.md
* dynamodb_feature_store.js
* tests/dynamodb_stores-test.js

3 files changed: +169 -35 lines

README.md

Lines changed: 16 additions & 0 deletions
```diff
@@ -60,6 +60,22 @@ To reduce traffic to DynamoDB, there is an optional in-memory cache that retains
 
 const store = DynamoDBFeatureStore('YOUR TABLE NAME', { cacheTTL: 0 });
 
+## Data size limitation
+
+DynamoDB has [a 400KB limit](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ServiceQuotas.html#limits-items) on the size of any data item. For the LaunchDarkly SDK, a data item consists of the JSON representation of an individual feature flag or segment configuration, plus a few smaller attributes. You can see the format and size of these representations by querying `https://sdk.launchdarkly.com/flags/latest-all` and setting the `Authorization` header to your SDK key.
+
+Most flags and segments won't be nearly as big as 400KB, but they could be if for instance you have a long list of user keys for individual user targeting. If the flag or segment representation is too large, it cannot be stored in DynamoDB. To avoid disrupting storage and evaluation of other unrelated feature flags, the SDK will simply skip storing that individual flag or segment, and will log a message (at ERROR level) describing the problem. For example:
+
+```
+The item "my-flag-key" in "features" was too large to store in DynamoDB and was dropped
+```
+
+If caching is enabled in your configuration, the flag or segment may still be available in the SDK from the in-memory cache, but do not rely on this. If you see this message, consider redesigning your flag/segment configurations, or else do not use DynamoDB for the environment that contains this data item.
+
+This limitation does not apply to target lists in [Big Segments](https://docs.launchdarkly.com/home/users/big-segments/).
+
+A future version of the LaunchDarkly DynamoDB integration may use different strategies to work around this limitation, such as compressing the data or dividing it into multiple items. However, this integration is required to be interoperable with the DynamoDB integrations used by all the other LaunchDarkly SDKs and by the Relay Proxy, so any such change will only be made as part of a larger cross-platform release.
+
 ## About LaunchDarkly
 
 * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can:
```
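
Since the limit applies to the serialized representation, a quick way to gauge whether a flag or segment is at risk is to measure its UTF-8 byte length once stringified. Here is a minimal sketch, not part of this commit; the flag object is illustrative and the 400000-byte threshold simply mirrors the conservative constant introduced in this change:

```js
// Rough pre-check: estimate a flag's serialized size against DynamoDB's item limit.
const flag = { key: 'my-flag-key', version: 1, targets: [{ variation: 0, values: ['user1', 'user2'] }] };

const approxBytes = Buffer.byteLength(JSON.stringify(flag), 'utf8');
if (approxBytes > 400000) {
  console.warn(`Flag "${flag.key}" (~${approxBytes} bytes) is likely too large to store in DynamoDB`);
}
```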

dynamodb_feature_store.js

Lines changed: 33 additions & 2 deletions
```diff
@@ -3,6 +3,11 @@ const CachingStoreWrapper = require('launchdarkly-node-server-sdk/caching_store_
 
 const defaultCacheTTLSeconds = 15;
 
+// We won't try to store items whose total size exceeds this. The DynamoDB documentation says
+// only "400KB", which probably means 400*1024, but to avoid any chance of trying to store a
+// too-large item we are rounding it down.
+const dynamoDbMaxItemSize = 400000;
+
 // Note that the format of parameters in this implementation is a bit different than in the
 // LD DynamoDB integrations for some other platforms, because we are using the
 // AWS.DynamoDB.DocumentClient class, which represents values as simple types like
@@ -79,8 +84,11 @@ function dynamoDBFeatureStoreInternal(tableName, options, logger) {
     allData.forEach(function(collection) {
       collection.items.forEach(function(item) {
         var key = item.key;
-        delete existingNamespaceKeys[namespaceForKind(collection.kind) + '$' + key];
-        ops.push({ PutRequest: { Item: marshalItem(collection.kind, item) } });
+        const dbItem = marshalItem(collection.kind, item);
+        if (checkSizeLimit(dbItem)) {
+          delete existingNamespaceKeys[namespaceForKind(collection.kind) + '$' + key];
+          ops.push({ PutRequest: { Item: dbItem } });
+        }
       });
     });
 
@@ -105,6 +113,12 @@ function dynamoDBFeatureStoreInternal(tableName, options, logger) {
 
   store.upsertInternal = function(kind, item, cb) {
     var params = makeVersionedPutRequest(kind, item);
+    if (!checkSizeLimit(params.Item)) {
+      // We deliberately don't report this back to the SDK as an error, because we don't want to trigger any
+      // useless retry behavior. We just won't do the update.
+      cb(null, null);
+      return;
+    }
 
     // testUpdateHook is instrumentation, used only by the unit tests
     var prepare = store.testUpdateHook || function(prepareCb) { prepareCb(); };
@@ -213,6 +227,23 @@ function dynamoDBFeatureStoreInternal(tableName, options, logger) {
     return item.namespace + '$' + item.key;
   }
 
+  function checkSizeLimit(item) {
+    // see: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/CapacityUnitCalculations.html
+    let size = 100; // fixed overhead for index data
+    for (const [key, value] of Object.entries(item)) {
+      size += key.length + Buffer.byteLength(value.toString());
+    }
+    if (size <= dynamoDbMaxItemSize) {
+      return true;
+    }
+    logSizeLimitError(item.namespace, item.key);
+    return false;
+  }
+
+  function logSizeLimitError(namespace, key) {
+    logger.error(`The item "${key}" in "${namespace}" was too large to store in DynamoDB and was dropped`);
+  }
+
   return store;
 }
 
```
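
For reference, `checkSizeLimit` follows the AWS capacity-unit guidance it links to: each attribute contributes its name length plus the UTF-8 byte length of its stringified value, with a fixed allowance added for index overhead. Below is a standalone sketch of the same arithmetic; the sample item and its attribute names are hypothetical, chosen only to illustrate the shape of a marshalled store item:

```js
// Standalone version of the size estimate, runnable in Node.
const dynamoDbMaxItemSize = 400000; // conservative reading of DynamoDB's "400KB" item limit

function estimateItemSize(item) {
  let size = 100; // fixed overhead for index data, as in the commit
  for (const [key, value] of Object.entries(item)) {
    // attribute name length plus UTF-8 byte length of the stringified value
    size += key.length + Buffer.byteLength(value.toString());
  }
  return size;
}

// Hypothetical marshalled item: namespace/key attributes plus serialized flag JSON.
const sample = { namespace: 'features', key: 'my-flag-key', version: 1, item: '{"key":"my-flag-key","version":1}' };
console.log(estimateItemSize(sample) <= dynamoDbMaxItemSize); // -> true for this small item
```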

tests/dynamodb_stores-test.js

Lines changed: 120 additions & 33 deletions
```diff
@@ -14,7 +14,7 @@ const {
 } = require('launchdarkly-node-server-sdk/sharedtest/store_tests');
 const AWS = require('aws-sdk');
 const { promisify } = require('util');
-const { asyncSleep } = require('launchdarkly-js-test-helpers');
+const { asyncSleep, promisifySingle } = require('launchdarkly-js-test-helpers');
 
 // Runs the standard test suites provided by the SDK's store_tests module, plus some additional
 // tests specific to this package.
@@ -88,57 +88,144 @@ describe('DynamoDBFeatureStore', function() {
       store = DynamoDBFeatureStore(testTableName, { dynamoDBClient: client })({ logger });
     });
 
-    it('error from query in init', done => {
+    it('error from query in init', async () => {
       var data = { features: { flag: { key: 'flag', version: 1 } } };
       client.query = (params, cb) => cb(err);
-      store.init(data, function() {
-        expect(logger.error).toHaveBeenCalled();
-        done();
-      });
+      await promisifySingle(store.init)(data);
+      expect(logger.error).toHaveBeenCalled();
     });
 
-    it('error from batchWrite in init', done => {
+    it('error from batchWrite in init', async () => {
       var data = { features: { flag: { key: 'flag', version: 1 } } };
       client.query = (params, cb) => cb(null, { Items: [] });
       client.batchWrite = (params, cb) => cb(err);
-      store.init(data, function() {
-        expect(logger.error).toHaveBeenCalled();
-        done();
-      });
+      await promisifySingle(store.init)(data);
+      expect(logger.error).toHaveBeenCalled();
     });
 
-    it('error from get', done => {
+    it('error from get', async () => {
       client.get = (params, cb) => cb(err);
-      store.get(dataKind.features, 'flag', function(result) {
-        expect(result).toBe(null);
-        expect(logger.error).toHaveBeenCalled();
-        done();
-      });
+      const result = await promisifySingle(store.get)(dataKind.features, 'flag');
+      expect(result).toBe(null);
+      expect(logger.error).toHaveBeenCalled();
     });
 
-    it('error from get all', done => {
+    it('error from get all', async () => {
       client.query = (params, cb) => cb(err);
-      store.all(dataKind.features, function(result) {
-        expect(result).toBe(null);
-        expect(logger.error).toHaveBeenCalled();
-        done();
-      });
+      const result = await promisifySingle(store.all)(dataKind.features);
+      expect(result).toBe(null);
+      expect(logger.error).toHaveBeenCalled();
    });
 
-    it('error from upsert', done => {
+    it('error from upsert', async () => {
       client.put = (params, cb) => cb(err);
-      store.upsert(dataKind.features, { key: 'flag', version: 1 }, function() {
-        expect(logger.error).toHaveBeenCalled();
-        done();
-      });
+      await promisifySingle(store.upsert)(dataKind.features, { key: 'flag', version: 1 });
+      expect(logger.error).toHaveBeenCalled();
     });
 
-    it('error from initialized', done => {
+    it('error from initialized', async () => {
       client.get = (params, cb) => cb(err);
-      store.initialized(function(result) {
-        expect(result).toBe(false);
-        expect(logger.error).toHaveBeenCalled();
-        done();
+      const result = await promisifySingle(store.initialized)();
+      expect(result).toBe(false);
+      expect(logger.error).toHaveBeenCalled();
+    });
+  });
+
+  describe('items over the data size limit', () => {
+    let logger;
+    let store;
+
+    beforeEach(() => {
+      logger = stubLogger();
+      store = DynamoDBFeatureStore(testTableName, { cacheTTL: 0 })({ logger });
+      // Here we're disabling caching because we want to see if the data is really put into the database. If
+      // there's a cache, data could be stored in and retrieved from the cache even though the data was too
+      // big to go into the database.
+    });
+
+    function makeGoodData() {
+      return {
+        features: {
+          flag1: { key: 'flag1', version: 1 },
+          flag2: { key: 'flag2', version: 1 }
+        },
+        segments: {
+          segment1: { key: 'segment1', version: 1 },
+          segment2: { key: 'segment2', version: 1 },
+        }
+      };
+    }
+
+    async function getAllData() {
+      const flags = await promisifySingle(store.all)(dataKind.features);
+      const segments = await promisifySingle(store.all)(dataKind.segments);
+      return { features: flags, segments: segments };
+    }
+
+    function makeBigKeyList() {
+      const ret = [];
+      for (let i = 0; i < 40000; i++) {
+        ret.push('key' + i);
+      }
+      expect(JSON.stringify(ret).length).toBeGreaterThan(400 * 1024);
+      return ret;
+    }
+
+    function makeTooBigFlag() {
+      return { key: 'flag1a', version: 1, targets: [{ variation: 0, values: makeBigKeyList() }] };
+    }
+
+    function makeTooBigSegment() {
+      return { key: 'segment1a', version: 1, included: makeBigKeyList() };
+    }
+
+    function expectSizeLimitErrorLog(logger) {
+      expect(logger.error.mock.calls.length).toBe(1);
+      expect(logger.error.mock.calls[0][0]).toContain('was too large to store in DynamoDB and was dropped');
+    }
+
+    describe('skips and logs too-large item in init', () => {
+      async function testInit(kind, item) {
+        await clearData();
+
+        const data = makeGoodData();
+        data[kind.namespace][item.key] = item;
+        await promisifySingle(store.init)(data);
+
+        expectSizeLimitErrorLog(logger);
+        expect(await getAllData()).toEqual(makeGoodData());
+      }
+
+      it('flag', async () => {
+        await testInit(dataKind.features, makeTooBigFlag());
+      });
+
+      it('segment', async () => {
+        await testInit(dataKind.segments, makeTooBigSegment());
+      });
+    });
+
+    describe('skips and logs too-large item in upsert', () => {
+      async function testUpsert(kind, item) {
+        await clearData();
+
+        const data = makeGoodData();
+        await promisifySingle(store.init)(data);
+        expect(logger.error).not.toHaveBeenCalled();
+        expect(await getAllData()).toEqual(makeGoodData());
+
+        await promisify(store.upsert)(kind, item);
+
+        expectSizeLimitErrorLog(logger);
+        expect(await getAllData()).toEqual(makeGoodData());
+      }
+
+      it('flag', async () => {
+        await testUpsert(dataKind.features, makeTooBigFlag());
+      });
+
+      it('segment', async () => {
+        await testUpsert(dataKind.segments, makeTooBigSegment());
       });
     });
   });
```
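
The rewritten tests lean on `promisifySingle` because these store methods invoke their callback with a single result value, unlike the Node-style `(err, result)` signature that `util.promisify` expects. A simplified model of the helper's assumed behavior is sketched below; the real implementation lives in `launchdarkly-js-test-helpers`, and `promisifySingleSketch` is just an illustrative name:

```js
// Sketch: wrap a function whose final argument is a single-value callback,
// resolving the returned promise with that one value.
function promisifySingleSketch(fn) {
  return (...args) =>
    new Promise(resolve => {
      fn(...args, result => resolve(result));
    });
}

// Usage mirroring the tests:
//   const result = await promisifySingleSketch(store.get)(dataKind.features, 'flag');
```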
