Skip to content

Commit 6e31cbe

Browse files
authored
Implementation of Big Segments store support. (#15)
1 parent e0fabe2 commit 6e31cbe

File tree

9 files changed

+395
-175
lines changed

9 files changed

+395
-175
lines changed

build.gradle

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ allprojects {
3838
}
3939

4040
ext.versions = [
41-
"sdk": "5.0.0", // the *lowest* version we're compatible with
41+
"sdk": "5.7.0", // the *lowest* version we're compatible with
4242
"dynamodb": "2.10.32",
4343
"slf4j": "1.7.21"
4444
]

src/main/java/com/launchdarkly/sdk/server/integrations/DynamoDb.java

Lines changed: 32 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,8 @@
11
package com.launchdarkly.sdk.server.integrations;
22

3+
import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreFactory;
4+
import com.launchdarkly.sdk.server.interfaces.PersistentDataStoreFactory;
5+
36
/**
47
* Integration between the LaunchDarkly SDK and DynamoDB.
58
*
@@ -9,18 +12,39 @@ public abstract class DynamoDb {
912
/**
1013
* Returns a builder object for creating a DynamoDB-backed data store.
1114
* <p>
12-
* This object can be modified with {@link DynamoDbDataStoreBuilder} methods for any desired
13-
* custom DynamoDB options. Then, pass it to
14-
* {@link com.launchdarkly.sdk.server.Components#persistentDataStore(com.launchdarkly.sdk.server.interfaces.PersistentDataStoreFactory)}
15-
* and set any desired caching options. Finally, pass the result to
16-
* {@link com.launchdarkly.sdk.server.LDConfig.Builder#dataStore(com.launchdarkly.sdk.server.interfaces.DataStoreFactory)}.
17-
* For example:
18-
*
15+
* This can be used either for the main data store that holds feature flag data, or for the Big
16+
* Segment store, or both. If you are using both, they do not have to have the same parameters.
17+
* For instance, in this example the main data store uses a table called "table1" and the Big
18+
* Segment store uses a table called "table2":
19+
*
20+
* <pre><code>
21+
* LDConfig config = new LDConfig.Builder()
22+
* .dataStore(
23+
* Components.persistentDataStore(
24+
* DynamoDb.dataStore("table1")
25+
* )
26+
* )
27+
* .bigSegments(
28+
* Components.bigSegments(
29+
* DynamoDb.dataStore("table2")
30+
* )
31+
* )
32+
* .build();
33+
* </code></pre>
34+
*
35+
* Note that the builder is passed to one of two methods,
36+
* {@link com.launchdarkly.sdk.server.Components#persistentDataStore(PersistentDataStoreFactory)} or
37+
* {@link com.launchdarkly.sdk.server.Components#bigSegments(BigSegmentStoreFactory)}, depending on
38+
* the context in which it is being used. This is because each of those contexts has its own
39+
* additional configuration options that are unrelated to the DynamoDb options. For instance, the
40+
* {@link com.launchdarkly.sdk.server.Components#persistentDataStore(PersistentDataStoreFactory)}
41+
* builder has options for caching:
42+
*
1943
* <pre><code>
2044
* LDConfig config = new LDConfig.Builder()
2145
* .dataStore(
2246
* Components.persistentDataStore(
23-
* DynamoDb.dataStore("my-table-name")
47+
* DynamoDb.dataStore("table1")
2448
* ).cacheSeconds(15)
2549
* )
2650
* .build();
Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
package com.launchdarkly.sdk.server.integrations;

import static com.launchdarkly.sdk.server.interfaces.BigSegmentStoreTypes.createMembershipFromSegmentRefs;

import com.launchdarkly.sdk.server.interfaces.BigSegmentStore;
import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreTypes;

import java.util.List;

import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.GetItemResponse;

/**
 * DynamoDB-backed implementation of the SDK's Big Segment store.
 * <p>
 * Per-user membership records and the store metadata item live in the same table as the
 * main data store, under dedicated namespace keys (optionally prefixed, via
 * {@code prefixedNamespace} from the base class).
 */
public class DynamoDbBigSegmentStoreImpl extends DynamoDbStoreImplBase implements BigSegmentStore {
  // Namespace and attribute names for per-user membership items.
  private static final String MEMBERSHIP_KEY = "big_segments_user";
  private static final String INCLUDED_ATTR = "included";
  private static final String EXCLUDED_ATTR = "excluded";

  // Namespace and attribute names for the single store-metadata item.
  private static final String METADATA_KEY = "big_segments_metadata";
  private static final String SYNC_TIME_ATTR = "synchronizedOn";

  DynamoDbBigSegmentStoreImpl(DynamoDbClient client, boolean wasExistingClient, String tableName, String prefix) {
    super(client, wasExistingClient, tableName, prefix);
  }

  @Override
  public BigSegmentStoreTypes.Membership getMembership(String userHash) {
    GetItemResponse response = getItemByKeys(prefixedNamespace(MEMBERSHIP_KEY), userHash);
    if (response == null || response.item() == null || response.item().isEmpty()) {
      // No membership item stored for this user hash.
      return null;
    }
    List<String> included = refsOrNull(response.item().get(INCLUDED_ATTR));
    List<String> excluded = refsOrNull(response.item().get(EXCLUDED_ATTR));
    return createMembershipFromSegmentRefs(included, excluded);
  }

  // Returns the string-set contents of an attribute, or null when the attribute is absent.
  private static List<String> refsOrNull(AttributeValue value) {
    if (value == null) {
      return null;
    }
    return value.ss();
  }

  @Override
  public BigSegmentStoreTypes.StoreMetadata getMetadata() {
    String key = prefixedNamespace(METADATA_KEY);
    // The metadata item uses the same value for both the partition and sort keys.
    GetItemResponse response = getItemByKeys(key, key);
    if (response == null || response.item() == null) {
      return null;
    }
    AttributeValue syncTimeValue = response.item().get(SYNC_TIME_ATTR);
    String syncTimeString = (syncTimeValue == null) ? null : syncTimeValue.n();
    if (syncTimeString == null) {
      return null;
    }
    return new BigSegmentStoreTypes.StoreMetadata(Long.parseLong(syncTimeString));
  }
}

src/main/java/com/launchdarkly/sdk/server/integrations/DynamoDbDataStoreBuilder.java

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
package com.launchdarkly.sdk.server.integrations;
22

33
import com.launchdarkly.sdk.LDValue;
4-
import com.launchdarkly.sdk.server.LDConfig;
54
import com.launchdarkly.sdk.server.interfaces.BasicConfiguration;
5+
import com.launchdarkly.sdk.server.interfaces.BigSegmentStore;
6+
import com.launchdarkly.sdk.server.interfaces.BigSegmentStoreFactory;
67
import com.launchdarkly.sdk.server.interfaces.ClientContext;
78
import com.launchdarkly.sdk.server.interfaces.DiagnosticDescription;
89
import com.launchdarkly.sdk.server.interfaces.PersistentDataStore;
@@ -19,22 +20,17 @@
1920
/**
2021
* Builder/factory class for the DynamoDB data store.
2122
* <p>
22-
* Obtain an instance of this class by calling {@link com.launchdarkly.sdk.server.integrations.DynamoDb#dataStore(String)}.
23-
* After calling its methods to specify any desired custom settings, wrap it in a
24-
* {@link com.launchdarkly.sdk.server.integrations.PersistentDataStoreBuilder}
25-
* by calling {@code Components.persistentDataStore()}, then pass the result into the SDK configuration with
26-
* {@link com.launchdarkly.sdk.server.LDConfig.Builder#dataStore(com.launchdarkly.sdk.server.interfaces.DataStoreFactory)}.
27-
* You do not need to call {@link #createPersistentDataStore(ClientContext)} yourself to build the actual data store; that
28-
* will be done by the SDK.
23+
* Examples of configuring the SDK with this builder are described in the documentation for
24+
* {@link DynamoDb#dataStore(String)}, which returns an instance of this class.
2925
* <p>
3026
* The AWS SDK provides many configuration options for a DynamoDB client. This class has
3127
* corresponding methods for some of the most commonly used ones. If you need more sophisticated
3228
* control over the DynamoDB client, you can construct one of your own and pass it in with the
3329
* {@link #existingClient(DynamoDbClient)} method.
34-
*
30+
*
3531
* @since 2.1.0
3632
*/
37-
public final class DynamoDbDataStoreBuilder implements PersistentDataStoreFactory, DiagnosticDescription {
33+
public final class DynamoDbDataStoreBuilder implements PersistentDataStoreFactory, BigSegmentStoreFactory, DiagnosticDescription {
3834
private final String tableName;
3935

4036
private String prefix;
@@ -111,7 +107,7 @@ public DynamoDbDataStoreBuilder prefix(String prefix) {
111107
* Specifies an existing, already-configured DynamoDB client instance that the data store
112108
* should use rather than creating one of its own. If you specify an existing client, then the
113109
* other builder methods for configuring DynamoDB are ignored.
114-
*
110+
*
115111
* @param existingClient an existing DynamoDB client instance
116112
* @return the builder
117113
*/
@@ -127,7 +123,17 @@ public DynamoDbDataStoreBuilder existingClient(DynamoDbClient existingClient) {
127123
@Override
128124
public PersistentDataStore createPersistentDataStore(ClientContext context) {
129125
DynamoDbClient client = (existingClient != null) ? existingClient : clientBuilder.build();
130-
return new DynamoDbDataStoreImpl(client, tableName, prefix);
126+
return new DynamoDbDataStoreImpl(client, existingClient != null, tableName, prefix);
127+
}
128+
129+
/**
130+
* Called internally by the SDK to create the actual Big Segment store instance.
131+
* @return the Big Segment store configured by this builder
132+
*/
133+
@Override
134+
public BigSegmentStore createBigSegmentStore(ClientContext context) {
135+
DynamoDbClient client = (existingClient != null) ? existingClient : clientBuilder.build();
136+
return new DynamoDbBigSegmentStoreImpl(client, existingClient != null, tableName, prefix);
131137
}
132138

133139
@Override

src/main/java/com/launchdarkly/sdk/server/integrations/DynamoDbDataStoreImpl.java

Lines changed: 22 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
import org.slf4j.Logger;
1010
import org.slf4j.LoggerFactory;
1111

12-
import java.io.IOException;
1312
import java.util.AbstractMap;
1413
import java.util.ArrayList;
1514
import java.util.HashSet;
@@ -59,29 +58,17 @@
5958
* stored as a single item, this mechanism will not work for extremely large flags or segments.
6059
* </ul>
6160
*/
62-
final class DynamoDbDataStoreImpl implements PersistentDataStore {
61+
final class DynamoDbDataStoreImpl extends DynamoDbStoreImplBase implements PersistentDataStore {
6362
private static final Logger logger = LoggerFactory.getLogger("com.launchdarkly.sdk.server.LDClient.DataStore.DynamoDB");
6463

65-
static final String partitionKey = "namespace";
66-
static final String sortKey = "key";
6764
private static final String versionAttribute = "version";
6865
private static final String itemJsonAttribute = "item";
6966
private static final String deletedItemPlaceholder = "null"; // DynamoDB doesn't allow empty strings
70-
private final DynamoDbClient client;
71-
private final String tableName;
72-
private final String prefix;
73-
67+
7468
private Runnable updateHook;
7569

76-
DynamoDbDataStoreImpl(DynamoDbClient client, String tableName, String prefix) {
77-
this.client = client;
78-
this.tableName = tableName;
79-
this.prefix = "".equals(prefix) ? null : prefix;
80-
}
81-
82-
@Override
83-
public void close() throws IOException {
84-
client.close();
70+
DynamoDbDataStoreImpl(DynamoDbClient client, boolean wasExistingClient, String tableName, String prefix) {
71+
super(client, wasExistingClient, tableName, prefix);
8572
}
8673

8774
@Override
@@ -95,10 +82,8 @@ public KeyedItems<SerializedItemDescriptor> getAll(DataKind kind) {
9582
List<Map.Entry<String, SerializedItemDescriptor>> itemsOut = new ArrayList<>();
9683
for (QueryResponse resp: client.queryPaginator(makeQueryForKind(kind).build())) {
9784
for (Map<String, AttributeValue> item: resp.items()) {
98-
AttributeValue keyAttr = item.get(sortKey);
99-
if (keyAttr == null || keyAttr.s() == null) {
100-
101-
} else {
85+
AttributeValue keyAttr = item.get(SORT_KEY);
86+
if (keyAttr != null && keyAttr.s() != null) {
10287
SerializedItemDescriptor itemOut = unmarshalItem(kind, item);
10388
if (itemOut != null) {
10489
itemsOut.add(new AbstractMap.SimpleEntry<>(keyAttr.s(), itemOut));
@@ -135,18 +120,17 @@ public void init(FullDataSet<SerializedItemDescriptor> allData) {
135120
// Now delete any previously existing items whose keys were not in the current data
136121
for (Map.Entry<String, String> combinedKey: unusedOldKeys) {
137122
if (!combinedKey.getKey().equals(initedKey())) {
138-
Map<String, AttributeValue> keys = mapOf(
139-
partitionKey, AttributeValue.builder().s(combinedKey.getKey()).build(),
140-
sortKey, AttributeValue.builder().s(combinedKey.getValue()).build());
141-
requests.add(WriteRequest.builder().deleteRequest(builder -> builder.key(keys)).build());
123+
requests.add(WriteRequest.builder()
124+
.deleteRequest(builder ->
125+
builder.key(makeKeysMap(combinedKey.getKey(), combinedKey.getValue())))
126+
.build());
142127
}
143128
}
144129

145130
// Now set the special key that we check in initializedInternal()
146-
Map<String, AttributeValue> initedItem = mapOf(
147-
partitionKey, AttributeValue.builder().s(initedKey()).build(),
148-
sortKey, AttributeValue.builder().s(initedKey()).build());
149-
requests.add(WriteRequest.builder().putRequest(builder -> builder.item(initedItem)).build());
131+
requests.add(WriteRequest.builder()
132+
.putRequest(builder -> builder.item(makeKeysMap(initedKey(), initedKey())))
133+
.build());
150134

151135
batchWriteRequests(client, tableName, requests);
152136

@@ -166,8 +150,8 @@ public boolean upsert(DataKind kind, String key, SerializedItemDescriptor newIte
166150
.item(encodedItem)
167151
.conditionExpression("attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version")
168152
.expressionAttributeNames(mapOf(
169-
"#namespace", partitionKey,
170-
"#key", sortKey,
153+
"#namespace", PARTITION_KEY,
154+
"#key", SORT_KEY,
171155
"#version", versionAttribute))
172156
.expressionAttributeValues(mapOf(
173157
":version", AttributeValue.builder().n(String.valueOf(newItem.getVersion())).build()))
@@ -199,11 +183,7 @@ public boolean isStoreAvailable() {
199183
public void setUpdateHook(Runnable updateHook) {
200184
this.updateHook = updateHook;
201185
}
202-
203-
private String prefixedNamespace(String base) {
204-
return prefix == null ? base : (prefix + ":" + base);
205-
}
206-
186+
207187
private String namespaceForKind(DataKind kind) {
208188
return prefixedNamespace(kind.getName());
209189
}
@@ -214,7 +194,7 @@ private String initedKey() {
214194

215195
private QueryRequest.Builder makeQueryForKind(DataKind kind) {
216196
Map<String, Condition> keyConditions = mapOf(
217-
partitionKey,
197+
PARTITION_KEY,
218198
Condition.builder()
219199
.comparisonOperator(ComparisonOperator.EQ)
220200
.attributeValueList(AttributeValue.builder().s(namespaceForKind(kind)).build())
@@ -226,31 +206,20 @@ private QueryRequest.Builder makeQueryForKind(DataKind kind) {
226206
.keyConditions(keyConditions);
227207
}
228208

229-
private GetItemResponse getItemByKeys(String namespace, String key) {
230-
Map<String, AttributeValue> keyMap = mapOf(
231-
partitionKey, AttributeValue.builder().s(namespace).build(),
232-
sortKey, AttributeValue.builder().s(key).build()
233-
);
234-
return client.getItem(builder -> builder.tableName(tableName)
235-
.consistentRead(true)
236-
.key(keyMap)
237-
);
238-
}
239-
240209
private Set<Map.Entry<String, String>> readExistingKeys(FullDataSet<?> kindsFromThisDataSet) {
241210
Set<Map.Entry<String, String>> keys = new HashSet<>();
242211
for (Map.Entry<DataKind, ?> e: kindsFromThisDataSet.getData()) {
243212
DataKind kind = e.getKey();
244213
QueryRequest req = makeQueryForKind(kind)
245214
.projectionExpression("#namespace, #key")
246215
.expressionAttributeNames(mapOf(
247-
"#namespace", partitionKey, "#key", sortKey))
216+
"#namespace", PARTITION_KEY, "#key", SORT_KEY))
248217
.build();
249218
QueryIterable queryResults = client.queryPaginator(req);
250219
for (QueryResponse resp: queryResults) {
251220
for (Map<String, AttributeValue> item: resp.items()) {
252-
String namespace = item.get(partitionKey).s();
253-
String key = item.get(sortKey).s();
221+
String namespace = item.get(PARTITION_KEY).s();
222+
String key = item.get(SORT_KEY).s();
254223
keys.add(new AbstractMap.SimpleEntry<>(namespace, key));
255224
}
256225
}
@@ -261,8 +230,8 @@ private Set<Map.Entry<String, String>> readExistingKeys(FullDataSet<?> kindsFrom
261230
private Map<String, AttributeValue> marshalItem(DataKind kind, String key, SerializedItemDescriptor item) {
262231
String json = item.isDeleted() ? deletedItemPlaceholder : item.getSerializedItem();
263232
return mapOf(
264-
partitionKey, AttributeValue.builder().s(namespaceForKind(kind)).build(),
265-
sortKey, AttributeValue.builder().s(key).build(),
233+
PARTITION_KEY, AttributeValue.builder().s(namespaceForKind(kind)).build(),
234+
SORT_KEY, AttributeValue.builder().s(key).build(),
266235
versionAttribute, AttributeValue.builder().n(String.valueOf(item.getVersion())).build(),
267236
itemJsonAttribute, AttributeValue.builder().s(json).build()
268237
);

0 commit comments

Comments
 (0)