 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Supplier;
+import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.zip.CRC32;
 
@@ -229,7 +230,8 @@ private FDBDirectory(@Nonnull Subspace subspace, @Nullable Map<String, String> i
         this.fileSequenceCounter = new AtomicLong(-1);
         this.serializer = new LuceneSerializer(Objects.requireNonNullElse(agilityContext.getPropertyValue(LuceneRecordContextProperties.LUCENE_INDEX_COMPRESSION_ENABLED), false),
                 Objects.requireNonNullElse(agilityContext.getPropertyValue(LuceneRecordContextProperties.LUCENE_INDEX_ENCRYPTION_ENABLED), false),
-                agilityContext.getPropertyValue(LuceneRecordContextProperties.LUCENE_INDEX_KEY_MANAGER));
+                agilityContext.getPropertyValue(LuceneRecordContextProperties.LUCENE_INDEX_KEY_MANAGER),
+                Objects.requireNonNullElse(agilityContext.getPropertyValue(LuceneRecordContextProperties.LUCENE_FIELD_PROTOBUF_PREFIX_ENABLED), false));
         this.fileReferenceMapSupplier = Suppliers.memoize(this::loadFileReferenceCacheForMemoization);
         this.sharedCacheManager = sharedCacheManager;
         this.sharedCacheKey = sharedCacheKey;
@@ -338,11 +340,12 @@ public void setFieldInfoId(final String filename, final long id, final ByteStrin
         writeFDBLuceneFileReference(filename, reference);
     }
 
-    void writeFieldInfos(long id, byte[] value) {
+    void writeFieldInfos(long id, byte[] rawBytes) {
         if (id == 0) {
             throw new RecordCoreArgumentException("FieldInfo id should never be 0");
         }
         byte[] key = fieldInfosSubspace.pack(id);
+        byte[] value = serializer.encodeFieldProtobuf(rawBytes);
         agilityContext.recordSize(LuceneEvents.SizeEvents.LUCENE_WRITE, key.length + value.length);
         if (LOGGER.isTraceEnabled()) {
             LOGGER.trace(getLogMessage("Write lucene stored field infos data",
@@ -357,7 +360,9 @@ Stream<NonnullPair<Long, byte[]>> getAllFieldInfosStream() {
                 LuceneEvents.Waits.WAIT_LUCENE_READ_FIELD_INFOS,
                 agilityContext.apply(aContext -> aContext.ensureActive().getRange(fieldInfosSubspace.range()).asList()))
                 .stream()
-                .map(keyValue -> NonnullPair.of(fieldInfosSubspace.unpack(keyValue.getKey()).getLong(0), keyValue.getValue()));
+                .map(keyValue -> NonnullPair.of(
+                        fieldInfosSubspace.unpack(keyValue.getKey()).getLong(0),
+                        serializer.decodeFieldProtobuf(keyValue.getValue())));
     }
 
     public CompletableFuture<Integer> getFieldInfosCount() {
@@ -444,10 +449,11 @@ public int writeData(final long id, final int block, @Nonnull final byte[] value
      * Write stored fields document to the DB.
      * @param segmentName the segment name writing to
      * @param docID the document ID to write
-     * @param value the bytes value of the stored fields
+     * @param rawBytes the bytes value of the stored fields
      */
-    public void writeStoredFields(@Nonnull String segmentName, int docID, @Nonnull final byte[] value) {
+    public void writeStoredFields(@Nonnull String segmentName, int docID, @Nonnull final byte[] rawBytes) {
         byte[] key = storedFieldsSubspace.pack(Tuple.from(segmentName, docID));
+        byte[] value = serializer.encodeFieldProtobuf(rawBytes);
         agilityContext.recordSize(LuceneEvents.SizeEvents.LUCENE_WRITE_STORED_FIELDS, key.length + value.length);
         if (LOGGER.isTraceEnabled()) {
             LOGGER.trace(getLogMessage("Write lucene stored fields data",
@@ -542,7 +548,7 @@ private CompletableFuture<byte[]> readData(long id, int block) {
     }
 
     @Nonnull
-    public byte[] readStoredFields(String segmentName, int docId) throws IOException {
+    public byte[] readStoredFields(String segmentName, int docId) {
         final byte[] key = storedFieldsSubspace.pack(Tuple.from(segmentName, docId));
         final byte[] rawBytes = asyncToSync(LuceneEvents.Waits.WAIT_LUCENE_GET_STORED_FIELDS,
                 agilityContext.instrument(LuceneEvents.Events.LUCENE_READ_STORED_FIELDS,
@@ -553,11 +559,11 @@ public byte[] readStoredFields(String segmentName, int docId) throws IOException
                     .addLogInfo(LuceneLogMessageKeys.DOC_ID, docId)
                     .addLogInfo(LogMessageKeys.KEY, ByteArrayUtil2.loggable(key));
         }
-        return rawBytes;
+        return Objects.requireNonNull(serializer.decodeFieldProtobuf(rawBytes));
     }
 
     @Nonnull
-    public List<KeyValue> readAllStoredFields(String segmentName) {
+    public List<byte[]> readAllStoredFields(String segmentName) {
         final Range range = storedFieldsSubspace.range(Tuple.from(segmentName));
         final List<KeyValue> list = asyncToSync(LuceneEvents.Waits.WAIT_LUCENE_GET_ALL_STORED_FIELDS,
                 agilityContext.getRange(range.begin, range.end));
@@ -567,7 +573,7 @@ public List<KeyValue> readAllStoredFields(String segmentName) {
                     .addLogInfo(LogMessageKeys.RANGE_START, ByteArrayUtil2.loggable(range.begin))
                     .addLogInfo(LogMessageKeys.RANGE_END, ByteArrayUtil2.loggable(range.end));
         }
-        return list;
+        return list.stream().map(KeyValue::getValue).map(serializer::decodeFieldProtobuf).collect(Collectors.toList());
     }
 
     /**
@@ -1055,7 +1061,8 @@ private KeyValueLogMessage getKeyValueLogMessage(final @Nonnull String staticMsg
         return KeyValueLogMessage.build(staticMsg, keysAndValues)
                 .addKeyAndValue(LogMessageKeys.SUBSPACE, subspace)
                 .addKeyAndValue(LuceneLogMessageKeys.COMPRESSION_SUPPOSED, serializer.isCompressionEnabled())
-                .addKeyAndValue(LuceneLogMessageKeys.ENCRYPTION_SUPPOSED, serializer.isEncryptionEnabled());
+                .addKeyAndValue(LuceneLogMessageKeys.ENCRYPTION_SUPPOSED, serializer.isEncryptionEnabled())
+                .addKeyAndValue(LuceneLogMessageKeys.FIELD_PROTOBUF_ENCODED, serializer.isFieldProtobufPrefixEnabled());
     }
 
     /**
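
A minimal standalone sketch of the encode-on-write / decode-on-read round trip this change threads through writeFieldInfos, writeStoredFields, readStoredFields, and readAllStoredFields. It assumes the "field protobuf prefix" amounts to a marker byte prepended on write and stripped (with a legacy pass-through) on read; the real LuceneSerializer#encodeFieldProtobuf / #decodeFieldProtobuf are not shown in this diff, so the class name, marker value, and fallback behavior below are hypothetical.

import java.util.Arrays;

// Hypothetical illustration only: a single-byte prefix codec standing in for
// LuceneSerializer#encodeFieldProtobuf / #decodeFieldProtobuf. The actual prefix
// scheme used by the PR may differ; treat the marker byte as an assumption.
final class FieldPrefixCodecSketch {
    private static final byte PREFIX_MARKER = 0x01; // assumed marker value

    // Encode on write: prepend the marker byte to the raw field bytes.
    static byte[] encode(byte[] rawBytes) {
        final byte[] encoded = new byte[rawBytes.length + 1];
        encoded[0] = PREFIX_MARKER;
        System.arraycopy(rawBytes, 0, encoded, 1, rawBytes.length);
        return encoded;
    }

    // Decode on read: strip the marker if present, otherwise pass the value through
    // unchanged (e.g. data written before the feature flag was enabled).
    static byte[] decode(byte[] stored) {
        if (stored.length > 0 && stored[0] == PREFIX_MARKER) {
            return Arrays.copyOfRange(stored, 1, stored.length);
        }
        return stored;
    }

    public static void main(String[] args) {
        final byte[] raw = {4, 2};
        final byte[] roundTripped = decode(encode(raw));
        System.out.println(Arrays.equals(raw, roundTripped)); // prints: true
    }
}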