 from bson.son import SON
 
 from pymongo.cursor import CursorType
-from pymongo.errors import (ConfigurationError,
-                            EncryptionError,
-                            InvalidOperation,
-                            OperationFailure)
 from pymongo.encryption import (Algorithm,
                                 ClientEncryption)
-from pymongo.errors import ConfigurationError, DocumentTooLarge
 from pymongo.encryption_options import AutoEncryptionOpts, _HAVE_PYMONGOCRYPT
-from pymongo.message import _COMMAND_OVERHEAD
+from pymongo.errors import (BulkWriteError,
+                            ConfigurationError,
+                            EncryptionError,
+                            InvalidOperation,
+                            OperationFailure,
+                            WriteError)
 from pymongo.mongo_client import MongoClient
 from pymongo.operations import InsertOne
 from pymongo.write_concern import WriteConcern
@@ -918,6 +918,10 @@ def test_corpus_local_schema(self):
         self._test_corpus(opts)
 
 
+_2_MiB = 2097152
+_16_MiB = 16777216
+
+
 class TestBsonSizeBatches(EncryptionIntegrationTest):
     """Prose tests for BSON size limits and batch splitting."""
 
@@ -955,27 +959,14 @@ def tearDownClass(cls):
         super(TestBsonSizeBatches, cls).tearDownClass()
 
     def test_01_insert_succeeds_under_2MiB(self):
-        doc = {'_id': 'no_encryption_under_2mib',
-               'unencrypted': 'a' * ((2 ** 21) - 1000)}
+        doc = {'_id': 'over_2mib_under_16mib', 'unencrypted': 'a' * _2_MiB}
         self.coll_encrypted.insert_one(doc)
 
         # Same with bulk_write.
-        doc = {'_id': 'no_encryption_under_2mib_bulk',
-               'unencrypted': 'a' * ((2 ** 21) - 1000)}
+        doc['_id'] = 'over_2mib_under_16mib_bulk'
         self.coll_encrypted.bulk_write([InsertOne(doc)])
 
-    def test_02_insert_fails_over_2MiB(self):
-        doc = {'_id': 'no_encryption_over_2mib',
-               'unencrypted': 'a' * (2 ** 21 + _COMMAND_OVERHEAD)}
-
-        with self.assertRaises(DocumentTooLarge):
-            self.coll_encrypted.insert_one(doc)
-        with self.assertRaises(DocumentTooLarge):
-            self.coll_encrypted.insert_many([doc])
-        with self.assertRaises(DocumentTooLarge):
-            self.coll_encrypted.bulk_write([InsertOne(doc)])
-
-    def test_03_insert_succeeds_over_2MiB_post_encryption(self):
+    def test_02_insert_succeeds_over_2MiB_post_encryption(self):
         doc = {'_id': 'encryption_exceeds_2mib',
                'unencrypted': 'a' * ((2 ** 21) - 2000)}
         doc.update(json_data('limits', 'limits-doc.json'))
@@ -985,29 +976,53 @@ def test_03_insert_succeeds_over_2MiB_post_encryption(self):
         doc['_id'] = 'encryption_exceeds_2mib_bulk'
         self.coll_encrypted.bulk_write([InsertOne(doc)])
 
-    def test_04_bulk_batch_split(self):
-        doc1 = {'_id': 'no_encryption_under_2mib_1',
-                'unencrypted': 'a' * ((2 ** 21) - 1000)}
-        doc2 = {'_id': 'no_encryption_under_2mib_2',
-                'unencrypted': 'a' * ((2 ** 21) - 1000)}
+    def test_03_bulk_batch_split(self):
+        doc1 = {'_id': 'over_2mib_1', 'unencrypted': 'a' * _2_MiB}
+        doc2 = {'_id': 'over_2mib_2', 'unencrypted': 'a' * _2_MiB}
         self.listener.reset()
         self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)])
         self.assertEqual(
             self.listener.started_command_names(), ['insert', 'insert'])
 
-    def test_05_bulk_batch_split(self):
+    def test_04_bulk_batch_split(self):
         limits_doc = json_data('limits', 'limits-doc.json')
         doc1 = {'_id': 'encryption_exceeds_2mib_1',
-                'unencrypted': 'a' * ((2 ** 21) - 2000)}
+                'unencrypted': 'a' * (_2_MiB - 2000)}
         doc1.update(limits_doc)
         doc2 = {'_id': 'encryption_exceeds_2mib_2',
-                'unencrypted': 'a' * ((2 ** 21) - 2000)}
+                'unencrypted': 'a' * (_2_MiB - 2000)}
         doc2.update(limits_doc)
         self.listener.reset()
         self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)])
         self.assertEqual(
             self.listener.started_command_names(), ['insert', 'insert'])
 
+    def test_05_insert_succeeds_just_under_16MiB(self):
+        doc = {'_id': 'under_16mib', 'unencrypted': 'a' * (_16_MiB - 2000)}
+        self.coll_encrypted.insert_one(doc)
+
+        # Same with bulk_write.
+        doc['_id'] = 'under_16mib_bulk'
+        self.coll_encrypted.bulk_write([InsertOne(doc)])
+
+    def test_06_insert_fails_over_16MiB(self):
+        limits_doc = json_data('limits', 'limits-doc.json')
+        doc = {'_id': 'encryption_exceeds_16mib',
+               'unencrypted': 'a' * (_16_MiB - 2000)}
+        doc.update(limits_doc)
+
+        with self.assertRaisesRegex(WriteError, 'object to insert too large'):
+            self.coll_encrypted.insert_one(doc)
+
+        # Same with bulk_write.
+        doc['_id'] = 'encryption_exceeds_16mib_bulk'
+        with self.assertRaises(BulkWriteError) as ctx:
+            self.coll_encrypted.bulk_write([InsertOne(doc)])
+        err = ctx.exception.details['writeErrors'][0]
+        self.assertEqual(2, err['code'])
+        self.assertIn('object to insert too large', err['errmsg'])
+
+
 
 class TestCustomEndpoint(EncryptionIntegrationTest):
     """Prose tests for creating data keys with a custom endpoint."""