Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 34 additions & 8 deletions lib/mongo/server/connection_base.rb
Original file line number Diff line number Diff line change
Expand Up @@ -186,9 +186,6 @@ def deliver(message, client, options = {})
end

def serialize(message, client, buffer = BSON::ByteBuffer.new)
start_size = 0
final_message = message.maybe_compress(compressor, options[:zlib_compression_level])

# Driver specifications only mandate the fixed 16MiB limit for
# serialized BSON documents. However, the server returns its
# active serialized BSON document size limit in the ismaster response,
Expand All @@ -213,12 +210,41 @@ def serialize(message, client, buffer = BSON::ByteBuffer.new)
max_bson_size += MAX_BSON_COMMAND_OVERHEAD
end

final_message.serialize(buffer, max_bson_size)
if max_message_size &&
(buffer.length - start_size) > max_message_size
then
raise Error::MaxMessageSize.new(max_message_size)
# RUBY-2234: It is necessary to check that the message size does not
# exceed the maximum message size before compressing and serializing
# the final message.
#
# This is to avoid the case where the user performs a bulk write
# larger than 16MiB which, when compressed, becomes smaller than 16MiB.
# If the driver does not split the bulk writes prior to compression,
# the entire operation will be sent to the server, which will raise an
# error because the uncompressed operation exceeds the maximum bson size.
#
# To address this problem, we serialize the message prior to compression
# and raise an exception if the uncompressed serialized message exceeds
# the maximum message size.
if max_message_size
# Create a separate buffer that contains the un-compressed message
# for the purpose of checking its size. Write any pre-existing contents
# from the original buffer into the temporary one.
temp_buffer = BSON::ByteBuffer.new

# TODO: address the fact that this line mutates the buffer.
temp_buffer.put_bytes(buffer.get_bytes(buffer.length))

message.serialize(temp_buffer, max_bson_size)
if temp_buffer.length > max_message_size
raise Error::MaxMessageSize.new(max_message_size)
end
end
Comment on lines +226 to 239
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@p-mongo is this what you had in mind?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍 Yep looks good.


# RUBY-2335: When the un-compressed message is smaller than the maximum
# bson size limit, the message will be serialized twice. The operations
# layer should be refactored to allow compression on an already-
# serialized message.
final_message = message.maybe_compress(compressor, options[:zlib_compression_level])
final_message.serialize(buffer, max_bson_size)

buffer
end
end
Expand Down
19 changes: 19 additions & 0 deletions spec/integration/bulk_write_spec.rb
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
require 'spec_helper'

describe 'Bulk writes' do
  before do
    authorized_collection.drop
  end

  context 'when bulk write is larger than 48MB' do
    # 48 inserts of one million characters each: the total payload is
    # larger than the 48MB maximum message size, so the driver is
    # expected to split the batch rather than fail server-side.
    let(:operations) do
      Array.new(48) { { insert_one: { text: 'a' * 1_000_000 } } }
    end

    it 'succeeds' do
      expect { authorized_collection.bulk_write(operations) }.not_to raise_error
    end
  end
end
4 changes: 0 additions & 4 deletions spec/integration/size_limit_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,6 @@
end

it 'allows bulk writes of multiple documents of exactly 16 MiB each' do
if SpecConfig.instance.compressors
pending "RUBY-2234"
end

documents = []
1.upto(3) do |index|
document = { key: 'a' * (max_document_size - 28), _id: "in#{index}" }
Expand Down