Commit b199bcdd authored by fedor, committed by Commit bot

unicode-decoder: fix out-of-bounds write in utf16

`WriteUtf16Slow` should not assume that the output buffer has enough
space left to hold both uint16_t code units of a surrogate pair.
Instead, it should pass the number of remaining input bytes to
`Utf8::ValueOf`, just as `Utf8DecoderBase::Reset` already does.
Otherwise it attempts to write the trail surrogate past the end of
the buffer, leading to memory corruption and a possible crash.
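
For illustration only, a minimal self-contained C++ sketch of the failure
mode follows. It is not the V8 implementation: `DecodeOne`, `WriteUtf16`
and `kReplacement` are invented stand-ins for `Utf8::ValueOf`,
`WriteUtf16Slow` and `Utf8::kBadChar`, and only the length handling is
meant to be faithful.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdio>

namespace {

const uint32_t kReplacement = 0xFFFD;  // invented stand-in for Utf8::kBadChar

// Decodes one UTF-8 sequence, never reading more than max_bytes. A sequence
// whose declared length runs past max_bytes is treated as a single bad byte.
uint32_t DecodeOne(const uint8_t* s, size_t max_bytes, size_t* cursor) {
  assert(max_bytes > 0);
  uint8_t lead = s[0];
  size_t len = lead < 0x80 ? 1 : lead < 0xE0 ? 2 : lead < 0xF0 ? 3 : 4;
  if (len > max_bytes) {
    *cursor += 1;
    return kReplacement;
  }
  uint32_t cp = (len == 1) ? lead : (lead & (0xFF >> (len + 1)));
  for (size_t i = 1; i < len; i++) cp = (cp << 6) | (s[i] & 0x3F);
  *cursor += len;
  return cp;
}

// Fills `out`, whose size (out_len code units) was computed by a Reset-style
// first pass that counted a truncated tail as one bad character. Passing the
// real stream_len instead of a constant keeps DecodeOne from reading past the
// stream and returning a supplementary-plane character that would need two
// output slots when only one remains.
void WriteUtf16(const uint8_t* stream, size_t stream_len, uint16_t* out,
                size_t out_len) {
  while (out_len != 0) {
    size_t cursor = 0;
    uint32_t c = DecodeOne(stream, stream_len, &cursor);
    stream += cursor;
    stream_len -= cursor;
    if (c > 0xFFFF) {
      // Surrogate pair: two uint16_t writes. With an unbounded read this
      // branch could be reached while out_len == 1, overflowing `out`.
      *out++ = static_cast<uint16_t>(0xD800 + ((c - 0x10000) >> 10));
      *out++ = static_cast<uint16_t>(0xDC00 + ((c - 0x10000) & 0x3FF));
      out_len -= 2;
    } else {
      *out++ = static_cast<uint16_t>(c);
      out_len -= 1;
    }
  }
}

}  // namespace

int main() {
  // A lone 0xf0 is the truncated start of a 4-byte sequence, the same shape
  // the Utf16MissingTrailing test below leaves at the end of its stream.
  const uint8_t truncated[] = {0xF0};
  uint16_t out[1];
  WriteUtf16(truncated, sizeof(truncated), out, 1);
  std::printf("decoded unit: 0x%04x\n", static_cast<unsigned>(out[0]));
  return 0;
}

With the old call shape (a constant `Utf8::kMaxEncodedSize` instead of the
remaining length), the decoder could read past a truncated 4-byte tail,
return a character above 0xFFFF, and the surrogate-pair branch would then
write two code units into the single slot that Reset had accounted for.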

Originally reported by: Kris Reeves <kris.re@bbhmedia.com>

BUG=v8:4274
R=danno
R=svenpanne
LOG=y

Review URL: https://codereview.chromium.org/1226493003

Cr-Commit-Position: refs/heads/master@{#29485}
parent 9599bad4
@@ -15,6 +15,7 @@ void Utf8DecoderBase::Reset(uint16_t* buffer, size_t buffer_length,
   // Assume everything will fit in the buffer and stream won't be needed.
   last_byte_of_buffer_unused_ = false;
   unbuffered_start_ = NULL;
+  unbuffered_length_ = 0;
   bool writing_to_buffer = true;
   // Loop until stream is read, writing to buffer as long as buffer has space.
   size_t utf16_length = 0;
@@ -41,6 +42,7 @@ void Utf8DecoderBase::Reset(uint16_t* buffer, size_t buffer_length,
         // Just wrote last character of buffer
         writing_to_buffer = false;
         unbuffered_start_ = stream;
+        unbuffered_length_ = stream_length;
       }
       continue;
     }
@@ -50,19 +52,23 @@ void Utf8DecoderBase::Reset(uint16_t* buffer, size_t buffer_length,
     writing_to_buffer = false;
     last_byte_of_buffer_unused_ = true;
     unbuffered_start_ = stream - cursor;
+    unbuffered_length_ = stream_length + cursor;
   }
   utf16_length_ = utf16_length;
 }
 
 
-void Utf8DecoderBase::WriteUtf16Slow(const uint8_t* stream, uint16_t* data,
+void Utf8DecoderBase::WriteUtf16Slow(const uint8_t* stream,
+                                     size_t stream_length, uint16_t* data,
                                      size_t data_length) {
   while (data_length != 0) {
     size_t cursor = 0;
-    uint32_t character = Utf8::ValueOf(stream, Utf8::kMaxEncodedSize, &cursor);
+    uint32_t character = Utf8::ValueOf(stream, stream_length, &cursor);
     // There's a total lack of bounds checking for stream
     // as it was already done in Reset.
     stream += cursor;
+    DCHECK(stream_length >= cursor);
+    stream_length -= cursor;
     if (character > unibrow::Utf16::kMaxNonSurrogateCharCode) {
       *data++ = Utf16::LeadSurrogate(character);
       *data++ = Utf16::TrailSurrogate(character);
......
@@ -23,9 +23,10 @@ class Utf8DecoderBase {
   // The first buffer_length utf16 chars are cached in the buffer.
   void Reset(uint16_t* buffer, size_t buffer_length, const uint8_t* stream,
              size_t stream_length);
-  static void WriteUtf16Slow(const uint8_t* stream, uint16_t* data,
-                             size_t length);
+  static void WriteUtf16Slow(const uint8_t* stream, size_t stream_length,
+                             uint16_t* data, size_t length);
   const uint8_t* unbuffered_start_;
+  size_t unbuffered_length_;
   size_t utf16_length_;
   bool last_byte_of_buffer_unused_;
@@ -48,6 +49,7 @@ class Utf8Decoder : public Utf8DecoderBase {
 Utf8DecoderBase::Utf8DecoderBase()
     : unbuffered_start_(NULL),
+      unbuffered_length_(0),
       utf16_length_(0),
       last_byte_of_buffer_unused_(false) {}
@@ -84,7 +86,7 @@ size_t Utf8Decoder<kBufferSize>::WriteUtf16(uint16_t* data,
   if (length <= buffer_length) return length;
   DCHECK(unbuffered_start_ != NULL);
   // Copy the rest the slow way.
-  WriteUtf16Slow(unbuffered_start_, data + buffer_length,
+  WriteUtf16Slow(unbuffered_start_, unbuffered_length_, data + buffer_length,
                  length - buffer_length);
   return length;
 }
......
@@ -7418,6 +7418,57 @@ THREADED_TEST(Utf16Symbol) {
 }
 
 
+THREADED_TEST(Utf16MissingTrailing) {
+  LocalContext context;
+  v8::HandleScope scope(context->GetIsolate());
+
+  // Make sure it will go past the buffer, so it will call `WriteUtf16Slow`.
+  int size = 1024 * 64;
+  uint8_t* buffer = new uint8_t[size];
+  for (int i = 0; i < size; i += 4) {
+    buffer[i] = 0xf0;
+    buffer[i + 1] = 0x9d;
+    buffer[i + 2] = 0x80;
+    buffer[i + 3] = 0x9e;
+  }
+
+  // Now invoke the decoder without the last 3 bytes, truncating the final
+  // 4-byte sequence.
+  v8::Local<v8::String> str =
+      v8::String::NewFromUtf8(
+          context->GetIsolate(), reinterpret_cast<char*>(buffer),
+          v8::NewStringType::kNormal, size - 3).ToLocalChecked();
+  USE(str);
+  delete[] buffer;
+}
+
+
+THREADED_TEST(Utf16Trailing3Byte) {
+  LocalContext context;
+  v8::HandleScope scope(context->GetIsolate());
+
+  // Make sure it will go past the buffer, so it will call `WriteUtf16Slow`.
+  int size = 1024 * 63;
+  uint8_t* buffer = new uint8_t[size];
+  for (int i = 0; i < size; i += 3) {
+    buffer[i] = 0xe2;
+    buffer[i + 1] = 0x80;
+    buffer[i + 2] = 0xa6;
+  }
+
+  // Now invoke the decoder on the full buffer; the last 3-byte sequence is
+  // complete.
+  v8::Local<v8::String> str =
+      v8::String::NewFromUtf8(
+          context->GetIsolate(), reinterpret_cast<char*>(buffer),
+          v8::NewStringType::kNormal, size).ToLocalChecked();
+  v8::String::Value value(str);
+  CHECK_EQ(value.length(), size / 3);
+  CHECK_EQ((*value)[value.length() - 1], 0x2026);
+  delete[] buffer;
+}
+
+
 THREADED_TEST(ToArrayIndex) {
   LocalContext context;
   v8::Isolate* isolate = context->GetIsolate();
......
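
As a side note on the test payloads (a worked sketch, not part of the
commit): the 4-byte pattern f0 9d 80 9e decodes to U+1D01E, which lies above
the BMP and therefore needs a UTF-16 surrogate pair, while the 3-byte pattern
e2 80 a6 decodes to U+2026 HORIZONTAL ELLIPSIS, the value the second test
checks with `CHECK_EQ(..., 0x2026)`.

#include <cassert>
#include <cstdint>

int main() {
  // f0 9d 80 9e: 3 + 6 + 6 + 6 payload bits of a 4-byte UTF-8 sequence.
  uint32_t four = ((0xF0u & 0x07) << 18) | ((0x9Du & 0x3F) << 12) |
                  ((0x80u & 0x3F) << 6) | (0x9Eu & 0x3F);
  assert(four == 0x1D01E);  // above 0xFFFF: needs two UTF-16 code units

  // e2 80 a6: 4 + 6 + 6 payload bits of a 3-byte UTF-8 sequence.
  uint32_t three = ((0xE2u & 0x0F) << 12) | ((0x80u & 0x3F) << 6) |
                   (0xA6u & 0x3F);
  assert(three == 0x2026);  // one UTF-16 code unit, as the test expects
  return 0;
}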