Commit efada001 authored by yangguo@chromium.org

MIPS: port "Generated code for substring slices in x64 and arm".

Ported r9111 (2b946464)

BUG=
TEST=

Review URL: http://codereview.chromium.org/7835025

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@9127 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 63bbe11e
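For orientation before the diff: the port removes the temporary early bail-out to the runtime and implements the same substring fast path as the x64/ARM code. The sketch below is a rough, self-contained C++ summary of that decision logic, not V8 code; every name in it is invented for illustration, and kAssumedMinSliceLength is only an assumption standing in for SlicedString::kMinLength.

enum class SubStringPath {
  kRuntime,         // bail out to the C++ runtime (length < 2, non-flat cases, ...)
  kReturnOriginal,  // the requested range covers the whole string
  kCreateSlice,     // allocate a SlicedString that points into a parent string
  kCopyCharacters   // allocate a sequential string and copy (length-2 results
                    // first probe the two-character symbol table)
};

// Assumed stand-in for SlicedString::kMinLength; the generated code only
// compares the result length against the constant.
constexpr int kAssumedMinSliceLength = 13;

SubStringPath ChoosePath(int string_length, int from, int to,
                         bool string_slices_enabled) {
  if (from < 0 || to > string_length || from > to) return SubStringPath::kRuntime;
  const int result_length = to - from;
  if (result_length < 2) return SubStringPath::kRuntime;
  if (result_length == string_length) return SubStringPath::kReturnOriginal;
  if (string_slices_enabled && result_length >= kAssumedMinSliceLength)
    return SubStringPath::kCreateSlice;
  return SubStringPath::kCopyCharacters;
}

Creating a slice avoids copying characters entirely; the copy path is kept for short results, where a slice would not pay for its extra indirection.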
@@ -5642,11 +5642,6 @@ void SubStringStub::Generate(MacroAssembler* masm) {
Register to = t2;
Register from = t3;
if (FLAG_string_slices) {
__ nop(); // Jumping as first instruction would crash the code generation.
__ jmp(&sub_string_runtime);
}
// Check bounds and smi-ness.
__ lw(to, MemOperand(sp, kToOffset));
__ lw(from, MemOperand(sp, kFromOffset));
@@ -5670,7 +5665,8 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Special handling of sub-strings of length 1 and 2. One character strings
// are handled in the runtime system (looked up in the single character
// cache). Two character strings are looked for in the symbol cache.
// cache). Two character strings are looked for in the symbol cache in
// generated code.
__ Branch(&sub_string_runtime, lt, a2, Operand(2));
// Both to and from are smis.
@@ -5682,19 +5678,32 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// t5: to index (untagged smi)
// Make sure first argument is a sequential (or flat) string.
__ lw(t1, MemOperand(sp, kStringOffset));
__ Branch(&sub_string_runtime, eq, t1, Operand(kSmiTagMask));
__ lw(v0, MemOperand(sp, kStringOffset));
__ Branch(&sub_string_runtime, eq, v0, Operand(kSmiTagMask));
__ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset));
__ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset));
__ And(t4, a1, Operand(kIsNotStringMask));
__ And(t4, v0, Operand(kIsNotStringMask));
__ Branch(&sub_string_runtime, ne, t4, Operand(zero_reg));
// Short-cut for the case of trivial substring.
Label return_v0;
// v0: original string
// a2: result string length
__ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
__ sra(t0, t0, 1);
__ Branch(&return_v0, eq, a2, Operand(t0));
Label create_slice;
if (FLAG_string_slices) {
__ Branch(&create_slice, ge, a2, Operand(SlicedString::kMinLength));
}
// v0: original string
// a1: instance type
// a2: result string length
// a3: from index (untagged smi)
// t1: string
// t2: (a.k.a. to): to (smi)
// t3: (a.k.a. from): from offset (smi)
// t5: to index (untagged smi)
@@ -5703,8 +5712,9 @@ void SubStringStub::Generate(MacroAssembler* masm) {
__ And(t0, a1, Operand(kStringRepresentationMask));
STATIC_ASSERT(kSeqStringTag < kConsStringTag);
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kConsStringTag < kSlicedStringTag);
// External strings go to runtime.
// Slices and external strings go to runtime.
__ Branch(&sub_string_runtime, gt, t0, Operand(kConsStringTag));
// Sequential strings are handled directly.
@@ -5713,32 +5723,32 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Cons string. Try to recurse (once) on the first substring.
// (This adds a little more generality than necessary to handle flattened
// cons strings, but not much).
__ lw(t1, FieldMemOperand(t1, ConsString::kFirstOffset));
__ lw(t0, FieldMemOperand(t1, HeapObject::kMapOffset));
__ lw(v0, FieldMemOperand(v0, ConsString::kFirstOffset));
__ lw(t0, FieldMemOperand(v0, HeapObject::kMapOffset));
__ lbu(a1, FieldMemOperand(t0, Map::kInstanceTypeOffset));
STATIC_ASSERT(kSeqStringTag == 0);
// Cons and External strings go to runtime.
// Cons, slices and external strings go to runtime.
__ Branch(&sub_string_runtime, ne, a1, Operand(kStringRepresentationMask));
// Definitely a sequential string.
__ bind(&seq_string);
// v0: original string
// a1: instance type
// a2: result string length
// a3: from index (untagged smi)
// t1: string
// t2: (a.k.a. to): to (smi)
// t3: (a.k.a. from): from offset (smi)
// t5: to index (untagged smi)
__ lw(t0, FieldMemOperand(t1, String::kLengthOffset));
__ lw(t0, FieldMemOperand(v0, String::kLengthOffset));
__ Branch(&sub_string_runtime, lt, t0, Operand(to)); // Fail if to > length.
to = no_reg;
// v0: original string or left hand side of the original cons string.
// a1: instance type
// a2: result string length
// a3: from index (untagged smi)
// t1: string
// t3: (a.k.a. from): from offset (smi)
// t5: to index (untagged smi)
@@ -5754,84 +5764,147 @@ void SubStringStub::Generate(MacroAssembler* masm) {
// Sub string of length 2 requested.
// Get the two characters forming the sub string.
__ Addu(t1, t1, Operand(a3));
__ lbu(a3, FieldMemOperand(t1, SeqAsciiString::kHeaderSize));
__ lbu(t0, FieldMemOperand(t1, SeqAsciiString::kHeaderSize + 1));
__ Addu(v0, v0, Operand(a3));
__ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
__ lbu(t0, FieldMemOperand(v0, SeqAsciiString::kHeaderSize + 1));
// Try to lookup two character string in symbol table.
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, a3, t0, a1, t1, t2, t3, t4, &make_two_character_string);
Counters* counters = masm->isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ Addu(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ jmp(&return_v0);
// a2: result string length.
// a3: two characters combined into halfword in little endian byte order.
__ bind(&make_two_character_string);
__ AllocateAsciiString(v0, a2, t0, t1, t4, &sub_string_runtime);
__ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize));
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ Addu(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ jmp(&return_v0);
__ bind(&result_longer_than_two);
// Locate 'from' character of string.
__ Addu(t1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
__ sra(t4, from, 1);
__ Addu(t1, t1, t4);
// Allocate the result.
__ AllocateAsciiString(v0, a2, t4, t0, a1, &sub_string_runtime);
// v0: result string.
// a2: result string length.
// v0: result string
// a2: result string length
// a3: from index (untagged smi)
// t1: string.
// t1: first character of substring to copy
// t3: (a.k.a. from): from offset (smi)
// Locate first character of result.
__ Addu(a1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
// Locate 'from' character of string.
__ Addu(t1, t1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
__ Addu(t1, t1, Operand(a3));
// v0: result string.
// a1: first character of result string.
// a2: result string length.
// t1: first character of sub string to copy.
// v0: result string
// a1: first character of result string
// a2: result string length
// t1: first character of substring to copy
STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED);
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ Addu(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ jmp(&return_v0);
__ bind(&non_ascii_flat);
// a2: result string length.
// t1: string.
// a2: result string length
// t1: string
// t3: (a.k.a. from): from offset (smi)
// Check for flat two byte string.
// Locate 'from' character of string.
__ Addu(t1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
// As "from" is a smi it is 2 times the value which matches the size of a two
// byte character.
STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ Addu(t1, t1, Operand(from));
// Allocate the result.
__ AllocateTwoByteString(v0, a2, a1, a3, t0, &sub_string_runtime);
// v0: result string.
// a2: result string length.
// t1: string.
// v0: result string
// a2: result string length
// t1: first character of substring to copy
// Locate first character of result.
__ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
// Locate 'from' character of string.
__ Addu(t1, t1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
// As "from" is a smi it is 2 times the value which matches the size of a two
// byte character.
__ Addu(t1, t1, Operand(from));
from = no_reg;
// v0: result string.
// a1: first character of result.
// a2: result length.
// t1: first character of string to copy.
// t1: first character of substring to copy.
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
__ jmp(&return_v0);
if (FLAG_string_slices) {
__ bind(&create_slice);
// v0: original string
// a1: instance type
// a2: length
// a3: from index (untagged smi)
// t2 (a.k.a. to): to (smi)
// t3 (a.k.a. from): from offset (smi)
Label allocate_slice, sliced_string, seq_string;
STATIC_ASSERT(kSeqStringTag == 0);
__ And(t4, a1, Operand(kStringRepresentationMask));
__ Branch(&seq_string, eq, t4, Operand(zero_reg));
STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
STATIC_ASSERT(kIsIndirectStringMask != 0);
__ And(t4, a1, Operand(kIsIndirectStringMask));
// External string. Jump to runtime.
__ Branch(&sub_string_runtime, eq, t4, Operand(zero_reg));
__ And(t4, a1, Operand(kSlicedNotConsMask));
__ Branch(&sliced_string, ne, t4, Operand(zero_reg));
// Cons string. Check whether it is flat, then fetch first part.
__ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset));
__ LoadRoot(t5, Heap::kEmptyStringRootIndex);
__ Branch(&sub_string_runtime, ne, t1, Operand(t5));
__ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset));
__ jmp(&allocate_slice);
__ bind(&sliced_string);
// Sliced string. Fetch parent and correct start index by offset.
__ lw(t1, FieldMemOperand(v0, SlicedString::kOffsetOffset));
__ addu(t3, t3, t1);
__ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
__ jmp(&allocate_slice);
__ bind(&seq_string);
// Sequential string. Just move string to the right register.
__ mov(t1, v0);
__ bind(&allocate_slice);
// a1: instance type of original string
// a2: length
// t1: underlying subject string
// t3 (a.k.a. from): from offset (smi)
// Allocate new sliced string. At this point we do not reload the instance
// type including the string encoding because we simply rely on the info
// provided by the original string. It does not matter if the original
// string's encoding is wrong because we always have to recheck encoding of
// the newly created string's parent anyways due to externalized strings.
Label two_byte_slice, set_slice_header;
STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0);
STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
__ And(t4, a1, Operand(kStringEncodingMask));
__ Branch(&two_byte_slice, eq, t4, Operand(zero_reg));
__ AllocateAsciiSlicedString(v0, a2, a3, t0, &sub_string_runtime);
__ jmp(&set_slice_header);
__ bind(&two_byte_slice);
__ AllocateTwoByteSlicedString(v0, a2, a3, t0, &sub_string_runtime);
__ bind(&set_slice_header);
__ sw(t3, FieldMemOperand(v0, SlicedString::kOffsetOffset));
__ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset));
}
__ bind(&return_v0);
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ Addu(sp, sp, Operand(3 * kPointerSize));
__ Ret();
......
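The create_slice block in the hunk above must point the new slice at a direct string, so it first resolves indirect originals: a flattened cons string (second part is the empty string) contributes its first part, an existing sliced string contributes its parent with the stored offset folded into the new one, a sequential string is used as-is, and external strings still go to the runtime. A hedged C++ sketch of that resolution, with the Str/Rep types invented to stand in for V8's tagged strings:

#include <optional>

enum class Rep { kSequential, kCons, kSliced, kExternal };

struct Str {
  Rep rep;
  Str* first = nullptr;         // cons: first part
  bool second_is_empty = true;  // cons: whether the second part is ""
  Str* parent = nullptr;        // sliced: underlying string
  int offset = 0;               // sliced: start offset in the parent
};

struct SliceTarget { Str* parent; int offset; };

// Returns the string the new slice should reference plus the adjusted start
// offset, or nothing when the stub has to fall back to the runtime.
std::optional<SliceTarget> ResolveSliceTarget(Str* original, int from) {
  switch (original->rep) {
    case Rep::kSequential:
      return SliceTarget{original, from};
    case Rep::kSliced:
      // Fold the existing offset so slices never chain.
      return SliceTarget{original->parent, from + original->offset};
    case Rep::kCons:
      if (!original->second_is_empty) return std::nullopt;  // not flat
      return SliceTarget{original->first, from};
    case Rep::kExternal:
      return std::nullopt;
  }
  return std::nullopt;
}

After this resolution the stub only looks at the encoding bit of the original instance type to pick AllocateAsciiSlicedString or AllocateTwoByteSlicedString, as the comment in the diff explains.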
@@ -2815,6 +2815,46 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
}
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
Register length,
Register scratch1,
Register scratch2,
Label* gc_required) {
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
InitializeNewString(result,
length,
Heap::kSlicedStringMapRootIndex,
scratch1,
scratch2);
}
void MacroAssembler::AllocateAsciiSlicedString(Register result,
Register length,
Register scratch1,
Register scratch2,
Label* gc_required) {
AllocateInNewSpace(SlicedString::kSize,
result,
scratch1,
scratch2,
gc_required,
TAG_OBJECT);
InitializeNewString(result,
length,
Heap::kSlicedAsciiStringMapRootIndex,
scratch1,
scratch2);
}
// Allocates a heap number or jumps to the label if the young space is full and
// a scavenge is needed.
void MacroAssembler::AllocateHeapNumber(Register result,
......
@@ -362,6 +362,16 @@ class MacroAssembler: public Assembler {
Register scratch1,
Register scratch2,
Label* gc_required);
void AllocateTwoByteSlicedString(Register result,
Register length,
Register scratch1,
Register scratch2,
Label* gc_required);
void AllocateAsciiSlicedString(Register result,
Register length,
Register scratch1,
Register scratch2,
Label* gc_required);
// Allocates a heap number or jumps to the gc_required label if the young
// space is full and a scavenge is needed. All registers are clobbered also
......
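A closing note on the two helpers declared above: they differ only in the map root index they pass to InitializeNewString; both allocate the fixed-size SlicedString and leave the offset and parent fields for the stub to store afterwards. Purely as a conceptual picture (the struct below is illustrative, not V8's real object layout):

// What the finished object amounts to once the stub has filled in the last
// two fields; names follow the accessors used in the stub.
struct SlicedStringSketch {
  const void* map;     // sliced-string map (two-byte or ASCII variant)
  int length;          // result length (a2), set via InitializeNewString
  int hash_field;      // presumably initialized empty, computed lazily later
  const void* parent;  // underlying flat string (t1), stored by the stub
  int offset;          // start offset into the parent (t3), stored by the stub
};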