[VM] Ensure PP is restored when returning from runtime calls in bare instructions mode.

Now that we have a moving GC, it is no longer sufficient to rely on the callee-saving behavior of PP.

This change also ensures that all calls go through RuntimeEntry::Call().

Issue #33274

Change-Id: I159986dc18a9b201175fd4a7064a24a1533790ef
Reviewed-on: https://dart-review.googlesource.com/c/88711
Reviewed-by: Vyacheslav Egorov <[email protected]>
Commit-Queue: Martin Kustermann <[email protected]>
mkustermann authored and [email protected] committed Jan 9, 2019
1 parent d09c126 commit 3c66273
Showing 7 changed files with 30 additions and 21 deletions.
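
Why this matters: in bare instructions mode, PP caches a pointer to the global object pool, a heap object in old space. A moving GC can relocate that pool while a runtime call is in progress; the GC rewrites the copy stored on the Thread, but a value parked in a callee-saved register survives the C call unchanged and may then be stale. A minimal sketch of the failure mode, using illustrative names (Thread, global_object_pool) rather than the VM's actual declarations:

#include <cstdint>

struct Thread {
  uintptr_t global_object_pool;  // the GC rewrites this slot when the pool moves
};

// Stand-in for what a call-to-runtime stub does around a C call.
void CallToRuntime(Thread* thread, void (*runtime_call)(Thread*)) {
  uintptr_t cached_pp = thread->global_object_pool;  // value parked in PP
  runtime_call(thread);  // a moving GC may relocate the pool during this call
  // Wrong: keep using cached_pp -- it may point at the pool's old location.
  // Right (what this commit adds): reload PP from the thread afterwards.
  uintptr_t pp = thread->global_object_pool;
  (void)cached_pp;
  (void)pp;
}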
3 changes: 2 additions & 1 deletion runtime/vm/object.h
@@ -4120,7 +4120,8 @@ class ObjectPool : public Object {
   }
   void SetObjectAt(intptr_t index, const Object& obj) const {
     ASSERT((TypeAt(index) == kTaggedObject) ||
-           (TypeAt(index) == kNativeEntryData));
+           (TypeAt(index) == kNativeEntryData) ||
+           (TypeAt(index) == kImmediate && obj.IsSmi()));
     StorePointer(&EntryAddr(index)->raw_obj_, obj.raw());
   }
 
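For context on the object.h change: a Smi in the Dart VM is a tagged integer whose low bit is clear, so its raw bit pattern is simultaneously a valid immediate and a value the GC will never try to follow. That is why a Smi can safely be stored into a kImmediate pool slot through the object path. A sketch of the tagging scheme, assuming the VM's published constants; this is illustration, not VM code:

#include <cassert>
#include <cstdint>

constexpr intptr_t kSmiTagMask = 1;  // low bit distinguishes Smi from heap object
constexpr intptr_t kSmiTag = 0;      // heap pointers carry tag bit 1 instead

intptr_t RawSmi(intptr_t value) { return value << 1; }  // low bit stays 0

int main() {
  intptr_t raw = RawSmi(42);
  assert((raw & kSmiTagMask) == kSmiTag);  // GC treats it as a non-pointer
  assert((raw >> 1) == 42);                // and the value decodes back
  return 0;
}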
2 changes: 2 additions & 0 deletions runtime/vm/runtime_entry_arm.cc
@@ -50,6 +50,8 @@ void RuntimeEntry::Call(Assembler* assembler, intptr_t argument_count) const {
     __ blx(TMP);
     __ LoadImmediate(TMP, VMTag::kDartCompiledTagId);
     __ str(TMP, Address(THR, Thread::vm_tag_offset()));
+    ASSERT((kAbiPreservedCpuRegs & (1 << THR)) != 0);
+    ASSERT((kAbiPreservedCpuRegs & (1 << PP)) != 0);
   } else {
     // Argument count is not checked here, but in the runtime entry for a more
     // informative error message.
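The new ASSERTs document the invariant the stubs now depend on: the platform C ABI must preserve THR and PP across the call, because both are read immediately after the runtime call returns. The check itself is a plain bitmask test; a standalone sketch with hypothetical register encodings and mask:

#include <cassert>

// Hypothetical encodings: bit i set in the mask means register i is
// callee-saved under the platform ABI.
constexpr int THR = 10;
constexpr int PP = 5;
constexpr int kAbiPreservedCpuRegs = (1 << 4) | (1 << 5) | (1 << 10) | (1 << 11);

int main() {
  // Same shape as the ASSERTs added in this commit.
  assert((kAbiPreservedCpuRegs & (1 << THR)) != 0);
  assert((kAbiPreservedCpuRegs & (1 << PP)) != 0);
  return 0;
}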
2 changes: 2 additions & 0 deletions runtime/vm/runtime_entry_arm64.cc
@@ -67,6 +67,8 @@ void RuntimeEntry::Call(Assembler* assembler, intptr_t argument_count) const {
     __ str(TMP, Address(THR, Thread::vm_tag_offset()));
     __ mov(SP, R25);
     __ mov(CSP, R23);
+    ASSERT((kAbiPreservedCpuRegs & (1 << THR)) != 0);
+    ASSERT((kAbiPreservedCpuRegs & (1 << PP)) != 0);
   } else {
     // Argument count is not checked here, but in the runtime entry for a more
     // informative error message.
2 changes: 2 additions & 0 deletions runtime/vm/runtime_entry_x64.cc
@@ -32,6 +32,8 @@ void RuntimeEntry::Call(Assembler* assembler, intptr_t argument_count) const {
     __ movq(Assembler::VMTagAddress(), RAX);
     __ CallCFunction(RAX);
     __ movq(Assembler::VMTagAddress(), Immediate(VMTag::kDartCompiledTagId));
+    ASSERT((CallingConventions::kCalleeSaveCpuRegisters & (1 << THR)) != 0);
+    ASSERT((CallingConventions::kCalleeSaveCpuRegisters & (1 << PP)) != 0);
   } else {
     // Argument count is not checked here, but in the runtime entry for a more
     // informative error message.
14 changes: 7 additions & 7 deletions runtime/vm/stub_code_arm.cc
@@ -102,6 +102,12 @@ void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) {
   __ LoadImmediate(R2, 0);
   __ StoreToOffset(kWord, R2, THR, Thread::top_exit_frame_info_offset());
 
+  // Restore the global object pool after returning from runtime (old space is
+  // moving, so the GOP could have been relocated).
+  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+    __ ldr(PP, Address(THR, Thread::global_object_pool_offset()));
+  }
+
   __ LeaveStubFrame();
 
   // The following return can jump to a lazy-deopt stub, which assumes R0
@@ -143,13 +149,7 @@ void StubCode::GenerateSharedStub(Assembler* assembler,
   __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
 
   __ EnterStubFrame();
-
-  __ ldr(CODE_REG, Address(THR, Thread::call_to_runtime_stub_offset()));
-  __ ldr(R9, Address(THR, Thread::OffsetFromThread(target)));
-  __ mov(R4, Operand(/*argument_count=*/0));
-  __ ldr(TMP, Address(THR, Thread::call_to_runtime_entry_point_offset()));
-  __ blx(TMP);
-
+  __ CallRuntime(*target, /*argument_count=*/0);
   if (!allow_return) {
     __ Breakpoint();
     return;
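The GenerateSharedStub rewrite replaces a hand-rolled call sequence with __ CallRuntime(...), so every runtime call flows through RuntimeEntry::Call() and picks up the tag bookkeeping and the ASSERTs above in one place. Presumably the macro is a thin forwarder along these lines; the signatures below are assumed for the sketch, not quoted from VM source:

#include <cstdint>

class Assembler;

// Assumed shape of the funnel point; the real declarations live in the VM.
class RuntimeEntry {
 public:
  void Call(Assembler* assembler, intptr_t argument_count) const;
};

class Assembler {
 public:
  // Every stub now emits runtime calls through here, so the VM-tag switch,
  // the C call, and the preserved-register ASSERTs are centralized.
  void CallRuntime(const RuntimeEntry& entry, intptr_t argument_count) {
    entry.Call(this, argument_count);
  }
};

void RuntimeEntry::Call(Assembler* assembler, intptr_t argument_count) const {
  (void)assembler;
  (void)argument_count;  // per-architecture emission elided in this sketch
}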
15 changes: 8 additions & 7 deletions runtime/vm/stub_code_arm64.cc
@@ -125,6 +125,13 @@ void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) {
   // Reset exit frame information in Isolate structure.
   __ StoreToOffset(ZR, THR, Thread::top_exit_frame_info_offset());
 
+  // Restore the global object pool after returning from runtime (old space is
+  // moving, so the GOP could have been relocated).
+  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+    __ ldr(PP, Address(THR, Thread::global_object_pool_offset()));
+    __ sub(PP, PP, Operand(kHeapObjectTag));  // Pool in PP is untagged!
+  }
+
   __ LeaveStubFrame();
 
   // The following return can jump to a lazy-deopt stub, which assumes R0
@@ -166,13 +173,7 @@ void StubCode::GenerateSharedStub(Assembler* assembler,
   __ ldr(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
 
   __ EnterStubFrame();
-
-  __ ldr(CODE_REG, Address(THR, Thread::call_to_runtime_stub_offset()));
-  __ ldr(R5, Address(THR, Thread::OffsetFromThread(target)));
-  __ LoadImmediate(R4, /*argument_count=*/0);
-  __ ldr(TMP, Address(THR, Thread::call_to_runtime_entry_point_offset()));
-  __ blr(TMP);
-
+  __ CallRuntime(*target, /*argument_count=*/0);
   if (!allow_return) {
     __ Breakpoint();
     return;
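Note the ARM64-only extra step: the pool pointer loaded from the Thread is a tagged heap pointer, while PP is kept untagged on ARM64 (per the in-diff comment) so pool loads can use plain offsets, hence the sub after the reload. The arithmetic, spelled out with a hypothetical address (kHeapObjectTag is 1 in the Dart VM):

#include <cassert>
#include <cstdint>

constexpr uintptr_t kHeapObjectTag = 1;

int main() {
  uintptr_t pool_address = 0x7f00a000;               // hypothetical, word-aligned
  uintptr_t tagged = pool_address | kHeapObjectTag;  // as stored on the Thread
  uintptr_t pp = tagged - kHeapObjectTag;            // what the stub puts in PP
  assert(pp == pool_address);
  return 0;
}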
13 changes: 7 additions & 6 deletions runtime/vm/stub_code_x64.cc
@@ -99,6 +99,12 @@ void StubCode::GenerateCallToRuntimeStub(Assembler* assembler) {
   // Reset exit frame information in Isolate structure.
   __ movq(Address(THR, Thread::top_exit_frame_info_offset()), Immediate(0));
 
+  // Restore the global object pool after returning from runtime (old space is
+  // moving, so the GOP could have been relocated).
+  if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+    __ movq(PP, Address(THR, Thread::global_object_pool_offset()));
+  }
+
   __ LeaveStubFrame();
 
   // The following return can jump to a lazy-deopt stub, which assumes RAX
@@ -136,12 +142,7 @@ void StubCode::GenerateSharedStub(Assembler* assembler,
   __ movq(CODE_REG, Address(THR, self_code_stub_offset_from_thread));
 
   __ EnterStubFrame();
-
-  __ movq(CODE_REG, Address(THR, Thread::call_to_runtime_stub_offset()));
-  __ movq(RBX, Address(THR, Thread::OffsetFromThread(target)));
-  __ movq(R10, Immediate(/*argument_count=*/0));
-  __ call(Address(THR, Thread::call_to_runtime_entry_point_offset()));
-
+  __ CallRuntime(*target, /*argument_count=*/0);
   if (!allow_return) {
     __ Breakpoint();
     return;
