Runtime support for the new stack maps for the opt compiler.
Most methods supported by the optimizing compiler can now be
optimized, instead of falling back to the baseline compiler.
Change-Id: I80ab36a34913fa4e7dd576c7bf55af63594dc1fa
diff --git a/compiler/optimizing/locations.h b/compiler/optimizing/locations.h
index aaddb09..041e85b 100644
--- a/compiler/optimizing/locations.h
+++ b/compiler/optimizing/locations.h
@@ -18,6 +18,7 @@
#define ART_COMPILER_OPTIMIZING_LOCATIONS_H_
#include "base/bit_field.h"
+#include "base/bit_vector.h"
#include "utils/allocation.h"
#include "utils/growable_array.h"
#include "utils/managed_register.h"
@@ -43,13 +44,13 @@
// low bits are in the last parameter register, and the high
// bits are in a stack slot. The kQuickParameter kind is for
// handling this special case.
- kQuickParameter = 5,
+ kQuickParameter = 6,
// Unallocated location represents a location that is not fixed and can be
// allocated by a register allocator. Each unallocated location has
// a policy that specifies what kind of location is suitable. Payload
// contains register allocation policy.
- kUnallocated = 6,
+ kUnallocated = 7,
};
Location() : value_(kInvalid) {
@@ -59,8 +60,8 @@
COMPILE_ASSERT((kStackSlot & kLocationTagMask) != kConstant, TagError);
COMPILE_ASSERT((kDoubleStackSlot & kLocationTagMask) != kConstant, TagError);
COMPILE_ASSERT((kRegister & kLocationTagMask) != kConstant, TagError);
+ COMPILE_ASSERT((kQuickParameter & kLocationTagMask) != kConstant, TagError);
COMPILE_ASSERT((kConstant & kLocationTagMask) == kConstant, TagError);
- COMPILE_ASSERT((kQuickParameter & kLocationTagMask) == kConstant, TagError);
DCHECK(!IsValid());
}
@@ -173,7 +174,7 @@
x86_64::X86_64ManagedRegister AsX86_64() const;
Kind GetKind() const {
- return KindField::Decode(value_);
+ return IsConstant() ? kConstant : KindField::Decode(value_);
}
bool Equals(Location other) const {
@@ -275,7 +276,13 @@
*/
class LocationSummary : public ArenaObject {
public:
- explicit LocationSummary(HInstruction* instruction);
+ enum CallKind {
+ kNoCall,
+ kCallOnSlowPath,
+ kCall
+ };
+
+ explicit LocationSummary(HInstruction* instruction, CallKind call_kind = kNoCall);
void SetInAt(uint32_t at, Location location) {
inputs_.Put(at, location);
@@ -309,12 +316,50 @@
return temps_.Size();
}
+ void SetEnvironmentAt(uint32_t at, Location location) {
+ environment_.Put(at, location);
+ }
+
+ Location GetEnvironmentAt(uint32_t at) const {
+ return environment_.Get(at);
+ }
+
Location Out() const { return output_; }
+ bool CanCall() const { return call_kind_ != kNoCall; }
+ bool NeedsSafepoint() const { return CanCall(); }
+
+ void SetStackBit(uint32_t index) {
+ stack_mask_->SetBit(index);
+ }
+
+ void SetRegisterBit(uint32_t reg_id) {
+ register_mask_ |= (1 << reg_id);
+ }
+
+ void SetLiveRegister(uint32_t reg_id) {
+ live_registers_ |= (1 << reg_id);
+ }
+
+ BitVector* GetStackMask() const {
+ return stack_mask_;
+ }
+
private:
GrowableArray<Location> inputs_;
GrowableArray<Location> temps_;
+ GrowableArray<Location> environment_;
Location output_;
+ const CallKind call_kind_;
+
+  // Mask of objects that live on the stack.
+ BitVector* stack_mask_;
+
+  // Mask of objects that live in registers.
+ uint32_t register_mask_;
+
+ // Registers that are in use at this position.
+ uint32_t live_registers_;
DISALLOW_COPY_AND_ASSIGN(LocationSummary);
};