/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_
#define ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_

#include "base/arena_allocator.h"
#include "base/arena_bit_vector.h"
#include "base/bit_vector-inl.h"
#include "base/macros.h"
#include "base/scoped_arena_allocator.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"
#include "escape.h"
#include "nodes.h"
#include "optimizing/optimizing_compiler_stats.h"

namespace art HIDDEN {

// A ReferenceInfo contains additional info about a reference, such as
// whether it's a singleton, whether it's returned, etc.
class ReferenceInfo : public DeletableArenaObject<kArenaAllocLSA> {
 public:
  ReferenceInfo(HInstruction* reference, size_t pos)
      : reference_(reference),
        position_(pos),
        is_singleton_(true),
        is_singleton_and_not_returned_(true),
        is_singleton_and_not_deopt_visible_(true) {
    CalculateEscape(reference_,
                    nullptr,
                    &is_singleton_,
                    &is_singleton_and_not_returned_,
                    &is_singleton_and_not_deopt_visible_);
  }

  HInstruction* GetReference() const {
    return reference_;
  }

  size_t GetPosition() const {
    return position_;
  }

  // Returns true if reference_ is the only name that can refer to its value during
  // the lifetime of the method. So it's guaranteed not to have any alias in
  // the method (including its callees).
  bool IsSingleton() const {
    return is_singleton_;
  }

  // Returns true if reference_ is a singleton and is neither returned to the caller
  // nor used as an environment local of an HDeoptimize instruction.
  // The allocation and stores into reference_ may be eliminated in such cases.
  bool IsSingletonAndRemovable() const {
    return is_singleton_and_not_returned_ && is_singleton_and_not_deopt_visible_;
  }

  // Returns true if reference_ is a singleton and is either returned to the caller
  // or used as an environment local of an HDeoptimize instruction.
  bool IsSingletonAndNonRemovable() const {
    return is_singleton_ &&
           (!is_singleton_and_not_returned_ || !is_singleton_and_not_deopt_visible_);
  }

 private:
  HInstruction* const reference_;
  const size_t position_;  // Position in HeapLocationCollector's ref_info_array_.

  // Can only be referred to by a single name in the method.
  bool is_singleton_;
  // Is singleton and not returned to caller.
  bool is_singleton_and_not_returned_;
  // Is singleton and not used as an environment local of HDeoptimize.
  bool is_singleton_and_not_deopt_visible_;

  DISALLOW_COPY_AND_ASSIGN(ReferenceInfo);
};
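
// A sketch of how the singleton classification plays out (hypothetical HIR,
// not actual compiler output):
//   i0 NewInstance               <-- never passed to a call, never stored into
//   i1 InstanceFieldSet(i0, v)       the heap: i0 is a singleton.
//   i2 Return(v)                 <-- i0 is not returned and not deopt-visible,
//                                    so IsSingletonAndRemovable() holds for i0.
// If i2 were Return(i0) instead, i0 would still be a singleton, but
// IsSingletonAndNonRemovable() would hold, blocking allocation elimination.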

// A heap location is a reference-offset/index pair that a value can be loaded from
// or stored to.
class HeapLocation : public ArenaObject<kArenaAllocLSA> {
 public:
  static constexpr size_t kInvalidFieldOffset = -1;
  // Default value for heap locations which are not vector data.
  static constexpr size_t kScalar = 1;
  // TODO: more fine-grained array types.
  static constexpr int16_t kDeclaringClassDefIndexForArrays = -1;

  HeapLocation(ReferenceInfo* ref_info,
               DataType::Type type,
               size_t offset,
               HInstruction* index,
               size_t vector_length,
               int16_t declaring_class_def_index,
               bool is_vec_op)
      : ref_info_(ref_info),
        type_(DataType::ToSigned(type)),
        offset_(offset),
        index_(index),
        vector_length_(vector_length),
        declaring_class_def_index_(declaring_class_def_index),
        has_aliased_locations_(false),
        is_vec_op_(is_vec_op) {
    DCHECK(ref_info != nullptr);
    DCHECK((offset == kInvalidFieldOffset && index != nullptr) ||
           (offset != kInvalidFieldOffset && index == nullptr));
  }

  ReferenceInfo* GetReferenceInfo() const { return ref_info_; }
  DataType::Type GetType() const { return type_; }
  size_t GetOffset() const { return offset_; }
  HInstruction* GetIndex() const { return index_; }
  size_t GetVectorLength() const { return vector_length_; }
  bool IsVecOp() const { return is_vec_op_; }

  // Returns the declaring class's def index in the dex file.
  // It's kDeclaringClassDefIndexForArrays for an array element.
  int16_t GetDeclaringClassDefIndex() const {
    return declaring_class_def_index_;
  }

  bool IsArray() const {
    return index_ != nullptr;
  }

  bool HasAliasedLocations() const {
    return has_aliased_locations_;
  }

  void SetHasAliasedLocations(bool val) {
    has_aliased_locations_ = val;
  }

 private:
  // Reference for instance/static field, array element or vector data.
  ReferenceInfo* const ref_info_;
  // Type of data residing at HeapLocation (always signed for integral
  // data since e.g. a[i] and a[i] & 0xff are represented by differently
  // signed types; char vs short are disambiguated through the reference).
  const DataType::Type type_;
  // Offset of static/instance field.
  // Invalid when this HeapLocation is not a field access.
  const size_t offset_;
  // Index of an array element or starting index of vector data.
  // Invalid when this HeapLocation is not an array access.
  HInstruction* const index_;
  // Vector length of vector data.
  // When this HeapLocation is not vector data, its value is kScalar.
  const size_t vector_length_;
  // Declaring class's def index in the dex file.
  // Invalid when this HeapLocation is not a field access.
  const int16_t declaring_class_def_index_;
  // Whether this location has aliased heap locations in the method, because
  // either the reference is aliased or the array element is aliased via
  // different index names.
  bool has_aliased_locations_;
  // Whether this HeapLocation represents a vector operation.
  bool is_vec_op_;

  DISALLOW_COPY_AND_ASSIGN(HeapLocation);
};
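
// A sketch of how distinct accesses map to distinct heap locations (hypothetical
// HIR, not actual compiler output): given one array reference a, the accesses
//   i0 ArrayGet(a, i)                  <-- offset=kInvalidFieldOffset, index=i,
//                                          vector_length=kScalar, is_vec_op=false
//   i1 VecLoad(a, i, vector_length=4)  <-- same reference and index, but
//                                          vector_length=4 and is_vec_op=true
// are recorded as two different HeapLocations; whether they may overlap is
// decided later by the aliasing analysis, not by location identity.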

// A HeapLocationCollector collects all relevant heap locations and keeps
// an aliasing matrix for all locations.
class HeapLocationCollector : public HGraphVisitor {
 public:
  static constexpr size_t kHeapLocationNotFound = -1;
  // Start with a single uint32_t word. That's enough bits for a pair-wise
  // aliasing matrix of 8 heap locations (8 * 7 / 2 = 28 pairs).
  static constexpr uint32_t kInitialAliasingMatrixBitVectorSize = 32;

  HeapLocationCollector(HGraph* graph, ScopedArenaAllocator* allocator)
      : HGraphVisitor(graph),
        allocator_(allocator),
        ref_info_array_(allocator->Adapter(kArenaAllocLSA)),
        heap_locations_(allocator->Adapter(kArenaAllocLSA)),
        aliasing_matrix_(allocator, kInitialAliasingMatrixBitVectorSize, true, kArenaAllocLSA),
        has_heap_stores_(false) {}

  ~HeapLocationCollector() {
    CleanUp();
  }

  void CleanUp() {
    heap_locations_.clear();
    STLDeleteContainerPointers(ref_info_array_.begin(), ref_info_array_.end());
    ref_info_array_.clear();
  }

  size_t GetNumberOfHeapLocations() const {
    return heap_locations_.size();
  }

  HeapLocation* GetHeapLocation(size_t index) const {
    return heap_locations_[index];
  }

  size_t GetHeapLocationIndex(const HeapLocation* hl) const {
    auto res = std::find(heap_locations_.cbegin(), heap_locations_.cend(), hl);
    return std::distance(heap_locations_.cbegin(), res);
  }
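
  // Note on the getters above: GetHeapLocationIndex() relies on std::find, so a
  // HeapLocation that is not in heap_locations_ maps to GetNumberOfHeapLocations().
  // A minimal round-trip sketch (hypothetical client code, assuming `collector`
  // is a built HeapLocationCollector and `i` is a valid location index):
  //
  //   HeapLocation* loc = collector.GetHeapLocation(i);
  //   DCHECK_EQ(collector.GetHeapLocationIndex(loc), i);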

  HInstruction* HuntForOriginalReference(HInstruction* ref) const {
    // An original reference can be transformed by instructions like:
    //   i0 NewArray
    //   i1 HInstruction(i0)  <-- NullCheck, BoundType, IntermediateAddress.
    //   i2 ArrayGet(i1, index)
    DCHECK(ref != nullptr);
    while (ref->IsNullCheck() || ref->IsBoundType() || ref->IsIntermediateAddress()) {
      ref = ref->InputAt(0);
    }
    return ref;
  }

  ReferenceInfo* FindReferenceInfoOf(HInstruction* ref) const {
    for (size_t i = 0; i < ref_info_array_.size(); i++) {
      ReferenceInfo* ref_info = ref_info_array_[i];
      if (ref_info->GetReference() == ref) {
        DCHECK_EQ(i, ref_info->GetPosition());
        return ref_info;
      }
    }
    return nullptr;
  }

  size_t GetFieldHeapLocation(HInstruction* object, const FieldInfo* field) const {
    DCHECK(object != nullptr);
    DCHECK(field != nullptr);
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(object)),
                                 field->GetFieldType(),
                                 field->GetFieldOffset().SizeValue(),
                                 nullptr,
                                 HeapLocation::kScalar,
                                 field->GetDeclaringClassDefIndex(),
                                 /*is_vec_op=*/false);
  }

  size_t GetArrayHeapLocation(HInstruction* instruction) const {
    DCHECK(instruction != nullptr);
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    size_t vector_length = HeapLocation::kScalar;
    const bool is_vec_op = instruction->IsVecStore() || instruction->IsVecLoad();
    if (instruction->IsArraySet()) {
      type = instruction->AsArraySet()->GetComponentType();
    } else if (is_vec_op) {
      HVecOperation* vec_op = instruction->AsVecOperation();
      type = vec_op->GetPackedType();
      vector_length = vec_op->GetVectorLength();
    } else {
      DCHECK(instruction->IsArrayGet());
    }
    return FindHeapLocationIndex(FindReferenceInfoOf(HuntForOriginalReference(array)),
                                 type,
                                 HeapLocation::kInvalidFieldOffset,
                                 index,
                                 vector_length,
                                 HeapLocation::kDeclaringClassDefIndexForArrays,
                                 is_vec_op);
  }

  bool HasHeapStores() const {
    return has_heap_stores_;
  }
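
  // A sketch of how a client pass might query the helpers above (hypothetical
  // code, assuming `collector` is a fully built HeapLocationCollector and
  // `array_get` is an HArrayGet in the same graph):
  //
  //   size_t idx = collector.GetArrayHeapLocation(array_get);
  //   if (idx != HeapLocationCollector::kHeapLocationNotFound &&
  //       !collector.GetHeapLocation(idx)->HasAliasedLocations()) {
  //     // The load is known to touch a non-aliased location.
  //   }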

  // Find and return the heap location index in heap_locations_.
  // NOTE: When heap locations are created, potentially aliasing/overlapping
  // accesses are given different indexes. This find function also
  // doesn't take aliasing/overlapping into account. For example,
  // this function returns three different indexes for:
  //   - ref_info=array, index=i, vector_length=kScalar;
  //   - ref_info=array, index=i, vector_length=2;
  //   - ref_info=array, index=i, vector_length=4;
  // In later analysis, ComputeMayAlias() and MayAlias() compute and tell whether
  // these indexes alias.
  size_t FindHeapLocationIndex(ReferenceInfo* ref_info,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index,
                               bool is_vec_op) const {
    DataType::Type lookup_type = DataType::ToSigned(type);
    for (size_t i = 0; i < heap_locations_.size(); i++) {
      HeapLocation* loc = heap_locations_[i];
      if (loc->GetReferenceInfo() == ref_info &&
          loc->GetType() == lookup_type &&
          loc->GetOffset() == offset &&
          loc->GetIndex() == index &&
          loc->GetVectorLength() == vector_length &&
          loc->GetDeclaringClassDefIndex() == declaring_class_def_index &&
          loc->IsVecOp() == is_vec_op) {
        return i;
      }
    }
    return kHeapLocationNotFound;
  }

  bool InstructionEligibleForLSERemoval(HInstruction* inst) const;

  // Get some estimated statistics based on our analysis.
  void DumpReferenceStats(OptimizingCompilerStats* stats);

  // Returns true if heap_locations_[index1] and heap_locations_[index2] may alias.
  bool MayAlias(size_t index1, size_t index2) const {
    if (index1 < index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index1, index2));
    } else if (index1 > index2) {
      return aliasing_matrix_.IsBitSet(AliasingMatrixPosition(index2, index1));
    } else {
      DCHECK(false) << "index1 and index2 are expected to be different";
      return true;
    }
  }

  void BuildAliasingMatrix() {
    const size_t number_of_locations = heap_locations_.size();
    if (number_of_locations == 0) {
      return;
    }
    size_t pos = 0;
    // Compute aliasing info between every pair of different heap locations.
    // Save the result in a matrix represented as a BitVector.
    for (size_t i = 0; i < number_of_locations - 1; i++) {
      for (size_t j = i + 1; j < number_of_locations; j++) {
        if (ComputeMayAlias(i, j)) {
          aliasing_matrix_.SetBit(CheckedAliasingMatrixPosition(i, j, pos));
        }
        pos++;
      }
    }
  }

  static bool CanReferencesAlias(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1 == ref_info2) {
      return true;
    } else if (ref_info1->IsSingleton()) {
      return false;
    } else if (ref_info2->IsSingleton()) {
      return false;
    } else if (!MayAliasWithPreexistenceChecking(ref_info1, ref_info2) ||
               !MayAliasWithPreexistenceChecking(ref_info2, ref_info1)) {
      return false;
    }
    return true;
  }
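
  // The aliasing queries above are only meaningful after collection is complete.
  // A client (in practice LoadStoreAnalysis::Run() drives this) first lets the
  // visitor walk the graph, then builds the matrix, and only then queries it
  // (hypothetical client code):
  //
  //   collector.BuildAliasingMatrix();
  //   if (collector.MayAlias(idx1, idx2)) { /* treat the accesses as possibly overlapping */ }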

 private:
  // An allocation cannot alias with a name which already exists at the point
  // of the allocation, such as a parameter or a load happening before the allocation.
  static bool MayAliasWithPreexistenceChecking(ReferenceInfo* ref_info1, ReferenceInfo* ref_info2) {
    if (ref_info1->GetReference()->IsNewInstance() || ref_info1->GetReference()->IsNewArray()) {
      // Any reference that can alias with the allocation must appear after it in the block/in
      // the block's successors. In reverse post order, those instructions will be visited after
      // the allocation.
      return ref_info2->GetPosition() >= ref_info1->GetPosition();
    }
    return true;
  }

  bool CanArrayElementsAlias(const HInstruction* idx1,
                             const size_t vector_length1,
                             const HInstruction* idx2,
                             const size_t vector_length2) const;

  // `index1` and `index2` are indices in the array of collected heap locations.
  // Returns the position in the bit vector that tracks whether the two heap
  // locations may alias.
  size_t AliasingMatrixPosition(size_t index1, size_t index2) const {
    DCHECK(index2 > index1);
    const size_t number_of_locations = heap_locations_.size();
    // It's (num_of_locations - 1) + ... + (num_of_locations - index1) + (index2 - index1 - 1).
    return (number_of_locations * index1 - (1 + index1) * index1 / 2 + (index2 - index1 - 1));
  }

  // An additional position is passed in to make sure the calculated position is correct.
  size_t CheckedAliasingMatrixPosition(size_t index1, size_t index2, size_t position) {
    size_t calculated_position = AliasingMatrixPosition(index1, index2);
    DCHECK_EQ(calculated_position, position);
    return calculated_position;
  }
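
  // A worked example of the triangular layout computed by AliasingMatrixPosition()
  // above: with 4 heap locations there are 4 * 3 / 2 = 6 pairs, stored at bit
  // positions
  //   (0,1)->0, (0,2)->1, (0,3)->2, (1,2)->3, (1,3)->4, (2,3)->5,
  // which is exactly the order in which BuildAliasingMatrix() enumerates pairs,
  // so its `pos` counter stays in sync with the computed positions.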

  // Compute whether two locations may alias each other.
  bool ComputeMayAlias(size_t index1, size_t index2) const {
    DCHECK_NE(index1, index2);
    HeapLocation* loc1 = heap_locations_[index1];
    HeapLocation* loc2 = heap_locations_[index2];
    if (loc1->GetOffset() != loc2->GetOffset()) {
      // Either two different instance fields, or one is an instance
      // field and the other is array data.
      return false;
    }
    if (loc1->GetDeclaringClassDefIndex() != loc2->GetDeclaringClassDefIndex()) {
      // Different types.
      return false;
    }
    if (!CanReferencesAlias(loc1->GetReferenceInfo(), loc2->GetReferenceInfo())) {
      return false;
    }
    if (loc1->IsArray() && loc2->IsArray()) {
      HInstruction* idx1 = loc1->GetIndex();
      HInstruction* idx2 = loc2->GetIndex();
      size_t vector_length1 = loc1->GetVectorLength();
      size_t vector_length2 = loc2->GetVectorLength();
      if (!CanArrayElementsAlias(idx1, vector_length1, idx2, vector_length2)) {
        return false;
      }
    }
    loc1->SetHasAliasedLocations(true);
    loc2->SetHasAliasedLocations(true);
    return true;
  }

  ReferenceInfo* GetOrCreateReferenceInfo(HInstruction* instruction) {
    ReferenceInfo* ref_info = FindReferenceInfoOf(instruction);
    if (ref_info == nullptr) {
      size_t pos = ref_info_array_.size();
      ref_info = new (allocator_) ReferenceInfo(instruction, pos);
      ref_info_array_.push_back(ref_info);
    }
    return ref_info;
  }

  void CreateReferenceInfoForReferenceType(HInstruction* instruction) {
    if (instruction->GetType() != DataType::Type::kReference) {
      return;
    }
    DCHECK(FindReferenceInfoOf(instruction) == nullptr);
    GetOrCreateReferenceInfo(instruction);
  }

  void MaybeCreateHeapLocation(HInstruction* ref,
                               DataType::Type type,
                               size_t offset,
                               HInstruction* index,
                               size_t vector_length,
                               int16_t declaring_class_def_index,
                               bool is_vec_op) {
    HInstruction* original_ref = HuntForOriginalReference(ref);
    ReferenceInfo* ref_info = GetOrCreateReferenceInfo(original_ref);
    size_t heap_location_idx = FindHeapLocationIndex(
        ref_info, type, offset, index, vector_length, declaring_class_def_index, is_vec_op);
    if (heap_location_idx == kHeapLocationNotFound) {
      HeapLocation* heap_loc = new (allocator_) HeapLocation(
          ref_info, type, offset, index, vector_length, declaring_class_def_index, is_vec_op);
      heap_locations_.push_back(heap_loc);
    }
  }

  void VisitFieldAccess(HInstruction* ref, const FieldInfo& field_info) {
    DataType::Type type = field_info.GetFieldType();
    const uint16_t declaring_class_def_index = field_info.GetDeclaringClassDefIndex();
    const size_t offset = field_info.GetFieldOffset().SizeValue();
    MaybeCreateHeapLocation(ref,
                            type,
                            offset,
                            nullptr,
                            HeapLocation::kScalar,
                            declaring_class_def_index,
                            /*is_vec_op=*/false);
  }

  void VisitArrayAccess(HInstruction* array,
                        HInstruction* index,
                        DataType::Type type,
                        size_t vector_length,
                        bool is_vec_op) {
    MaybeCreateHeapLocation(array,
                            type,
                            HeapLocation::kInvalidFieldOffset,
                            index,
                            vector_length,
                            HeapLocation::kDeclaringClassDefIndexForArrays,
                            is_vec_op);
  }

  void VisitInstanceFieldGet(HInstanceFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitInstanceFieldSet(HInstanceFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  void VisitStaticFieldGet(HStaticFieldGet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitStaticFieldSet(HStaticFieldSet* instruction) override {
    VisitFieldAccess(instruction->InputAt(0), instruction->GetFieldInfo());
    has_heap_stores_ = true;
  }

  // We intentionally don't collect HUnresolvedInstanceField/HUnresolvedStaticField accesses
  // since we cannot accurately track the fields.

  void VisitArrayGet(HArrayGet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar, /*is_vec_op=*/false);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitArraySet(HArraySet* instruction) override {
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetComponentType();
    VisitArrayAccess(array, index, type, HeapLocation::kScalar, /*is_vec_op=*/false);
    has_heap_stores_ = true;
  }

  void VisitVecLoad(HVecLoad* instruction) override {
    DCHECK(!instruction->IsPredicated());
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength(), /*is_vec_op=*/true);
    CreateReferenceInfoForReferenceType(instruction);
  }

  void VisitVecStore(HVecStore* instruction) override {
    DCHECK(!instruction->IsPredicated());
    HInstruction* array = instruction->InputAt(0);
    HInstruction* index = instruction->InputAt(1);
    DataType::Type type = instruction->GetPackedType();
    VisitArrayAccess(array, index, type, instruction->GetVectorLength(), /*is_vec_op=*/true);
    has_heap_stores_ = true;
  }

  void VisitInstruction(HInstruction* instruction) override {
    // Any new-instance or new-array cannot alias with references that
    // pre-exist the new-instance/new-array. We append entries into
    // ref_info_array_ which keeps track of the order of creation
    // of reference values since we visit the blocks in reverse post order.
    //
    // By default, VisitXXX() (including VisitPhi()) calls VisitInstruction(),
    // unless VisitXXX() is overridden. VisitInstanceFieldGet() etc. above
    // also call CreateReferenceInfoForReferenceType() explicitly.
    CreateReferenceInfoForReferenceType(instruction);
  }

  ScopedArenaAllocator* allocator_;
  ScopedArenaVector<ReferenceInfo*> ref_info_array_;  // All references used for heap accesses.
  ScopedArenaVector<HeapLocation*> heap_locations_;   // All heap locations.
  ArenaBitVector aliasing_matrix_;                    // Aliasing info between each pair of locations.
  bool has_heap_stores_;                              // If there are no heap stores, LSE acts as GVN
                                                      // with better alias analysis and won't be as
                                                      // effective.

  DISALLOW_COPY_AND_ASSIGN(HeapLocationCollector);
};

class LoadStoreAnalysis {
 public:
  explicit LoadStoreAnalysis(HGraph* graph,
                             OptimizingCompilerStats* stats,
                             ScopedArenaAllocator* local_allocator)
      : graph_(graph), stats_(stats), heap_location_collector_(graph, local_allocator) {}

  const HeapLocationCollector& GetHeapLocationCollector() const {
    return heap_location_collector_;
  }

  bool Run();

 private:
  HGraph* graph_;
  OptimizingCompilerStats* stats_;
  HeapLocationCollector heap_location_collector_;

  DISALLOW_COPY_AND_ASSIGN(LoadStoreAnalysis);
};

}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_LOAD_STORE_ANALYSIS_H_