// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "test-pool-manager.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>

#include <algorithm>
#include <vector>

#include "pool-manager-impl.h"
#include "pool-manager.h"
#include "test-runner.h"

#define TEST(Name) TEST_(POOL_MANAGER_##Name)

#define IF_VERBOSE(exp) \
  if (Test::verbose()) exp

#define BUFFER_ALIGNMENT 16

using namespace vixl;

static int Random() { return static_cast<int>(std::abs(mrand48())); }

static int RandomObjectID(size_t num_objects) { return Random() % num_objects; }

static int RandomObjectSize() { return 1 + Random() % 256; }

static int RandomObjectAlignment(int size) {
  const int limit = static_cast<int>(floor(log2(BUFFER_ALIGNMENT)));
  int log2Size = static_cast<int>(floor(log2(size)));
  // Restrict alignment due to buffer alignment.
  log2Size = std::min(log2Size, limit);
  return (1 << (Random() % (1 + log2Size)));
}

// The size of the instruction.
static int RandomReferenceSize() { return (Random() % 2) ? 2 : 4; }

// The alignment of an instruction is either 2 or 4.
static int RandomInstructionAlignment() { return (Random() % 2) ? 2 : 4; }

static int32_t RandomMinOffset() {
  const int N = 3;
  static const int offsets[N] = {0, 2, 4};
  return offsets[Random() % N];
}

static int32_t RandomMaxOffset() {
  const int N = 5;
  static const int offsets[N] = {255, 1020, 1024, 4096, 16384};
  return offsets[Random() % N];
}

static int32_t RandomBranchMaxOffset() {
  const int N = 10;
  // The maximum offsets used for testing are taken from A32 and T32.
  static const int offsets[N] =
      {126, 254, 255, 1020, 1024, 2046, 4095, 1048574, 16777214, 33554428};
  return offsets[Random() % N];
}

static int RandomPCIncrement() {
  // A multiple of two.
  return 2 * (Random() % 4 + 1);
}

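// Test object that behaves like a literal: once it has been placed in a pool,
// all of its references are resolved and the pool object can be deleted.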
class TestObject : public LocationBase<int32_t> {
 public:
  TestObject(int size, int alignment, int id = 0)
      : LocationBase(0 /*type*/, size, alignment), id_(id) {}

  void EmitPoolObject(MacroAssemblerInterface *masm) VIXL_OVERRIDE {
    USE(masm);
  }

  bool ShouldDeletePoolObjectOnPlacement() const VIXL_OVERRIDE { return true; }

  // Update the references to this object.
  void ResolveReferences(internal::AssemblerBase *assembler) VIXL_OVERRIDE {
    int32_t location = GetLocation();
    USE(assembler);
    for (std::vector<ForwardReference<int32_t> *>::iterator iter =
             references_.begin();
         iter != references_.end();) {
      ForwardReference<int32_t> *ref = *iter;
      VIXL_ASSERT(ref->LocationIsEncodable(location));
      delete ref;
      iter = references_.erase(iter);
    }
    IF_VERBOSE(printf("Placed object %d at location: 0x%x (%u)\n",
                      id_,
                      location,
                      location));
  }

  void AddReference(ForwardReference<int32_t> *ref) {
    references_.push_back(ref);
  }

  int GetID() { return id_; }

  static TestObject *CreateRandom(int id) {
    int size = RandomObjectSize();
    int alignment = RandomObjectAlignment(size);
    IF_VERBOSE(printf("Object %d -> size = %d, alignment = %d\n",
                      id,
                      size,
                      alignment));
    return new TestObject(size, alignment, id);
  }

 private:
  // Store pointers to ForwardReference objects - TestObject is responsible
  // for deleting them.
  std::vector<ForwardReference<int32_t> *> references_;
  // Object id used for debugging.
  int id_;
};

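// Test object that behaves like the target of branches needing veneers: it is
// not deleted when placed, and each placement adds a new reference from the
// emitted "veneer" (see UpdatePoolObject).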
class TestBranchObject : public LocationBase<int32_t> {
 public:
  TestBranchObject(int size, int alignment, int id = 0)
      : LocationBase(1 /* type */, size, alignment), id_(id) {}

  bool UsePoolObjectEmissionMargin() const VIXL_OVERRIDE { return true; }
  int32_t GetPoolObjectEmissionMargin() const VIXL_OVERRIDE {
    return 1 * KBytes;
  }

  // Do nothing for now.
  void EmitPoolObject(MacroAssemblerInterface *masm) VIXL_OVERRIDE {
    USE(masm);
  }

  bool ShouldDeletePoolObjectOnPlacement() const VIXL_OVERRIDE { return false; }

  virtual void UpdatePoolObject(PoolObject<int32_t> *object) VIXL_OVERRIDE {
    // Reference from the last emitted veneer:
    int32_t min = location_ + min_offset_;
    int32_t max = location_ + max_offset_;
    // The alignment that the new "veneer" requires of the label.
    int reference_alignment = RandomInstructionAlignment();
    reference_alignment =
        std::max(reference_alignment, GetPoolObjectAlignment());
    ForwardReference<int32_t> *ref =
        new ForwardReference<int32_t>(location_,
                                      4 /*size*/,
                                      min,
                                      max,
                                      reference_alignment);
    AddReference(ref);
    object->Update(min, max, reference_alignment);
  }

  // Update the references to this object.
  void ResolveReferences(internal::AssemblerBase *assembler) VIXL_OVERRIDE {
    int32_t location = GetLocation();
    USE(assembler);
    for (std::vector<ForwardReference<int32_t> *>::iterator iter =
             references_.begin();
         iter != references_.end();) {
      ForwardReference<int32_t> *ref = *iter;
      VIXL_ASSERT(ref->LocationIsEncodable(location));
      delete ref;
      iter = references_.erase(iter);
    }
    IF_VERBOSE(printf("Veneer %d placed at location: 0x%x (%u)\n",
                      id_,
                      location,
                      location));
  }

  void AddReference(ForwardReference<int32_t> *ref) {
    references_.push_back(ref);
  }

  virtual int GetMaxAlignment() const VIXL_OVERRIDE {
    int max_alignment = GetPoolObjectAlignment();
    for (std::vector<ForwardReference<int32_t> *>::const_iterator iter =
             references_.begin();
         iter != references_.end();
         ++iter) {
      const ForwardReference<int32_t> *ref = *iter;
      if (ref->GetAlignment() > max_alignment)
        max_alignment = ref->GetAlignment();
    }
    return max_alignment;
  }
  virtual int32_t GetMinLocation() const VIXL_OVERRIDE {
    int32_t min_location = 0;
    for (std::vector<ForwardReference<int32_t> *>::const_iterator iter =
             references_.begin();
         iter != references_.end();
         ++iter) {
      const ForwardReference<int32_t> *ref = *iter;
      if (ref->GetMinLocation() > min_location)
        min_location = ref->GetMinLocation();
    }
    return min_location;
  }

  int GetID() { return id_; }

  static TestBranchObject *CreateRandom(int id) {
    int size = RandomReferenceSize();
    int alignment = size;
    IF_VERBOSE(printf("Object %d -> size = %d, alignment = %d\n",
                      id,
                      size,
                      alignment));
    return new TestBranchObject(size, alignment, id);
  }

 private:
  // Store pointers to ForwardReference objects - TestBranchObject is
  // responsible for deleting them.
  std::vector<ForwardReference<int32_t> *> references_;
  // Object id used for debugging.
  int id_;

  // These are the min and max offsets of the type of branch used for the
  // veneer.
  static const int32_t min_offset_ = 0;
  static const int32_t max_offset_ = 16 * 1024 * 1024;
};

// MacroAssembler implementation that does nothing but print in verbose mode.
class TestMacroAssembler : public MacroAssemblerInterface {
 public:
  TestMacroAssembler() : assembler_(128) {}

  void EmitPoolHeader() VIXL_OVERRIDE {
    IF_VERBOSE(printf("[MASM] Emitting pool header.\n"));
  }
  void EmitPoolFooter() VIXL_OVERRIDE {
    IF_VERBOSE(printf("[MASM] Emitting pool footer.\n"));
  }
  void EmitPaddingBytes(int n) VIXL_OVERRIDE {
    IF_VERBOSE(printf("[MASM] Added %d bytes of padding.\n", n));
  }
  void EmitNopBytes(int n) VIXL_OVERRIDE {
    IF_VERBOSE(printf("[MASM] Added %d bytes of NOPs.\n", n));
  }
  bool ArePoolsBlocked() const VIXL_OVERRIDE { return false; }
  bool AllowMacroInstructions() const VIXL_OVERRIDE { return false; }
  void SetAllowMacroInstructions(bool allow) VIXL_OVERRIDE { USE(allow); }

  void BlockPools() VIXL_OVERRIDE {}
  void ReleasePools() VIXL_OVERRIDE {}
  void EnsureEmitPoolsFor(size_t) VIXL_OVERRIDE {}
  internal::AssemblerBase *AsAssemblerBase() VIXL_OVERRIDE {
    return &assembler_;
  }

 private:
  internal::AssemblerBase assembler_;
};

// Used for debugging.
namespace vixl {
template <>
void PoolManager<int32_t>::DumpCurrentState(int32_t pc) const {
  IF_VERBOSE(
      printf("Number of objects: %d\n", static_cast<int>(objects_.size())));
  IF_VERBOSE(printf("Current pc = 0x%x (%d)\n", pc, pc));

  for (int i = 0; i < static_cast<int>(objects_.size()); ++i) {
    const PoolObject<int32_t> &object = objects_[i];
    IF_VERBOSE(
        printf("Object %d -> size = %d, alignment = %d, range = (%d,%d)\n",
               i,
               object.label_base_->GetPoolObjectSizeInBytes(),
               object.alignment_,
               object.min_location_,
               object.max_location_));
  }
}
}  // namespace vixl

// Basic test - checks that emitting a very simple pool works.
TEST(Basic) {
  TestMacroAssembler masm;

  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);
  TestObject object1(4 /*size*/, 4 /*alignment*/);
  TestObject object2(128 /*size*/, 4 /*alignment*/);
  ForwardReference<int32_t> *ref1_obj1 =
      new ForwardReference<int32_t>(0 /*location*/, 2 /*size*/, 0, 200);
  ForwardReference<int32_t> *ref2_obj1 =
      new ForwardReference<int32_t>(2 /*location*/, 2 /*size*/, 2, 202);
  ForwardReference<int32_t> *ref3_obj1 =
      new ForwardReference<int32_t>(4 /*location*/, 2 /*size*/, 4, 204);
  object1.AddReference(ref1_obj1);
  object1.AddReference(ref2_obj1);
  object1.AddReference(ref3_obj1);
  ForwardReference<int32_t> *ref1_obj2 =
      new ForwardReference<int32_t>(8 /*location*/, 2 /*size*/, 8, 500);
  ForwardReference<int32_t> *ref2_obj2 =
      new ForwardReference<int32_t>(12 /*location*/, 4 /*size*/, 12, 300);
  ForwardReference<int32_t> *ref3_obj2 =
      new ForwardReference<int32_t>(16 /*location*/, 4 /*size*/, 16, 400);
  object2.AddReference(ref1_obj2);
  object2.AddReference(ref2_obj2);
  object2.AddReference(ref3_obj2);

  pool_manager.AddObjectReference(ref1_obj1, &object1);
  pool_manager.AddObjectReference(ref2_obj1, &object1);
  pool_manager.AddObjectReference(ref3_obj1, &object1);
  pool_manager.AddObjectReference(ref1_obj2, &object2);
  pool_manager.AddObjectReference(ref2_obj2, &object2);
  pool_manager.AddObjectReference(ref3_obj2, &object2);

  pool_manager.Emit(&masm, 20);
}

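// Create a forward reference located at `pc`, with the given size and
// alignment, and a range expressed as offsets from `pc`, logging the details
// in verbose mode.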
static ForwardReference<int32_t> *CreateReference(int id,
                                                  int32_t pc,
                                                  int size,
                                                  int32_t min_offset,
                                                  int32_t max_offset,
                                                  int alignment) {
  IF_VERBOSE(printf(
      "About to add a new reference to object %d with min location = %d, max "
      "location = %d, alignment = %d, size = %d\n",
      id,
      min_offset + pc,
      max_offset + pc,
      alignment,
      size));
  return new ForwardReference<int32_t>(pc,
                                       size,
                                       min_offset + pc,
                                       max_offset + pc,
                                       alignment);
}

// Fuzz test that uses literal-like objects, which are deleted as soon as they
// are placed.
TEST(FuzzObjectDeletedWhenPlaced) {
  TestMacroAssembler masm;
  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);

  const int kObjectNum = 100;
  std::vector<TestObject *> objects;

  // Create objects.
  for (int i = 0; i < kObjectNum; ++i) {
    objects.push_back(TestObject::CreateRandom(i));
  }

  int32_t pc = 0;
  for (int i = 0; !objects.empty(); ++i) {
    IF_VERBOSE(printf("PC = 0x%x (%d)\n", pc, pc));
    int32_t pc_increment = RandomPCIncrement();
    IF_VERBOSE(printf("Attempting to increment PC by %d\n", pc_increment));
    if (pool_manager.MustEmit(pc, pc_increment)) {
      pc = pool_manager.Emit(&masm, pc, pc_increment);
    }
    pc += pc_increment;
    // Pick an object, randomly.
    TestObject *object = objects[RandomObjectID(objects.size())];
    int32_t min_offset = RandomMinOffset();
    int32_t max_offset = RandomMaxOffset();
    int32_t size = RandomReferenceSize();
    int32_t alignment =
        RandomObjectAlignment(object->GetPoolObjectSizeInBytes());
    ForwardReference<int32_t> *ref = CreateReference(object->GetID(),
                                                     pc,
                                                     size,
                                                     min_offset,
                                                     max_offset,
                                                     alignment);
    if (pool_manager.MustEmit(pc, size, ref, object)) {
      pc = pool_manager.Emit(&masm, pc, size, ref, object);
      delete ref;
      // The PC has changed, so we must recreate the reference, but only if it
      // is still a forward reference.
      if (!object->IsBound()) {
        ref = CreateReference(object->GetID(),
                              pc,
                              size,
                              min_offset,
                              max_offset,
                              alignment);
      }
    }
    IF_VERBOSE(printf("Incrementing PC by size of reference (%d).\n", size));
    pc += size;
    // We only need to track the reference if it's a forward reference.
    if (!object->IsBound()) {
      object->AddReference(ref);
      pool_manager.AddObjectReference(ref, object);
    }
    VIXL_ASSERT(!pool_manager.MustEmit(pc - 1));
    // Remove bound objects.
    for (std::vector<TestObject *>::iterator iter = objects.begin();
         iter != objects.end();) {
      TestObject *object = *iter;
      if (object->IsBound()) {
        delete object;
        iter = objects.erase(iter);
      } else {
        ++iter;
      }
    }
  }

  pool_manager.Emit(&masm, pc);
}

// Fuzz test that uses veneer-like objects, which are updated when they are
// placed and deleted when they are bound by the user.
TEST(FuzzObjectUpdatedWhenPlaced) {
  TestMacroAssembler masm;
  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);
  const int kObjectNum = 1000;
  std::vector<TestBranchObject *> objects;

  // Create objects.
  for (int i = 0; i < kObjectNum; ++i) {
    objects.push_back(TestBranchObject::CreateRandom(i));
  }

  int32_t pc = 0;
  for (int i = 0; !objects.empty(); ++i) {
    IF_VERBOSE(printf("PC = 0x%x (%d)\n", pc, pc));

    int32_t pc_increment = RandomPCIncrement();
    IF_VERBOSE(printf("Attempting to increment PC by %d\n", pc_increment));

    if (pool_manager.MustEmit(pc, pc_increment)) {
      pc = pool_manager.Emit(&masm, pc, pc_increment);
    }
    pc += pc_increment;

    // Pick a random object.
    TestBranchObject *object = objects[RandomObjectID(objects.size())];
    int32_t min_offset = RandomMinOffset();
    int32_t max_offset = RandomBranchMaxOffset();
    int32_t size = RandomReferenceSize();
    int32_t alignment =
        RandomObjectAlignment(object->GetPoolObjectSizeInBytes());
    ForwardReference<int32_t> *ref = CreateReference(object->GetID(),
                                                     pc,
                                                     size,
                                                     min_offset,
                                                     max_offset,
                                                     alignment);
    if (pool_manager.MustEmit(pc, size, ref, object)) {
      pc = pool_manager.Emit(&masm, pc, size);
      delete ref;
      // The PC has changed, so we must recreate the reference.
      ref = CreateReference(object->GetID(),
                            pc,
                            size,
                            min_offset,
                            max_offset,
                            alignment);
    }
    IF_VERBOSE(printf("Incrementing PC by size of reference (%d).\n", size));
    pc += size;
    object->AddReference(ref);
    pool_manager.AddObjectReference(ref, object);
    VIXL_ASSERT(!pool_manager.MustEmit(pc - 1));

    // Pick another random label to bind.
    const int kProbabilityToBind = 20;
    if ((Random() % 100) < kProbabilityToBind) {
      TestBranchObject *object = objects[RandomObjectID(objects.size())];
      // Binding can cause pool emission, so check whether we need to emit the
      // pools. The actual backends know the maximum alignment they might need
      // here, so they can simplify the check (they do not need to look at the
      // object references).
      int max_padding = object->GetMaxAlignment() - 1;
      if (pool_manager.MustEmit(pc, max_padding)) {
        pc = pool_manager.Emit(&masm, pc, max_padding);
      }
      pc = pool_manager.Bind(&masm, object, pc);
    }

    // Remove bound objects.
    for (std::vector<TestBranchObject *>::iterator iter = objects.begin();
         iter != objects.end();) {
      TestBranchObject *object = *iter;
      if (object->IsBound()) {
        delete object;
        iter = objects.erase(iter);
      } else {
        ++iter;
      }
    }
  }

  pool_manager.Emit(&masm, pc);
}

// Test that binding an unused label works.
TEST(BindUnusedLabel) {
  TestMacroAssembler masm;

  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);
  TestBranchObject *object = new TestBranchObject(4 /*size*/, 4 /*alignment*/);
  int32_t pc = 0;
  pool_manager.Bind(&masm, object, pc);
  delete object;
}

// Test that binding a label adds necessary padding.
TEST(BindLabelNeedsPadding) {
  TestMacroAssembler masm;

  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);

  // Label that needs padding because of the minimum location of the reference.
  TestBranchObject *object = new TestBranchObject(4 /*size*/, 2 /*alignment*/);
  ForwardReference<int32_t> *ref =
      new ForwardReference<int32_t>(0 /*location*/,
                                    2 /*size*/,
                                    4 /*min_location*/,
                                    500 /*max_location*/);
  object->AddReference(ref);
  pool_manager.AddObjectReference(ref, object);
  int32_t pc = 2;
  pc = pool_manager.Bind(&masm, object, pc);
  VIXL_ASSERT(pc == 4);
  delete object;

  // Label that needs padding because of the alignment of the object.
  object = new TestBranchObject(4 /*size*/, 4 /*alignment*/);
  ref = new ForwardReference<int32_t>(0 /*location*/,
                                      2 /*size*/,
                                      0 /*min_location*/,
                                      500 /*max_location*/);
  object->AddReference(ref);
  pool_manager.AddObjectReference(ref, object);

  pc = 2;
  pc = pool_manager.Bind(&masm, object, pc);
  VIXL_ASSERT(pc == 4);
  delete object;

  // Label that needs padding because of the alignment of the reference.
  object = new TestBranchObject(4 /*size*/, 1 /*alignment*/);
  ref = new ForwardReference<int32_t>(0 /*location*/,
                                      2 /*size*/,
                                      0 /*min_location*/,
                                      500 /*max_location*/,
                                      4 /*alignment*/);
  object->AddReference(ref);
  pool_manager.AddObjectReference(ref, object);

  pc = 2;
  pc = pool_manager.Bind(&masm, object, pc);
  VIXL_ASSERT(pc == 4);
  delete object;
}

// This test checks that when we omit the pool header, we insert any padding
// needed in order to meet the minimum location of the first object.
TEST(PoolWithoutHeaderMinLocation) {
  TestMacroAssembler masm;

  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);
  int object_size = 4;
  int object_alignment = 1;  // Do not restrict alignment for this test.
  int min_location = 4;      // We emit the pool at location 2, so need padding.
  int max_location = 500;
  TestObject object(object_size, object_alignment);
  ForwardReference<int32_t> *ref = new ForwardReference<int32_t>(0 /*location*/,
                                                                 2 /*size*/,
                                                                 min_location,
                                                                 max_location);
  object.AddReference(ref);
  pool_manager.AddObjectReference(ref, &object);

  int32_t new_pc = pool_manager.Emit(&masm,
                                     2,
                                     0, /* no new code added */
                                     NULL,
                                     NULL,
                                     PoolManager<int32_t>::kNoBranchRequired);
  USE(new_pc);
  VIXL_ASSERT(new_pc == min_location + object_size);
}

// This test checks that when we omit the pool header, we insert any padding
// needed in order to meet the alignment of the first object.
TEST(PoolWithoutHeaderAlignment) {
  TestMacroAssembler masm;

  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);
  int object_size = 4;
  int object_alignment = 4;  // We emit the pool at location 2, so need padding.
  int min_location = 0;      // Do not restrict this for this test.
  int max_location = 500;
  TestObject object(object_size, object_alignment);
  ForwardReference<int32_t> *ref = new ForwardReference<int32_t>(0 /*location*/,
                                                                 2 /*size*/,
                                                                 min_location,
                                                                 max_location);
  object.AddReference(ref);
  pool_manager.AddObjectReference(ref, &object);

  int32_t pc = 2;
  int32_t new_pc = pool_manager.Emit(&masm,
                                     pc,
                                     0, /* no new code added */
                                     NULL,
                                     NULL,
                                     PoolManager<int32_t>::kNoBranchRequired);
  USE(pc);
  USE(new_pc);
  VIXL_ASSERT(new_pc == AlignUp(pc, object_alignment) + object_size);
}

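// Add `num_branches` veneer-like objects to the pool manager, each with a
// single forward reference of range `branch_range`, advancing the PC by
// `branch_size` per branch and returning the resulting PC.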
static int32_t AddNBranches(PoolManager<int32_t> *pool_manager,
                            int32_t pc,
                            TestBranchObject *labels[],
                            int num_branches,
                            int branch_size,
                            int veneer_size,
                            int veneer_alignment,
                            int branch_range) {
  for (int i = 0; i < num_branches; ++i) {
    labels[i] = new TestBranchObject(veneer_size, veneer_alignment);
    int32_t min_location = pc;
    int32_t max_location = pc + branch_range;
    ForwardReference<int32_t> *ref =
        new ForwardReference<int32_t>(pc,
                                      branch_size,
                                      min_location,
                                      max_location);
    labels[i]->AddReference(ref);
    // We have picked the object sizes so that we do not need to emit now.
    VIXL_ASSERT(!pool_manager->MustEmit(pc, branch_size, ref, labels[i]));
    pool_manager->AddObjectReference(ref, labels[i]);
    pc += branch_size;
  }
  return pc;
}

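// Check that MustEmit() requests pool emission when a new reference has a
// range too short to stay encodable if the pool were emitted after it.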
TEST(MustEmitNewReferenceDueToRange) {
  const int kHeaderSize = 4;
  const int kHeaderAlignment = 2;
  const int kNumBranches = 550;
  const int kBranchSize = 4;
  const int kVeneerSize = 4;
  const int kVeneerAlignment = 2;
  const int kBranchRange = 1 * MBytes;
  int32_t pc = 0;

  TestMacroAssembler masm;
  TestBranchObject *labels[kNumBranches];
  PoolManager<int32_t> pool_manager(kHeaderSize,
                                    kHeaderAlignment,
                                    BUFFER_ALIGNMENT);
  AddNBranches(&pool_manager,
               pc,
               labels,
               kNumBranches,
               kBranchSize,
               kVeneerSize,
               kVeneerAlignment,
               kBranchRange);

  // Move the PC to just below the pool checkpoint.
  TestPoolManager test(&pool_manager);
  pc = test.GetPoolCheckpoint() - 4;
  VIXL_ASSERT(!pool_manager.MustEmit(pc));

  // Now, attempt to add a reference that would make the pool impossible to
  // emit later. We would need to emit the pool immediately after this new
  // instruction, and the current size of the pool is
  // kVeneerSize * kNumBranches, so adding a short-range (smaller than the
  // pool size) reference should trigger pool emission.
  const int kPoolSize = kVeneerSize * kNumBranches + kHeaderSize;

  const int kNewObjectSize = 2;
  TestObject new_object(kNewObjectSize, 1);

  ForwardReference<int32_t> temp_ref(pc,
                                     kBranchSize,
                                     pc,
                                     pc + kPoolSize + kBranchSize - 1);
  VIXL_ASSERT(pool_manager.MustEmit(pc, kBranchSize, &temp_ref, &new_object));

  // Before actually emitting the pool, try a few different references to make
  // sure that this works as expected.
  {
    // This reference has a large enough range, so should not force pool
    // emission.
    ForwardReference<int32_t> far_ref(pc,
                                      kBranchSize,
                                      pc,
                                      pc + kPoolSize + kBranchSize);
    VIXL_ASSERT(!pool_manager.MustEmit(pc, kBranchSize, &far_ref, &new_object));

    // This reference has a large enough range but will be restricted by
    // alignment, so it should force pool emission.
    int alignment = 16;
    VIXL_ASSERT((pc & (alignment - 1)) != 0);
    ForwardReference<int32_t> aligned_ref(pc,
                                          kBranchSize,
                                          pc,
                                          pc + kPoolSize + kBranchSize,
                                          alignment);
    VIXL_ASSERT(
        pool_manager.MustEmit(pc, kBranchSize, &aligned_ref, &new_object));
  }

  // Emit the pool and check its size.
  int32_t new_pc =
      pool_manager.Emit(&masm, pc, kBranchSize, &temp_ref, &new_object);
  VIXL_ASSERT(pc % kHeaderAlignment == 0);  // No need for padding.
  VIXL_ASSERT(new_pc == pc + kPoolSize);
  pc = new_pc;

  // Add the new reference, safely.
  ForwardReference<int32_t> *ref =
      new ForwardReference<int32_t>(pc, 4 /*size*/, pc, pc + kBranchRange);
  new_object.AddReference(ref);
  pool_manager.AddObjectReference(ref, &new_object);
  pc += 4;

  // Emit the pool again.
  new_pc = pool_manager.Emit(&masm, pc);
  VIXL_ASSERT(pc % kHeaderAlignment == 0);  // No need for padding.
  VIXL_ASSERT(new_pc == pc + kNewObjectSize + kHeaderSize);
  pc = new_pc;

  // Finally, bind the labels.
  for (int i = 0; i < kNumBranches; ++i) {
    pc = pool_manager.Bind(&masm, labels[i], pc);
    delete labels[i];
  }
}

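// Check that MustEmit() requests pool emission when the new object is too
// large to fit in the margin left before the pool checkpoint.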
TEST(MustEmitNewReferenceDueToSizeOfObject) {
  const int kHeaderSize = 4;
  const int kHeaderAlignment = 2;
  const int kNumBranches = 550;
  const int kBranchSize = 4;
  const int kVeneerSize = 4;
  const int kVeneerAlignment = 2;
  const int kBranchRange = 1 * MBytes;
  int32_t pc = 0;

  TestMacroAssembler masm;
  PoolManager<int32_t> pool_manager(kHeaderSize,
                                    kHeaderAlignment,
                                    BUFFER_ALIGNMENT);
  TestBranchObject *labels[kNumBranches];
  AddNBranches(&pool_manager,
               pc,
               labels,
               kNumBranches,
               kBranchSize,
               kVeneerSize,
               kVeneerAlignment,
               kBranchRange);

  // Move the PC to a known threshold (kBigObjectSize) below the pool
  // checkpoint.
  const int kBigObjectSize = 1024;
  TestPoolManager test(&pool_manager);
  pc = test.GetPoolCheckpoint() - kBigObjectSize;
  VIXL_ASSERT(!pool_manager.MustEmit(pc));

  // Now, attempt to add a reference that would make the pool impossible to
  // emit later. If we add a short-range (smaller than the pool size)
  // reference with a large size (larger than the margin we have until pool
  // emission), pool emission should be triggered.
  const int kPoolSize = kVeneerSize * kNumBranches + kHeaderSize;

  TestObject new_object(kBigObjectSize, 1);
  ForwardReference<int32_t> temp_ref(pc, kBranchSize, pc, pc + kPoolSize);
  VIXL_ASSERT(pool_manager.MustEmit(pc, kBranchSize, &temp_ref, &new_object));

  // Before actually emitting the pool, try a few different references to make
  // sure that this works as expected.
  {
    // If the object is smaller, we can emit the reference.
    TestObject smaller_object(kBigObjectSize - 4, 1);
    ForwardReference<int32_t> temp_ref(pc, kBranchSize, pc, pc + kPoolSize);
    VIXL_ASSERT(
        !pool_manager.MustEmit(pc, kBranchSize, &temp_ref, &smaller_object));

    // If the reference is going to be added after the current objects in the
    // pool, we can still emit it.
    ForwardReference<int32_t> far_ref(pc, kBranchSize, pc, pc + kBranchRange);
    VIXL_ASSERT(!pool_manager.MustEmit(pc, kBranchSize, &far_ref, &new_object));
  }

  // Emit the pool and check its size.
  int32_t new_pc =
      pool_manager.Emit(&masm, pc, kBranchSize, &temp_ref, &new_object);
  VIXL_ASSERT(pc % kHeaderAlignment == 0);  // No need for padding.
  VIXL_ASSERT(new_pc == pc + kPoolSize);
  pc = new_pc;

  // Add the new reference, safely.
  ForwardReference<int32_t> *ref =
      new ForwardReference<int32_t>(pc, 4 /*size*/, pc, pc + kBranchRange);
  new_object.AddReference(ref);
  pool_manager.AddObjectReference(ref, &new_object);
  pc += 4;

  // Emit the pool again.
  new_pc = pool_manager.Emit(&masm, pc);
  VIXL_ASSERT(pc % kHeaderAlignment == 0);  // No need for padding.
  VIXL_ASSERT(new_pc == pc + kBigObjectSize + kHeaderSize);
  pc = new_pc;

  // Finally, bind the labels.
  for (int i = 0; i < kNumBranches; ++i) {
    pc = pool_manager.Bind(&masm, labels[i], pc);
    delete labels[i];
  }
}

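// Helper for the tests below: create two objects whose ownership is passed to
// the pool manager, which then becomes responsible for deleting them.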
template <typename ObjectType>
void ManagedLocationBaseTestHelper() {
  TestMacroAssembler masm;

  PoolManager<int32_t> pool_manager(4 /*header_size*/,
                                    2 /*header_alignment*/,
                                    BUFFER_ALIGNMENT);
  ObjectType *object1 = new ObjectType();
  ObjectType *object2 = new ObjectType();
  ForwardReference<int32_t> *ref_obj1 =
      new ForwardReference<int32_t>(0 /*location*/, 2 /*size*/, 0, 200);
  object1->AddReference(ref_obj1);
  ForwardReference<int32_t> *ref_obj2 =
      new ForwardReference<int32_t>(8 /*location*/, 2 /*size*/, 8, 500);
  object2->AddReference(ref_obj2);

  pool_manager.AddObjectReference(ref_obj1, object1);
  pool_manager.AddObjectReference(ref_obj2, object2);

  pool_manager.Emit(&masm, 20);
}

class TestObjectDeletedOnPlacement : public TestObject {
 public:
  TestObjectDeletedOnPlacement() : TestObject(4 /*size*/, 4 /*alignment*/) {}
  // After passing ownership of this type of object to the pool manager, it is
  // not safe to use it anymore.
  virtual bool ShouldBeDeletedOnPlacementByPoolManager() const VIXL_OVERRIDE {
    return true;
  }
};

TEST(DeleteLocationBaseOnPlacement) {
  ManagedLocationBaseTestHelper<TestObjectDeletedOnPlacement>();
}

class TestObjectDeletedOnPoolManagerDestruction : public TestObject {
 public:
  TestObjectDeletedOnPoolManagerDestruction()
      : TestObject(4 /*size*/, 4 /*alignment*/) {}
  // We can continue using this type of object after passing its ownership to
  // the pool manager, as it will be deleted only when the pool manager is
  // destroyed.
  virtual bool ShouldBeDeletedOnPoolManagerDestruction() const VIXL_OVERRIDE {
    return true;
  }
};


TEST(DeleteLocationBaseOnPoolManagerDestruction) {
  ManagedLocationBaseTestHelper<TestObjectDeletedOnPoolManagerDestruction>();
}