backport_1084820.patch
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Cheng Zhao <[email protected]>
Date: Thu, 4 Oct 2018 14:57:02 -0700
Subject: fix: object materialization

[1084820] [High] [CVE-2020-6512]: DCHECK failure in value.IsHeapObject() in objects-debug.cc
Backport https://chromium.googlesource.com/v8/v8.git/+/c16f62d6e943756d8b4170a36f61e666ced82e6d
  7. diff --git a/src/deoptimizer/deoptimizer.cc b/src/deoptimizer/deoptimizer.cc
  8. index 9fcccd483c6a10a42a217c61d7ef60f50b3e4f07..44c92f557046dbb4268b218d0753367356ee9cd3 100644
  9. --- a/src/deoptimizer/deoptimizer.cc
  10. +++ b/src/deoptimizer/deoptimizer.cc
  11. @@ -48,7 +48,6 @@ class FrameWriter {
  12. void PushRawValue(intptr_t value, const char* debug_hint) {
  13. PushValue(value);
  14. -
  15. if (trace_scope_ != nullptr) {
  16. DebugPrintOutputValue(value, debug_hint);
  17. }
  18. @@ -83,13 +82,10 @@ class FrameWriter {
  19. void PushTranslatedValue(const TranslatedFrame::iterator& iterator,
  20. const char* debug_hint = "") {
  21. Object obj = iterator->GetRawValue();
  22. -
  23. PushRawObject(obj, debug_hint);
  24. -
  25. if (trace_scope_) {
  26. PrintF(trace_scope_->file(), " (input #%d)\n", iterator.input_index());
  27. }
  28. -
  29. deoptimizer_->QueueValueForMaterialization(output_address(top_offset_), obj,
  30. iterator);
  31. }
  32. @@ -2428,6 +2424,11 @@ int TranslatedValue::object_index() const {
  33. Object TranslatedValue::GetRawValue() const {
  34. // If we have a value, return it.
  35. if (materialization_state() == kFinished) {
  36. + int smi;
  37. + if (storage_->IsHeapNumber() &&
  38. + DoubleToSmiInteger(storage_->Number(), &smi)) {
  39. + return Smi::FromInt(smi);
  40. + }
  41. return *storage_;
  42. }
  43. @@ -2470,6 +2471,22 @@ Object TranslatedValue::GetRawValue() const {
  44. }
  45. }
  46. + case kFloat: {
  47. + int smi;
  48. + if (DoubleToSmiInteger(float_value().get_scalar(), &smi)) {
  49. + return Smi::FromInt(smi);
  50. + }
  51. + break;
  52. + }
  53. +
  54. + case kDouble: {
  55. + int smi;
  56. + if (DoubleToSmiInteger(double_value().get_scalar(), &smi)) {
  57. + return Smi::FromInt(smi);
  58. + }
  59. + break;
  60. + }
  61. +
  62. default:
  63. break;
  64. }
  65. @@ -2479,106 +2496,76 @@ Object TranslatedValue::GetRawValue() const {
  66. return ReadOnlyRoots(isolate()).arguments_marker();
  67. }
  68. -void TranslatedValue::set_initialized_storage(Handle<Object> storage) {
  69. +void TranslatedValue::set_initialized_storage(Handle<HeapObject> storage) {
  70. DCHECK_EQ(kUninitialized, materialization_state());
  71. storage_ = storage;
  72. materialization_state_ = kFinished;
  73. }
  74. Handle<Object> TranslatedValue::GetValue() {
  75. - // If we already have a value, then get it.
  76. - if (materialization_state() == kFinished) return storage_;
  77. -
  78. - // Otherwise we have to materialize.
  79. - switch (kind()) {
  80. - case TranslatedValue::kTagged:
  81. - case TranslatedValue::kInt32:
  82. - case TranslatedValue::kInt64:
  83. - case TranslatedValue::kUInt32:
  84. - case TranslatedValue::kBoolBit:
  85. - case TranslatedValue::kFloat:
  86. - case TranslatedValue::kDouble: {
  87. - MaterializeSimple();
  88. - return storage_;
  89. - }
  90. -
  91. - case TranslatedValue::kCapturedObject:
  92. - case TranslatedValue::kDuplicatedObject: {
  93. - // We need to materialize the object (or possibly even object graphs).
  94. - // To make the object verifier happy, we materialize in two steps.
  95. -
  96. - // 1. Allocate storage for reachable objects. This makes sure that for
  97. - // each object we have allocated space on heap. The space will be
  98. - // a byte array that will be later initialized, or a fully
  99. - // initialized object if it is safe to allocate one that will
  100. - // pass the verifier.
  101. - container_->EnsureObjectAllocatedAt(this);
  102. -
  103. - // 2. Initialize the objects. If we have allocated only byte arrays
  104. - // for some objects, we now overwrite the byte arrays with the
  105. - // correct object fields. Note that this phase does not allocate
  106. - // any new objects, so it does not trigger the object verifier.
  107. - return container_->InitializeObjectAt(this);
  108. - }
  109. -
  110. - case TranslatedValue::kInvalid:
  111. - FATAL("unexpected case");
  112. - return Handle<Object>::null();
  113. + Handle<Object> value(GetRawValue(), isolate());
  114. + if (materialization_state() == kFinished) return value;
  115. +
  116. + if (value->IsSmi()) {
  117. + // Even though stored as a Smi, this number might instead be needed as a
  118. + // HeapNumber when materializing a JSObject with a field of HeapObject
  119. + // representation. Since we don't have this information available here, we
  120. + // just always allocate a HeapNumber and later extract the Smi again if we
  121. + // don't need a HeapObject.
  122. + set_initialized_storage(
  123. + isolate()->factory()->NewHeapNumber(value->Number()));
  124. + return value;
  125. }
  126. - FATAL("internal error: value missing");
  127. - return Handle<Object>::null();
  128. -}
  129. -
  130. -void TranslatedValue::MaterializeSimple() {
  131. - // If we already have materialized, return.
  132. - if (materialization_state() == kFinished) return;
  133. -
  134. - Object raw_value = GetRawValue();
  135. - if (raw_value != ReadOnlyRoots(isolate()).arguments_marker()) {
  136. - // We can get the value without allocation, just return it here.
  137. - set_initialized_storage(Handle<Object>(raw_value, isolate()));
  138. - return;
  139. + if (*value != ReadOnlyRoots(isolate()).arguments_marker()) {
  140. + set_initialized_storage(Handle<HeapObject>::cast(value));
  141. + return storage_;
  142. }
  143. - switch (kind()) {
  144. - case kInt32:
  145. - set_initialized_storage(
  146. - Handle<Object>(isolate()->factory()->NewNumber(int32_value())));
  147. - return;
  148. + // Otherwise we have to materialize.
  149. - case kInt64:
  150. - set_initialized_storage(Handle<Object>(
  151. - isolate()->factory()->NewNumber(static_cast<double>(int64_value()))));
  152. - return;
  153. + if (kind() == TranslatedValue::kCapturedObject ||
  154. + kind() == TranslatedValue::kDuplicatedObject) {
  155. + // We need to materialize the object (or possibly even object graphs).
  156. + // To make the object verifier happy, we materialize in two steps.
  157. - case kUInt32:
  158. - set_initialized_storage(
  159. - Handle<Object>(isolate()->factory()->NewNumber(uint32_value())));
  160. - return;
  161. + // 1. Allocate storage for reachable objects. This makes sure that for
  162. + // each object we have allocated space on heap. The space will be
  163. + // a byte array that will be later initialized, or a fully
  164. + // initialized object if it is safe to allocate one that will
  165. + // pass the verifier.
  166. + container_->EnsureObjectAllocatedAt(this);
  167. - case kFloat: {
  168. - double scalar_value = float_value().get_scalar();
  169. - set_initialized_storage(
  170. - Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
  171. - return;
  172. - }
  173. -
  174. - case kDouble: {
  175. - double scalar_value = double_value().get_scalar();
  176. - set_initialized_storage(
  177. - Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
  178. - return;
  179. - }
  180. + // 2. Initialize the objects. If we have allocated only byte arrays
  181. + // for some objects, we now overwrite the byte arrays with the
  182. + // correct object fields. Note that this phase does not allocate
  183. + // any new objects, so it does not trigger the object verifier.
  184. + return container_->InitializeObjectAt(this);
  185. + }
  186. - case kCapturedObject:
  187. - case kDuplicatedObject:
  188. - case kInvalid:
  189. - case kTagged:
  190. - case kBoolBit:
  191. - FATAL("internal error: unexpected materialization.");
  192. + double number;
  193. + switch (kind()) {
  194. + case TranslatedValue::kInt32:
  195. + number = int32_value();
  196. + break;
  197. + case TranslatedValue::kInt64:
  198. + number = int64_value();
  199. + break;
  200. + case TranslatedValue::kUInt32:
  201. + number = uint32_value();
  202. + break;
  203. + case TranslatedValue::kFloat:
  204. + number = float_value().get_scalar();
  205. break;
  206. + case TranslatedValue::kDouble:
  207. + number = double_value().get_scalar();
  208. + break;
  209. + default:
  210. + UNREACHABLE();
  211. }
  212. + DCHECK(!IsSmiDouble(number));
  213. + set_initialized_storage(isolate()->factory()->NewHeapNumber(number));
  214. + return storage_;
  215. }
  216. bool TranslatedValue::IsMaterializedObject() const {
  217. @@ -2634,8 +2621,9 @@ Float64 TranslatedState::GetDoubleSlot(Address fp, int slot_offset) {
  218. }
  219. void TranslatedValue::Handlify() {
  220. - if (kind() == kTagged) {
  221. - set_initialized_storage(Handle<Object>(raw_literal(), isolate()));
  222. + if (kind() == kTagged && raw_literal().IsHeapObject()) {
  223. + set_initialized_storage(
  224. + Handle<HeapObject>(HeapObject::cast(raw_literal()), isolate()));
  225. raw_literal_ = Object();
  226. }
  227. }
  228. @@ -3386,7 +3374,7 @@ TranslatedValue* TranslatedState::GetValueByObjectIndex(int object_index) {
  229. return &(frames_[pos.frame_index_].values_[pos.value_index_]);
  230. }
  231. -Handle<Object> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  232. +Handle<HeapObject> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  233. slot = ResolveCapturedObject(slot);
  234. DisallowHeapAllocation no_allocation;
  235. @@ -3401,7 +3389,7 @@ Handle<Object> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  236. InitializeCapturedObjectAt(index, &worklist, no_allocation);
  237. }
  238. }
  239. - return slot->GetStorage();
  240. + return slot->storage();
  241. }
  242. void TranslatedState::InitializeCapturedObjectAt(
  243. @@ -3501,11 +3489,17 @@ void TranslatedState::EnsureObjectAllocatedAt(TranslatedValue* slot) {
  244. }
  245. }
  246. +int TranslatedValue::GetSmiValue() const {
  247. + Object value = GetRawValue();
  248. + CHECK(value.IsSmi());
  249. + return Smi::cast(value).value();
  250. +}
  251. +
  252. void TranslatedState::MaterializeFixedDoubleArray(TranslatedFrame* frame,
  253. int* value_index,
  254. TranslatedValue* slot,
  255. Handle<Map> map) {
  256. - int length = Smi::cast(frame->values_[*value_index].GetRawValue()).value();
  257. + int length = frame->values_[*value_index].GetSmiValue();
  258. (*value_index)++;
  259. Handle<FixedDoubleArray> array = Handle<FixedDoubleArray>::cast(
  260. isolate()->factory()->NewFixedDoubleArray(length));
  261. @@ -3539,10 +3533,10 @@ void TranslatedState::MaterializeHeapNumber(TranslatedFrame* frame,
  262. namespace {
  263. -enum DoubleStorageKind : uint8_t {
  264. +enum StorageKind : uint8_t {
  265. kStoreTagged,
  266. kStoreUnboxedDouble,
  267. - kStoreMutableHeapNumber,
  268. + kStoreHeapObject
  269. };
  270. } // namespace
  271. @@ -3614,9 +3608,7 @@ void TranslatedState::EnsureCapturedObjectAllocatedAt(
  272. case SIMPLE_NUMBER_DICTIONARY_TYPE:
  273. case STRING_TABLE_TYPE: {
  274. // Check we have the right size.
  275. - int array_length =
  276. - Smi::cast(frame->values_[value_index].GetRawValue()).value();
  277. -
  278. + int array_length = frame->values_[value_index].GetSmiValue();
  279. int instance_size = FixedArray::SizeFor(array_length);
  280. CHECK_EQ(instance_size, slot->GetChildrenCount() * kTaggedSize);
  281. @@ -3635,13 +3627,13 @@ void TranslatedState::EnsureCapturedObjectAllocatedAt(
  282. case PROPERTY_ARRAY_TYPE: {
  283. // Check we have the right size.
  284. - int length_or_hash =
  285. - Smi::cast(frame->values_[value_index].GetRawValue()).value();
  286. + int length_or_hash = frame->values_[value_index].GetSmiValue();
  287. int array_length = PropertyArray::LengthField::decode(length_or_hash);
  288. int instance_size = PropertyArray::SizeFor(array_length);
  289. CHECK_EQ(instance_size, slot->GetChildrenCount() * kTaggedSize);
  290. slot->set_storage(AllocateStorageFor(slot));
  291. +
  292. // Make sure all the remaining children (after the map) are allocated.
  293. return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
  294. &value_index, worklist);
  295. @@ -3686,7 +3678,7 @@ void TranslatedState::EnsureChildrenAllocated(int count, TranslatedFrame* frame,
  296. } else {
  297. // Make sure the simple values (heap numbers, etc.) are properly
  298. // initialized.
  299. - child_slot->MaterializeSimple();
  300. + child_slot->GetValue();
  301. }
  302. SkipSlots(1, frame, value_index);
  303. }
  304. @@ -3701,16 +3693,17 @@ void TranslatedState::EnsurePropertiesAllocatedAndMarked(
  305. properties_slot->mark_allocated();
  306. properties_slot->set_storage(object_storage);
  307. - // Set markers for the double properties.
  308. + // Set markers for out-of-object properties.
  309. Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
  310. for (InternalIndex i : map->IterateOwnDescriptors()) {
  311. FieldIndex index = FieldIndex::ForDescriptor(*map, i);
  312. - if (descriptors->GetDetails(i).representation().IsDouble() &&
  313. - !index.is_inobject()) {
  314. + Representation representation = descriptors->GetDetails(i).representation();
  315. + if (!index.is_inobject() &&
  316. + (representation.IsDouble() || representation.IsHeapObject())) {
  317. CHECK(!map->IsUnboxedDoubleField(index));
  318. int outobject_index = index.outobject_array_index();
  319. int array_index = outobject_index * kTaggedSize;
  320. - object_storage->set(array_index, kStoreMutableHeapNumber);
  321. + object_storage->set(array_index, kStoreHeapObject);
  322. }
  323. }
  324. }
  325. @@ -3736,31 +3729,44 @@ void TranslatedState::EnsureJSObjectAllocated(TranslatedValue* slot,
  326. // Now we handle the interesting (JSObject) case.
  327. Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
  328. - // Set markers for the double properties.
  329. + // Set markers for in-object properties.
  330. for (InternalIndex i : map->IterateOwnDescriptors()) {
  331. FieldIndex index = FieldIndex::ForDescriptor(*map, i);
  332. - if (descriptors->GetDetails(i).representation().IsDouble() &&
  333. - index.is_inobject()) {
  334. + Representation representation = descriptors->GetDetails(i).representation();
  335. + if (index.is_inobject() &&
  336. + (representation.IsDouble() || representation.IsHeapObject())) {
  337. CHECK_GE(index.index(), FixedArray::kHeaderSize / kTaggedSize);
  338. int array_index = index.index() * kTaggedSize - FixedArray::kHeaderSize;
  339. - uint8_t marker = map->IsUnboxedDoubleField(index)
  340. - ? kStoreUnboxedDouble
  341. - : kStoreMutableHeapNumber;
  342. + uint8_t marker = map->IsUnboxedDoubleField(index) ? kStoreUnboxedDouble
  343. + : kStoreHeapObject;
  344. object_storage->set(array_index, marker);
  345. }
  346. }
  347. slot->set_storage(object_storage);
  348. }
  349. -Handle<Object> TranslatedState::GetValueAndAdvance(TranslatedFrame* frame,
  350. - int* value_index) {
  351. - TranslatedValue* slot = frame->ValueAt(*value_index);
  352. - SkipSlots(1, frame, value_index);
  353. +TranslatedValue* TranslatedState::GetResolvedSlot(TranslatedFrame* frame,
  354. + int value_index) {
  355. + TranslatedValue* slot = frame->ValueAt(value_index);
  356. if (slot->kind() == TranslatedValue::kDuplicatedObject) {
  357. slot = ResolveCapturedObject(slot);
  358. }
  359. - CHECK_NE(TranslatedValue::kUninitialized, slot->materialization_state());
  360. - return slot->GetStorage();
  361. + CHECK_NE(slot->materialization_state(), TranslatedValue::kUninitialized);
  362. + return slot;
  363. +}
  364. +
  365. +TranslatedValue* TranslatedState::GetResolvedSlotAndAdvance(
  366. + TranslatedFrame* frame, int* value_index) {
  367. + TranslatedValue* slot = GetResolvedSlot(frame, *value_index);
  368. + SkipSlots(1, frame, value_index);
  369. + return slot;
  370. +}
  371. +
  372. +Handle<Object> TranslatedState::GetValueAndAdvance(TranslatedFrame* frame,
  373. + int* value_index) {
  374. + TranslatedValue* slot = GetResolvedSlot(frame, *value_index);
  375. + SkipSlots(1, frame, value_index);
  376. + return slot->GetValue();
  377. }
  378. void TranslatedState::InitializeJSObjectAt(
  379. @@ -3788,29 +3794,25 @@ void TranslatedState::InitializeJSObjectAt(
  380. // marker to see if we store an unboxed double.
  381. DCHECK_EQ(kTaggedSize, JSObject::kPropertiesOrHashOffset);
  382. for (int i = 2; i < slot->GetChildrenCount(); i++) {
  383. - // Initialize and extract the value from its slot.
  384. - Handle<Object> field_value = GetValueAndAdvance(frame, value_index);
  385. -
  386. + TranslatedValue* slot = GetResolvedSlotAndAdvance(frame, value_index);
  387. // Read out the marker and ensure the field is consistent with
  388. // what the markers in the storage say (note that all heap numbers
  389. // should be fully initialized by now).
  390. int offset = i * kTaggedSize;
  391. uint8_t marker = object_storage->ReadField<uint8_t>(offset);
  392. if (marker == kStoreUnboxedDouble) {
  393. - double double_field_value;
  394. - if (field_value->IsSmi()) {
  395. - double_field_value = Smi::cast(*field_value).value();
  396. - } else {
  397. - CHECK(field_value->IsHeapNumber());
  398. - double_field_value = HeapNumber::cast(*field_value).value();
  399. - }
  400. - object_storage->WriteField<double>(offset, double_field_value);
  401. - } else if (marker == kStoreMutableHeapNumber) {
  402. + Handle<HeapObject> field_value = slot->storage();
  403. CHECK(field_value->IsHeapNumber());
  404. + object_storage->WriteField<double>(offset, field_value->Number());
  405. + } else if (marker == kStoreHeapObject) {
  406. + Handle<HeapObject> field_value = slot->storage();
  407. WRITE_FIELD(*object_storage, offset, *field_value);
  408. WRITE_BARRIER(*object_storage, offset, *field_value);
  409. } else {
  410. CHECK_EQ(kStoreTagged, marker);
  411. + Handle<Object> field_value = slot->GetValue();
  412. + DCHECK_IMPLIES(field_value->IsHeapNumber(),
  413. + !IsSmiDouble(field_value->Number()));
  414. WRITE_FIELD(*object_storage, offset, *field_value);
  415. WRITE_BARRIER(*object_storage, offset, *field_value);
  416. }
  417. @@ -3836,15 +3838,18 @@ void TranslatedState::InitializeObjectWithTaggedFieldsAt(
  418. // Write the fields to the object.
  419. for (int i = 1; i < slot->GetChildrenCount(); i++) {
  420. - Handle<Object> field_value = GetValueAndAdvance(frame, value_index);
  421. + TranslatedValue* slot = GetResolvedSlotAndAdvance(frame, value_index);
  422. int offset = i * kTaggedSize;
  423. uint8_t marker = object_storage->ReadField<uint8_t>(offset);
  424. - if (i > 1 && marker == kStoreMutableHeapNumber) {
  425. - CHECK(field_value->IsHeapNumber());
  426. + Handle<Object> field_value;
  427. + if (i > 1 && marker == kStoreHeapObject) {
  428. + field_value = slot->storage();
  429. } else {
  430. CHECK(marker == kStoreTagged || i == 1);
  431. + field_value = slot->GetValue();
  432. + DCHECK_IMPLIES(field_value->IsHeapNumber(),
  433. + !IsSmiDouble(field_value->Number()));
  434. }
  435. -
  436. WRITE_FIELD(*object_storage, offset, *field_value);
  437. WRITE_BARRIER(*object_storage, offset, *field_value);
  438. }
  439. @@ -3911,10 +3916,7 @@ TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
  440. // argument (the receiver).
  441. static constexpr int kTheContext = 1;
  442. const int height = frames_[i].height() + kTheContext;
  443. - Object argc_object = frames_[i].ValueAt(height - 1)->GetRawValue();
  444. - CHECK(argc_object.IsSmi());
  445. - *args_count = Smi::ToInt(argc_object);
  446. -
  447. + *args_count = frames_[i].ValueAt(height - 1)->GetSmiValue();
  448. DCHECK_EQ(*args_count, 1);
  449. } else {
  450. *args_count = InternalFormalParameterCountWithReceiver(
  451. @@ -3956,21 +3958,30 @@ void TranslatedState::StoreMaterializedValuesAndDeopt(JavaScriptFrame* frame) {
  452. CHECK(value_info->IsMaterializedObject());
  453. - // Skip duplicate objects (i.e., those that point to some
  454. - // other object id).
  455. + // Skip duplicate objects (i.e., those that point to some other object id).
  456. if (value_info->object_index() != i) continue;
  457. + Handle<Object> previous_value(previously_materialized_objects->get(i),
  458. + isolate_);
  459. Handle<Object> value(value_info->GetRawValue(), isolate_);
  460. - if (!value.is_identical_to(marker)) {
  461. - if (previously_materialized_objects->get(i) == *marker) {
  462. + if (value.is_identical_to(marker)) {
  463. + DCHECK_EQ(*previous_value, *marker);
  464. + } else {
  465. + if (*previous_value == *marker) {
  466. + if (value->IsSmi()) {
  467. + value = isolate()->factory()->NewHeapNumber(value->Number());
  468. + }
  469. previously_materialized_objects->set(i, *value);
  470. value_changed = true;
  471. } else {
  472. - CHECK(previously_materialized_objects->get(i) == *value);
  473. + CHECK(*previous_value == *value ||
  474. + (previous_value->IsHeapNumber() && value->IsSmi() &&
  475. + previous_value->Number() == value->Number()));
  476. }
  477. }
  478. }
  479. +
  480. if (new_store && value_changed) {
  481. materialized_store->Set(stack_frame_pointer_,
  482. previously_materialized_objects);
  483. @@ -4004,8 +4015,10 @@ void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
  484. CHECK(value_info->IsMaterializedObject());
  485. if (value_info->kind() == TranslatedValue::kCapturedObject) {
  486. - value_info->set_initialized_storage(
  487. - Handle<Object>(previously_materialized_objects->get(i), isolate_));
  488. + Handle<Object> object(previously_materialized_objects->get(i),
  489. + isolate_);
  490. + CHECK(object->IsHeapObject());
  491. + value_info->set_initialized_storage(Handle<HeapObject>::cast(object));
  492. }
  493. }
  494. }
  495. @@ -4019,7 +4032,7 @@ void TranslatedState::VerifyMaterializedObjects() {
  496. if (slot->kind() == TranslatedValue::kCapturedObject) {
  497. CHECK_EQ(slot, GetValueByObjectIndex(slot->object_index()));
  498. if (slot->materialization_state() == TranslatedValue::kFinished) {
  499. - slot->GetStorage()->ObjectVerify(isolate());
  500. + slot->storage()->ObjectVerify(isolate());
  501. } else {
  502. CHECK_EQ(slot->materialization_state(),
  503. TranslatedValue::kUninitialized);
  504. diff --git a/src/deoptimizer/deoptimizer.h b/src/deoptimizer/deoptimizer.h
  505. index 2766ed7c6381a279d6161c058ea33fffe9860426..6c68ea1f96f00df51008a14d3ca7c7e672c47f0f 100644
  506. --- a/src/deoptimizer/deoptimizer.h
  507. +++ b/src/deoptimizer/deoptimizer.h
  508. @@ -39,13 +39,17 @@ enum class BuiltinContinuationMode;
  509. class TranslatedValue {
  510. public:
  511. - // Allocation-less getter of the value.
  512. + // Allocation-free getter of the value.
  513. // Returns ReadOnlyRoots::arguments_marker() if allocation would be necessary
  514. - // to get the value.
  515. + // to get the value. In the case of numbers, returns a Smi if possible.
  516. Object GetRawValue() const;
  517. - // Getter for the value, takes care of materializing the subgraph
  518. - // reachable from this value.
  519. + // Convenience wrapper around GetRawValue (checked).
  520. + int GetSmiValue() const;
  521. +
  522. + // Returns the value, possibly materializing it first (and the whole subgraph
  523. + // reachable from this value). In the case of numbers, returns a Smi if
  524. + // possible.
  525. Handle<Object> GetValue();
  526. bool IsMaterializedObject() const;
  527. @@ -102,15 +106,14 @@ class TranslatedValue {
  528. static TranslatedValue NewInvalid(TranslatedState* container);
  529. Isolate* isolate() const;
  530. - void MaterializeSimple();
  531. void set_storage(Handle<HeapObject> storage) { storage_ = storage; }
  532. - void set_initialized_storage(Handle<Object> storage);
  533. + void set_initialized_storage(Handle<HeapObject> storage);
  534. void mark_finished() { materialization_state_ = kFinished; }
  535. void mark_allocated() { materialization_state_ = kAllocated; }
  536. - Handle<Object> GetStorage() {
  537. - DCHECK_NE(kUninitialized, materialization_state());
  538. + Handle<HeapObject> storage() {
  539. + DCHECK_NE(materialization_state(), kUninitialized);
  540. return storage_;
  541. }
  542. @@ -120,9 +123,9 @@ class TranslatedValue {
  543. // objects and constructing handles (to get
  544. // to the isolate).
  545. - Handle<Object> storage_; // Contains the materialized value or the
  546. - // byte-array that will be later morphed into
  547. - // the materialized object.
  548. + Handle<HeapObject> storage_; // Contains the materialized value or the
  549. + // byte-array that will be later morphed into
  550. + // the materialized object.
  551. struct MaterializedObjectInfo {
  552. int id_;
  553. @@ -376,7 +379,7 @@ class TranslatedState {
  554. int* value_index, std::stack<int>* worklist);
  555. void EnsureCapturedObjectAllocatedAt(int object_index,
  556. std::stack<int>* worklist);
  557. - Handle<Object> InitializeObjectAt(TranslatedValue* slot);
  558. + Handle<HeapObject> InitializeObjectAt(TranslatedValue* slot);
  559. void InitializeCapturedObjectAt(int object_index, std::stack<int>* worklist,
  560. const DisallowHeapAllocation& no_allocation);
  561. void InitializeJSObjectAt(TranslatedFrame* frame, int* value_index,
  562. @@ -392,6 +395,9 @@ class TranslatedState {
  563. TranslatedValue* ResolveCapturedObject(TranslatedValue* slot);
  564. TranslatedValue* GetValueByObjectIndex(int object_index);
  565. Handle<Object> GetValueAndAdvance(TranslatedFrame* frame, int* value_index);
  566. + TranslatedValue* GetResolvedSlot(TranslatedFrame* frame, int value_index);
  567. + TranslatedValue* GetResolvedSlotAndAdvance(TranslatedFrame* frame,
  568. + int* value_index);
  569. static uint32_t GetUInt32Slot(Address fp, int slot_index);
  570. static uint64_t GetUInt64Slot(Address fp, int slot_index);
  571. @@ -773,7 +779,7 @@ class FrameDescription {
  572. intptr_t continuation_;
  573. // This must be at the end of the object as the object is allocated larger
  574. - // than it's definition indicate to extend this array.
  575. + // than its definition indicates to extend this array.
  576. intptr_t frame_content_[1];
  577. intptr_t* GetFrameSlotPointer(unsigned offset) {
  578. diff --git a/test/mjsunit/compiler/regress-1084820.js b/test/mjsunit/compiler/regress-1084820.js
  579. new file mode 100644
  580. index 0000000000000000000000000000000000000000..beb168b413ff045c5aff8e68d2e6da32b27800d6
  581. --- /dev/null
  582. +++ b/test/mjsunit/compiler/regress-1084820.js
  583. @@ -0,0 +1,27 @@
  584. +// Copyright 2020 the V8 project authors. All rights reserved.
  585. +// Use of this source code is governed by a BSD-style license that can be
  586. +// found in the LICENSE file.
  587. +
  588. +// Flags: --allow-natives-syntax
  589. +
  590. +// Create a map where 'my_property' has HeapObject representation.
  591. +const dummy_obj = {};
  592. +dummy_obj.my_property = 'some HeapObject';
  593. +dummy_obj.my_property = 'some other HeapObject';
  594. +
  595. +function gaga() {
  596. + const obj = {};
  597. + // Store a HeapNumber and then a Smi.
  598. + // This must happen in a loop, even if it's only 2 iterations:
  599. + for (let j = -3_000_000_000; j <= -1_000_000_000; j += 2_000_000_000) {
  600. + obj.my_property = j;
  601. + }
  602. + // Trigger (soft) deopt.
  603. + if (!%IsBeingInterpreted()) obj + obj;
  604. +}
  605. +
  606. +%PrepareFunctionForOptimization(gaga);
  607. +gaga();
  608. +gaga();
  609. +%OptimizeFunctionOnNextCall(gaga);
  610. +gaga();
  611. diff --git a/test/mjsunit/compiler/regress-1092650.js b/test/mjsunit/compiler/regress-1092650.js
  612. new file mode 100644
  613. index 0000000000000000000000000000000000000000..ba94375aeb8262536e28d5d409d69115e385c3b3
  614. --- /dev/null
  615. +++ b/test/mjsunit/compiler/regress-1092650.js
  616. @@ -0,0 +1,23 @@
  617. +// Copyright 2020 the V8 project authors. All rights reserved.
  618. +// Use of this source code is governed by a BSD-style license that can be
  619. +// found in the LICENSE file.
  620. +
  621. +// Flags: --allow-natives-syntax
  622. +
  623. +// Create map with HeapNumber in field 'a'
  624. +({a: 2**30});
  625. +
  626. +function foo() {
  627. + return foo.arguments[0];
  628. +}
  629. +
  630. +function main() {
  631. + foo({a: 42});
  632. +}
  633. +
  634. +%PrepareFunctionForOptimization(foo);
  635. +%PrepareFunctionForOptimization(main);
  636. +main();
  637. +main();
  638. +%OptimizeFunctionOnNextCall(main);
  639. +main();
  640. diff --git a/test/mjsunit/compiler/regress-1094132.js b/test/mjsunit/compiler/regress-1094132.js
  641. new file mode 100644
  642. index 0000000000000000000000000000000000000000..418637d86f8c363b9c0c41c450914e758ff73e9c
  643. --- /dev/null
  644. +++ b/test/mjsunit/compiler/regress-1094132.js
  645. @@ -0,0 +1,78 @@
  646. +// Copyright 2020 the V8 project authors. All rights reserved.
  647. +// Use of this source code is governed by a BSD-style license that can be
  648. +// found in the LICENSE file.
  649. +
  650. +// Flags: --allow-natives-syntax
  651. +
  652. +function prettyPrinted() {}
  653. +
  654. +function formatFailureText() {
  655. + if (expectedText.length <= 40 && foundText.length <= 40) {
  656. + message += ": expected <" + expectedText + "> found <" + foundText + ">";
  657. + message += ":\nexpected:\n" + expectedText + "\nfound:\n" + foundText;
  658. + }
  659. +}
  660. +
  661. +function fail(expectedText, found, name_opt) {
  662. + formatFailureText(expectedText, found, name_opt);
  663. + if (!a[aProps[i]][aProps[i]]) { }
  664. +}
  665. +
  666. +function deepEquals(a, b) {
  667. + if (a === 0) return 1 / a === 1 / b;
  668. + if (typeof a !== typeof a) return false;
  669. + if (typeof a !== "object" && typeof a !== "function") return false;
  670. + if (objectClass !== classOf()) return false;
  671. + if (objectClass === "RegExp") { }
  672. +}
  673. +
  674. +function assertEquals() {
  675. + if (!deepEquals()) {
  676. + fail(prettyPrinted(), undefined, undefined);
  677. + }
  678. +}
  679. +
  680. +({y: {}, x: 0.42});
  681. +
  682. +function gaga() {
  683. + return {gx: bar.arguments[0], hx: baz.arguments[0]};
  684. +}
  685. +
  686. +function baz() {
  687. + return gaga();
  688. +}
  689. +
  690. +function bar(obj) {
  691. + return baz(obj.y);
  692. +}
  693. +
  694. +function foo() {
  695. + bar({y: {}, x: 42});
  696. + try { assertEquals() } catch (e) {}
  697. + try { assertEquals() } catch (e) {}
  698. + assertEquals();
  699. +}
  700. +
  701. +%PrepareFunctionForOptimization(prettyPrinted);
  702. +%PrepareFunctionForOptimization(formatFailureText);
  703. +%PrepareFunctionForOptimization(fail);
  704. +%PrepareFunctionForOptimization(deepEquals);
  705. +%PrepareFunctionForOptimization(assertEquals);
  706. +%PrepareFunctionForOptimization(gaga);
  707. +%PrepareFunctionForOptimization(baz);
  708. +%PrepareFunctionForOptimization(bar);
  709. +%PrepareFunctionForOptimization(foo);
  710. +try { foo() } catch (e) {}
  711. +%OptimizeFunctionOnNextCall(foo);
  712. +try { foo() } catch (e) {}
  713. +%PrepareFunctionForOptimization(prettyPrinted);
  714. +%PrepareFunctionForOptimization(formatFailureText);
  715. +%PrepareFunctionForOptimization(fail);
  716. +%PrepareFunctionForOptimization(deepEquals);
  717. +%PrepareFunctionForOptimization(assertEquals);
  718. +%PrepareFunctionForOptimization(gaga);
  719. +%PrepareFunctionForOptimization(baz);
  720. +%PrepareFunctionForOptimization(bar);
  721. +%PrepareFunctionForOptimization(foo);
  722. +%OptimizeFunctionOnNextCall(foo);
  723. +try { foo() } catch (e) {}