backport_1084820.patch 29 KB

  1. From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
  2. From: Cheng Zhao <[email protected]>
  3. Date: Thu, 4 Oct 2018 14:57:02 -0700
  4. Subject: fix: object materialization
  5. [1084820] [High] [CVE-2020-6512]: DCHECK failure in value.IsHeapObject() in objects-debug.cc
  6. Backport https://chromium.googlesource.com/v8/v8.git/+/c16f62d6e943756d8b4170a36f61e666ced82e6d
  7. diff --git a/src/deoptimizer/deoptimizer.cc b/src/deoptimizer/deoptimizer.cc
  8. index 64551c68996f8c4d57cb16e8a22d202b00c624e2..d4389f5059ecade0e792bfee78155d7f1f0446bb 100644
  9. --- a/src/deoptimizer/deoptimizer.cc
  10. +++ b/src/deoptimizer/deoptimizer.cc
  11. @@ -47,7 +47,6 @@ class FrameWriter {
  12. void PushRawValue(intptr_t value, const char* debug_hint) {
  13. PushValue(value);
  14. -
  15. if (trace_scope_ != nullptr) {
  16. DebugPrintOutputValue(value, debug_hint);
  17. }
  18. @@ -82,13 +81,10 @@ class FrameWriter {
  19. void PushTranslatedValue(const TranslatedFrame::iterator& iterator,
  20. const char* debug_hint = "") {
  21. Object obj = iterator->GetRawValue();
  22. -
  23. PushRawObject(obj, debug_hint);
  24. -
  25. if (trace_scope_) {
  26. PrintF(trace_scope_->file(), " (input #%d)\n", iterator.input_index());
  27. }
  28. -
  29. deoptimizer_->QueueValueForMaterialization(output_address(top_offset_), obj,
  30. iterator);
  31. }
  32. @@ -2365,6 +2361,11 @@ int TranslatedValue::object_index() const {
  33. Object TranslatedValue::GetRawValue() const {
  34. // If we have a value, return it.
  35. if (materialization_state() == kFinished) {
  36. + int smi;
  37. + if (storage_->IsHeapNumber() &&
  38. + DoubleToSmiInteger(storage_->Number(), &smi)) {
  39. + return Smi::FromInt(smi);
  40. + }
  41. return *storage_;
  42. }
  43. @@ -2407,6 +2408,22 @@ Object TranslatedValue::GetRawValue() const {
  44. }
  45. }
  46. + case kFloat: {
  47. + int smi;
  48. + if (DoubleToSmiInteger(float_value().get_scalar(), &smi)) {
  49. + return Smi::FromInt(smi);
  50. + }
  51. + break;
  52. + }
  53. +
  54. + case kDouble: {
  55. + int smi;
  56. + if (DoubleToSmiInteger(double_value().get_scalar(), &smi)) {
  57. + return Smi::FromInt(smi);
  58. + }
  59. + break;
  60. + }
  61. +
  62. default:
  63. break;
  64. }
  65. @@ -2416,106 +2433,76 @@ Object TranslatedValue::GetRawValue() const {
  66. return ReadOnlyRoots(isolate()).arguments_marker();
  67. }
  68. -void TranslatedValue::set_initialized_storage(Handle<Object> storage) {
  69. +void TranslatedValue::set_initialized_storage(Handle<HeapObject> storage) {
  70. DCHECK_EQ(kUninitialized, materialization_state());
  71. storage_ = storage;
  72. materialization_state_ = kFinished;
  73. }
  74. Handle<Object> TranslatedValue::GetValue() {
  75. - // If we already have a value, then get it.
  76. - if (materialization_state() == kFinished) return storage_;
  77. -
  78. - // Otherwise we have to materialize.
  79. - switch (kind()) {
  80. - case TranslatedValue::kTagged:
  81. - case TranslatedValue::kInt32:
  82. - case TranslatedValue::kInt64:
  83. - case TranslatedValue::kUInt32:
  84. - case TranslatedValue::kBoolBit:
  85. - case TranslatedValue::kFloat:
  86. - case TranslatedValue::kDouble: {
  87. - MaterializeSimple();
  88. - return storage_;
  89. - }
  90. -
  91. - case TranslatedValue::kCapturedObject:
  92. - case TranslatedValue::kDuplicatedObject: {
  93. - // We need to materialize the object (or possibly even object graphs).
  94. - // To make the object verifier happy, we materialize in two steps.
  95. -
  96. - // 1. Allocate storage for reachable objects. This makes sure that for
  97. - // each object we have allocated space on heap. The space will be
  98. - // a byte array that will be later initialized, or a fully
  99. - // initialized object if it is safe to allocate one that will
  100. - // pass the verifier.
  101. - container_->EnsureObjectAllocatedAt(this);
  102. -
  103. - // 2. Initialize the objects. If we have allocated only byte arrays
  104. - // for some objects, we now overwrite the byte arrays with the
  105. - // correct object fields. Note that this phase does not allocate
  106. - // any new objects, so it does not trigger the object verifier.
  107. - return container_->InitializeObjectAt(this);
  108. - }
  109. -
  110. - case TranslatedValue::kInvalid:
  111. - FATAL("unexpected case");
  112. - return Handle<Object>::null();
  113. + Handle<Object> value(GetRawValue(), isolate());
  114. + if (materialization_state() == kFinished) return value;
  115. +
  116. + if (value->IsSmi()) {
  117. + // Even though stored as a Smi, this number might instead be needed as a
  118. + // HeapNumber when materializing a JSObject with a field of HeapObject
  119. + // representation. Since we don't have this information available here, we
  120. + // just always allocate a HeapNumber and later extract the Smi again if we
  121. + // don't need a HeapObject.
  122. + set_initialized_storage(
  123. + isolate()->factory()->NewHeapNumber(value->Number()));
  124. + return value;
  125. }
  126. - FATAL("internal error: value missing");
  127. - return Handle<Object>::null();
  128. -}
  129. -
  130. -void TranslatedValue::MaterializeSimple() {
  131. - // If we already have materialized, return.
  132. - if (materialization_state() == kFinished) return;
  133. -
  134. - Object raw_value = GetRawValue();
  135. - if (raw_value != ReadOnlyRoots(isolate()).arguments_marker()) {
  136. - // We can get the value without allocation, just return it here.
  137. - set_initialized_storage(Handle<Object>(raw_value, isolate()));
  138. - return;
  139. + if (*value != ReadOnlyRoots(isolate()).arguments_marker()) {
  140. + set_initialized_storage(Handle<HeapObject>::cast(value));
  141. + return storage_;
  142. }
  143. - switch (kind()) {
  144. - case kInt32:
  145. - set_initialized_storage(
  146. - Handle<Object>(isolate()->factory()->NewNumber(int32_value())));
  147. - return;
  148. + // Otherwise we have to materialize.
  149. - case kInt64:
  150. - set_initialized_storage(Handle<Object>(
  151. - isolate()->factory()->NewNumber(static_cast<double>(int64_value()))));
  152. - return;
  153. + if (kind() == TranslatedValue::kCapturedObject ||
  154. + kind() == TranslatedValue::kDuplicatedObject) {
  155. + // We need to materialize the object (or possibly even object graphs).
  156. + // To make the object verifier happy, we materialize in two steps.
  157. - case kUInt32:
  158. - set_initialized_storage(
  159. - Handle<Object>(isolate()->factory()->NewNumber(uint32_value())));
  160. - return;
  161. + // 1. Allocate storage for reachable objects. This makes sure that for
  162. + // each object we have allocated space on heap. The space will be
  163. + // a byte array that will be later initialized, or a fully
  164. + // initialized object if it is safe to allocate one that will
  165. + // pass the verifier.
  166. + container_->EnsureObjectAllocatedAt(this);
  167. - case kFloat: {
  168. - double scalar_value = float_value().get_scalar();
  169. - set_initialized_storage(
  170. - Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
  171. - return;
  172. - }
  173. -
  174. - case kDouble: {
  175. - double scalar_value = double_value().get_scalar();
  176. - set_initialized_storage(
  177. - Handle<Object>(isolate()->factory()->NewNumber(scalar_value)));
  178. - return;
  179. - }
  180. + // 2. Initialize the objects. If we have allocated only byte arrays
  181. + // for some objects, we now overwrite the byte arrays with the
  182. + // correct object fields. Note that this phase does not allocate
  183. + // any new objects, so it does not trigger the object verifier.
  184. + return container_->InitializeObjectAt(this);
  185. + }
  186. - case kCapturedObject:
  187. - case kDuplicatedObject:
  188. - case kInvalid:
  189. - case kTagged:
  190. - case kBoolBit:
  191. - FATAL("internal error: unexpected materialization.");
  192. + double number;
  193. + switch (kind()) {
  194. + case TranslatedValue::kInt32:
  195. + number = int32_value();
  196. + break;
  197. + case TranslatedValue::kInt64:
  198. + number = int64_value();
  199. + break;
  200. + case TranslatedValue::kUInt32:
  201. + number = uint32_value();
  202. + break;
  203. + case TranslatedValue::kFloat:
  204. + number = float_value().get_scalar();
  205. break;
  206. + case TranslatedValue::kDouble:
  207. + number = double_value().get_scalar();
  208. + break;
  209. + default:
  210. + UNREACHABLE();
  211. }
  212. + DCHECK(!IsSmiDouble(number));
  213. + set_initialized_storage(isolate()->factory()->NewHeapNumber(number));
  214. + return storage_;
  215. }
  216. bool TranslatedValue::IsMaterializedObject() const {
  217. @@ -2571,8 +2558,9 @@ Float64 TranslatedState::GetDoubleSlot(Address fp, int slot_offset) {
  218. }
  219. void TranslatedValue::Handlify() {
  220. - if (kind() == kTagged) {
  221. - set_initialized_storage(Handle<Object>(raw_literal(), isolate()));
  222. + if (kind() == kTagged && raw_literal().IsHeapObject()) {
  223. + set_initialized_storage(
  224. + Handle<HeapObject>(HeapObject::cast(raw_literal()), isolate()));
  225. raw_literal_ = Object();
  226. }
  227. }
  228. @@ -3323,7 +3311,7 @@ TranslatedValue* TranslatedState::GetValueByObjectIndex(int object_index) {
  229. return &(frames_[pos.frame_index_].values_[pos.value_index_]);
  230. }
  231. -Handle<Object> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  232. +Handle<HeapObject> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  233. slot = ResolveCapturedObject(slot);
  234. DisallowHeapAllocation no_allocation;
  235. @@ -3338,7 +3326,7 @@ Handle<Object> TranslatedState::InitializeObjectAt(TranslatedValue* slot) {
  236. InitializeCapturedObjectAt(index, &worklist, no_allocation);
  237. }
  238. }
  239. - return slot->GetStorage();
  240. + return slot->storage();
  241. }
  242. void TranslatedState::InitializeCapturedObjectAt(
  243. @@ -3438,11 +3426,17 @@ void TranslatedState::EnsureObjectAllocatedAt(TranslatedValue* slot) {
  244. }
  245. }
  246. +int TranslatedValue::GetSmiValue() const {
  247. + Object value = GetRawValue();
  248. + CHECK(value.IsSmi());
  249. + return Smi::cast(value).value();
  250. +}
  251. +
  252. void TranslatedState::MaterializeFixedDoubleArray(TranslatedFrame* frame,
  253. int* value_index,
  254. TranslatedValue* slot,
  255. Handle<Map> map) {
  256. - int length = Smi::cast(frame->values_[*value_index].GetRawValue()).value();
  257. + int length = frame->values_[*value_index].GetSmiValue();
  258. (*value_index)++;
  259. Handle<FixedDoubleArray> array = Handle<FixedDoubleArray>::cast(
  260. isolate()->factory()->NewFixedDoubleArray(length));
  261. @@ -3476,10 +3470,10 @@ void TranslatedState::MaterializeHeapNumber(TranslatedFrame* frame,
  262. namespace {
  263. -enum DoubleStorageKind : uint8_t {
  264. +enum StorageKind : uint8_t {
  265. kStoreTagged,
  266. kStoreUnboxedDouble,
  267. - kStoreMutableHeapNumber,
  268. + kStoreHeapObject
  269. };
  270. } // namespace
  271. @@ -3551,9 +3545,7 @@ void TranslatedState::EnsureCapturedObjectAllocatedAt(
  272. case SIMPLE_NUMBER_DICTIONARY_TYPE:
  273. case STRING_TABLE_TYPE: {
  274. // Check we have the right size.
  275. - int array_length =
  276. - Smi::cast(frame->values_[value_index].GetRawValue()).value();
  277. -
  278. + int array_length = frame->values_[value_index].GetSmiValue();
  279. int instance_size = FixedArray::SizeFor(array_length);
  280. CHECK_EQ(instance_size, slot->GetChildrenCount() * kTaggedSize);
  281. @@ -3572,13 +3564,13 @@ void TranslatedState::EnsureCapturedObjectAllocatedAt(
  282. case PROPERTY_ARRAY_TYPE: {
  283. // Check we have the right size.
  284. - int length_or_hash =
  285. - Smi::cast(frame->values_[value_index].GetRawValue()).value();
  286. + int length_or_hash = frame->values_[value_index].GetSmiValue();
  287. int array_length = PropertyArray::LengthField::decode(length_or_hash);
  288. int instance_size = PropertyArray::SizeFor(array_length);
  289. CHECK_EQ(instance_size, slot->GetChildrenCount() * kTaggedSize);
  290. slot->set_storage(AllocateStorageFor(slot));
  291. +
  292. // Make sure all the remaining children (after the map) are allocated.
  293. return EnsureChildrenAllocated(slot->GetChildrenCount() - 1, frame,
  294. &value_index, worklist);
  295. @@ -3623,7 +3615,7 @@ void TranslatedState::EnsureChildrenAllocated(int count, TranslatedFrame* frame,
  296. } else {
  297. // Make sure the simple values (heap numbers, etc.) are properly
  298. // initialized.
  299. - child_slot->MaterializeSimple();
  300. + child_slot->GetValue();
  301. }
  302. SkipSlots(1, frame, value_index);
  303. }
  304. @@ -3638,17 +3630,18 @@ void TranslatedState::EnsurePropertiesAllocatedAndMarked(
  305. properties_slot->mark_allocated();
  306. properties_slot->set_storage(object_storage);
  307. - // Set markers for the double properties.
  308. + // Set markers for out-of-object properties.
  309. Handle<DescriptorArray> descriptors(map->instance_descriptors(), isolate());
  310. int field_count = map->NumberOfOwnDescriptors();
  311. for (int i = 0; i < field_count; i++) {
  312. FieldIndex index = FieldIndex::ForDescriptor(*map, i);
  313. - if (descriptors->GetDetails(i).representation().IsDouble() &&
  314. - !index.is_inobject()) {
  315. + Representation representation = descriptors->GetDetails(i).representation();
  316. + if (!index.is_inobject() &&
  317. + (representation.IsDouble() || representation.IsHeapObject())) {
  318. CHECK(!map->IsUnboxedDoubleField(index));
  319. int outobject_index = index.outobject_array_index();
  320. int array_index = outobject_index * kTaggedSize;
  321. - object_storage->set(array_index, kStoreMutableHeapNumber);
  322. + object_storage->set(array_index, kStoreHeapObject);
  323. }
  324. }
  325. }
  326. @@ -3678,28 +3671,41 @@ void TranslatedState::EnsureJSObjectAllocated(TranslatedValue* slot,
  327. // Set markers for the double properties.
  328. for (int i = 0; i < field_count; i++) {
  329. FieldIndex index = FieldIndex::ForDescriptor(*map, i);
  330. - if (descriptors->GetDetails(i).representation().IsDouble() &&
  331. - index.is_inobject()) {
  332. + Representation representation = descriptors->GetDetails(i).representation();
  333. + if (index.is_inobject() &&
  334. + (representation.IsDouble() || representation.IsHeapObject())) {
  335. CHECK_GE(index.index(), FixedArray::kHeaderSize / kTaggedSize);
  336. int array_index = index.index() * kTaggedSize - FixedArray::kHeaderSize;
  337. - uint8_t marker = map->IsUnboxedDoubleField(index)
  338. - ? kStoreUnboxedDouble
  339. - : kStoreMutableHeapNumber;
  340. + uint8_t marker = map->IsUnboxedDoubleField(index) ? kStoreUnboxedDouble
  341. + : kStoreHeapObject;
  342. object_storage->set(array_index, marker);
  343. }
  344. }
  345. slot->set_storage(object_storage);
  346. }
  347. -Handle<Object> TranslatedState::GetValueAndAdvance(TranslatedFrame* frame,
  348. - int* value_index) {
  349. - TranslatedValue* slot = frame->ValueAt(*value_index);
  350. - SkipSlots(1, frame, value_index);
  351. +TranslatedValue* TranslatedState::GetResolvedSlot(TranslatedFrame* frame,
  352. + int value_index) {
  353. + TranslatedValue* slot = frame->ValueAt(value_index);
  354. if (slot->kind() == TranslatedValue::kDuplicatedObject) {
  355. slot = ResolveCapturedObject(slot);
  356. }
  357. - CHECK_NE(TranslatedValue::kUninitialized, slot->materialization_state());
  358. - return slot->GetStorage();
  359. + CHECK_NE(slot->materialization_state(), TranslatedValue::kUninitialized);
  360. + return slot;
  361. +}
  362. +
  363. +TranslatedValue* TranslatedState::GetResolvedSlotAndAdvance(
  364. + TranslatedFrame* frame, int* value_index) {
  365. + TranslatedValue* slot = GetResolvedSlot(frame, *value_index);
  366. + SkipSlots(1, frame, value_index);
  367. + return slot;
  368. +}
  369. +
  370. +Handle<Object> TranslatedState::GetValueAndAdvance(TranslatedFrame* frame,
  371. + int* value_index) {
  372. + TranslatedValue* slot = GetResolvedSlot(frame, *value_index);
  373. + SkipSlots(1, frame, value_index);
  374. + return slot->GetValue();
  375. }
  376. void TranslatedState::InitializeJSObjectAt(
  377. @@ -3728,29 +3734,25 @@ void TranslatedState::InitializeJSObjectAt(
  378. // marker to see if we store an unboxed double.
  379. DCHECK_EQ(kTaggedSize, JSObject::kPropertiesOrHashOffset);
  380. for (int i = 2; i < slot->GetChildrenCount(); i++) {
  381. - // Initialize and extract the value from its slot.
  382. - Handle<Object> field_value = GetValueAndAdvance(frame, value_index);
  383. -
  384. + TranslatedValue* slot = GetResolvedSlotAndAdvance(frame, value_index);
  385. // Read out the marker and ensure the field is consistent with
  386. // what the markers in the storage say (note that all heap numbers
  387. // should be fully initialized by now).
  388. int offset = i * kTaggedSize;
  389. uint8_t marker = object_storage->ReadField<uint8_t>(offset);
  390. if (marker == kStoreUnboxedDouble) {
  391. - double double_field_value;
  392. - if (field_value->IsSmi()) {
  393. - double_field_value = Smi::cast(*field_value).value();
  394. - } else {
  395. - CHECK(field_value->IsHeapNumber());
  396. - double_field_value = HeapNumber::cast(*field_value).value();
  397. - }
  398. - object_storage->WriteField<double>(offset, double_field_value);
  399. - } else if (marker == kStoreMutableHeapNumber) {
  400. + Handle<HeapObject> field_value = slot->storage();
  401. CHECK(field_value->IsHeapNumber());
  402. + object_storage->WriteField<double>(offset, field_value->Number());
  403. + } else if (marker == kStoreHeapObject) {
  404. + Handle<HeapObject> field_value = slot->storage();
  405. WRITE_FIELD(*object_storage, offset, *field_value);
  406. WRITE_BARRIER(*object_storage, offset, *field_value);
  407. } else {
  408. CHECK_EQ(kStoreTagged, marker);
  409. + Handle<Object> field_value = slot->GetValue();
  410. + DCHECK_IMPLIES(field_value->IsHeapNumber(),
  411. + !IsSmiDouble(field_value->Number()));
  412. WRITE_FIELD(*object_storage, offset, *field_value);
  413. WRITE_BARRIER(*object_storage, offset, *field_value);
  414. }
  415. @@ -3777,15 +3779,18 @@ void TranslatedState::InitializeObjectWithTaggedFieldsAt(
  416. // Write the fields to the object.
  417. for (int i = 1; i < slot->GetChildrenCount(); i++) {
  418. - Handle<Object> field_value = GetValueAndAdvance(frame, value_index);
  419. + TranslatedValue* slot = GetResolvedSlotAndAdvance(frame, value_index);
  420. int offset = i * kTaggedSize;
  421. uint8_t marker = object_storage->ReadField<uint8_t>(offset);
  422. - if (i > 1 && marker == kStoreMutableHeapNumber) {
  423. - CHECK(field_value->IsHeapNumber());
  424. + Handle<Object> field_value;
  425. + if (i > 1 && marker == kStoreHeapObject) {
  426. + field_value = slot->storage();
  427. } else {
  428. CHECK(marker == kStoreTagged || i == 1);
  429. + field_value = slot->GetValue();
  430. + DCHECK_IMPLIES(field_value->IsHeapNumber(),
  431. + !IsSmiDouble(field_value->Number()));
  432. }
  433. -
  434. WRITE_FIELD(*object_storage, offset, *field_value);
  435. WRITE_BARRIER(*object_storage, offset, *field_value);
  436. }
  437. @@ -3852,10 +3857,7 @@ TranslatedFrame* TranslatedState::GetArgumentsInfoFromJSFrameIndex(
  438. // argument (the receiver).
  439. static constexpr int kTheContext = 1;
  440. const int height = frames_[i].height() + kTheContext;
  441. - Object argc_object = frames_[i].ValueAt(height - 1)->GetRawValue();
  442. - CHECK(argc_object.IsSmi());
  443. - *args_count = Smi::ToInt(argc_object);
  444. -
  445. + *args_count = frames_[i].ValueAt(height - 1)->GetSmiValue();
  446. DCHECK_EQ(*args_count, 1);
  447. } else {
  448. *args_count = InternalFormalParameterCountWithReceiver(
  449. @@ -3897,21 +3899,30 @@ void TranslatedState::StoreMaterializedValuesAndDeopt(JavaScriptFrame* frame) {
  450. CHECK(value_info->IsMaterializedObject());
  451. - // Skip duplicate objects (i.e., those that point to some
  452. - // other object id).
  453. + // Skip duplicate objects (i.e., those that point to some other object id).
  454. if (value_info->object_index() != i) continue;
  455. + Handle<Object> previous_value(previously_materialized_objects->get(i),
  456. + isolate_);
  457. Handle<Object> value(value_info->GetRawValue(), isolate_);
  458. - if (!value.is_identical_to(marker)) {
  459. - if (previously_materialized_objects->get(i) == *marker) {
  460. + if (value.is_identical_to(marker)) {
  461. + DCHECK_EQ(*previous_value, *marker);
  462. + } else {
  463. + if (*previous_value == *marker) {
  464. + if (value->IsSmi()) {
  465. + value = isolate()->factory()->NewHeapNumber(value->Number());
  466. + }
  467. previously_materialized_objects->set(i, *value);
  468. value_changed = true;
  469. } else {
  470. - CHECK(previously_materialized_objects->get(i) == *value);
  471. + CHECK(*previous_value == *value ||
  472. + (previous_value->IsHeapNumber() && value->IsSmi() &&
  473. + previous_value->Number() == value->Number()));
  474. }
  475. }
  476. }
  477. +
  478. if (new_store && value_changed) {
  479. materialized_store->Set(stack_frame_pointer_,
  480. previously_materialized_objects);
  481. @@ -3945,8 +3956,10 @@ void TranslatedState::UpdateFromPreviouslyMaterializedObjects() {
  482. CHECK(value_info->IsMaterializedObject());
  483. if (value_info->kind() == TranslatedValue::kCapturedObject) {
  484. - value_info->set_initialized_storage(
  485. - Handle<Object>(previously_materialized_objects->get(i), isolate_));
  486. + Handle<Object> object(previously_materialized_objects->get(i),
  487. + isolate_);
  488. + CHECK(object->IsHeapObject());
  489. + value_info->set_initialized_storage(Handle<HeapObject>::cast(object));
  490. }
  491. }
  492. }
  493. @@ -3960,7 +3973,7 @@ void TranslatedState::VerifyMaterializedObjects() {
  494. if (slot->kind() == TranslatedValue::kCapturedObject) {
  495. CHECK_EQ(slot, GetValueByObjectIndex(slot->object_index()));
  496. if (slot->materialization_state() == TranslatedValue::kFinished) {
  497. - slot->GetStorage()->ObjectVerify(isolate());
  498. + slot->storage()->ObjectVerify(isolate());
  499. } else {
  500. CHECK_EQ(slot->materialization_state(),
  501. TranslatedValue::kUninitialized);
  502. diff --git a/src/deoptimizer/deoptimizer.h b/src/deoptimizer/deoptimizer.h
  503. index 6d0a350aaceb59fa6486d41566ad22ee3fbe1bdd..3d8155616cfa4730c1a8665f3180856eaf7f9133 100644
  504. --- a/src/deoptimizer/deoptimizer.h
  505. +++ b/src/deoptimizer/deoptimizer.h
  506. @@ -39,13 +39,17 @@ enum class BuiltinContinuationMode;
  507. class TranslatedValue {
  508. public:
  509. - // Allocation-less getter of the value.
  510. + // Allocation-free getter of the value.
  511. // Returns ReadOnlyRoots::arguments_marker() if allocation would be necessary
  512. - // to get the value.
  513. + // to get the value. In the case of numbers, returns a Smi if possible.
  514. Object GetRawValue() const;
  515. - // Getter for the value, takes care of materializing the subgraph
  516. - // reachable from this value.
  517. + // Convenience wrapper around GetRawValue (checked).
  518. + int GetSmiValue() const;
  519. +
  520. + // Returns the value, possibly materializing it first (and the whole subgraph
  521. + // reachable from this value). In the case of numbers, returns a Smi if
  522. + // possible.
  523. Handle<Object> GetValue();
  524. bool IsMaterializedObject() const;
  525. @@ -102,15 +106,14 @@ class TranslatedValue {
  526. static TranslatedValue NewInvalid(TranslatedState* container);
  527. Isolate* isolate() const;
  528. - void MaterializeSimple();
  529. void set_storage(Handle<HeapObject> storage) { storage_ = storage; }
  530. - void set_initialized_storage(Handle<Object> storage);
  531. + void set_initialized_storage(Handle<HeapObject> storage);
  532. void mark_finished() { materialization_state_ = kFinished; }
  533. void mark_allocated() { materialization_state_ = kAllocated; }
  534. - Handle<Object> GetStorage() {
  535. - DCHECK_NE(kUninitialized, materialization_state());
  536. + Handle<HeapObject> storage() {
  537. + DCHECK_NE(materialization_state(), kUninitialized);
  538. return storage_;
  539. }
  540. @@ -120,9 +123,9 @@ class TranslatedValue {
  541. // objects and constructing handles (to get
  542. // to the isolate).
  543. - Handle<Object> storage_; // Contains the materialized value or the
  544. - // byte-array that will be later morphed into
  545. - // the materialized object.
  546. + Handle<HeapObject> storage_; // Contains the materialized value or the
  547. + // byte-array that will be later morphed into
  548. + // the materialized object.
  549. struct MaterializedObjectInfo {
  550. int id_;
  551. @@ -376,7 +379,7 @@ class TranslatedState {
  552. int* value_index, std::stack<int>* worklist);
  553. void EnsureCapturedObjectAllocatedAt(int object_index,
  554. std::stack<int>* worklist);
  555. - Handle<Object> InitializeObjectAt(TranslatedValue* slot);
  556. + Handle<HeapObject> InitializeObjectAt(TranslatedValue* slot);
  557. void InitializeCapturedObjectAt(int object_index, std::stack<int>* worklist,
  558. const DisallowHeapAllocation& no_allocation);
  559. void InitializeJSObjectAt(TranslatedFrame* frame, int* value_index,
  560. @@ -392,6 +395,9 @@ class TranslatedState {
  561. TranslatedValue* ResolveCapturedObject(TranslatedValue* slot);
  562. TranslatedValue* GetValueByObjectIndex(int object_index);
  563. Handle<Object> GetValueAndAdvance(TranslatedFrame* frame, int* value_index);
  564. + TranslatedValue* GetResolvedSlot(TranslatedFrame* frame, int value_index);
  565. + TranslatedValue* GetResolvedSlotAndAdvance(TranslatedFrame* frame,
  566. + int* value_index);
  567. static uint32_t GetUInt32Slot(Address fp, int slot_index);
  568. static uint64_t GetUInt64Slot(Address fp, int slot_index);
  569. @@ -772,7 +778,7 @@ class FrameDescription {
  570. intptr_t continuation_;
  571. // This must be at the end of the object as the object is allocated larger
  572. - // than it's definition indicate to extend this array.
  573. + // than its definition indicates to extend this array.
  574. intptr_t frame_content_[1];
  575. intptr_t* GetFrameSlotPointer(unsigned offset) {
  576. diff --git a/test/mjsunit/compiler/regress-1084820.js b/test/mjsunit/compiler/regress-1084820.js
  577. new file mode 100644
  578. index 0000000000000000000000000000000000000000..beb168b413ff045c5aff8e68d2e6da32b27800d6
  579. --- /dev/null
  580. +++ b/test/mjsunit/compiler/regress-1084820.js
  581. @@ -0,0 +1,27 @@
  582. +// Copyright 2020 the V8 project authors. All rights reserved.
  583. +// Use of this source code is governed by a BSD-style license that can be
  584. +// found in the LICENSE file.
  585. +
  586. +// Flags: --allow-natives-syntax
  587. +
  588. +// Create a map where 'my_property' has HeapObject representation.
  589. +const dummy_obj = {};
  590. +dummy_obj.my_property = 'some HeapObject';
  591. +dummy_obj.my_property = 'some other HeapObject';
  592. +
  593. +function gaga() {
  594. + const obj = {};
  595. + // Store a HeapNumber and then a Smi.
  596. + // This must happen in a loop, even if it's only 2 iterations:
  597. + for (let j = -3_000_000_000; j <= -1_000_000_000; j += 2_000_000_000) {
  598. + obj.my_property = j;
  599. + }
  600. + // Trigger (soft) deopt.
  601. + if (!%IsBeingInterpreted()) obj + obj;
  602. +}
  603. +
  604. +%PrepareFunctionForOptimization(gaga);
  605. +gaga();
  606. +gaga();
  607. +%OptimizeFunctionOnNextCall(gaga);
  608. +gaga();
  609. diff --git a/test/mjsunit/compiler/regress-1092650.js b/test/mjsunit/compiler/regress-1092650.js
  610. new file mode 100644
  611. index 0000000000000000000000000000000000000000..ba94375aeb8262536e28d5d409d69115e385c3b3
  612. --- /dev/null
  613. +++ b/test/mjsunit/compiler/regress-1092650.js
  614. @@ -0,0 +1,23 @@
  615. +// Copyright 2020 the V8 project authors. All rights reserved.
  616. +// Use of this source code is governed by a BSD-style license that can be
  617. +// found in the LICENSE file.
  618. +
  619. +// Flags: --allow-natives-syntax
  620. +
  621. +// Create map with HeapNumber in field 'a'
  622. +({a: 2**30});
  623. +
  624. +function foo() {
  625. + return foo.arguments[0];
  626. +}
  627. +
  628. +function main() {
  629. + foo({a: 42});
  630. +}
  631. +
  632. +%PrepareFunctionForOptimization(foo);
  633. +%PrepareFunctionForOptimization(main);
  634. +main();
  635. +main();
  636. +%OptimizeFunctionOnNextCall(main);
  637. +main();
  638. diff --git a/test/mjsunit/compiler/regress-1094132.js b/test/mjsunit/compiler/regress-1094132.js
  639. new file mode 100644
  640. index 0000000000000000000000000000000000000000..418637d86f8c363b9c0c41c450914e758ff73e9c
  641. --- /dev/null
  642. +++ b/test/mjsunit/compiler/regress-1094132.js
  643. @@ -0,0 +1,78 @@
  644. +// Copyright 2020 the V8 project authors. All rights reserved.
  645. +// Use of this source code is governed by a BSD-style license that can be
  646. +// found in the LICENSE file.
  647. +
  648. +// Flags: --allow-natives-syntax
  649. +
  650. +function prettyPrinted() {}
  651. +
  652. +function formatFailureText() {
  653. + if (expectedText.length <= 40 && foundText.length <= 40) {
  654. + message += ": expected <" + expectedText + "> found <" + foundText + ">";
  655. + message += ":\nexpected:\n" + expectedText + "\nfound:\n" + foundText;
  656. + }
  657. +}
  658. +
  659. +function fail(expectedText, found, name_opt) {
  660. + formatFailureText(expectedText, found, name_opt);
  661. + if (!a[aProps[i]][aProps[i]]) { }
  662. +}
  663. +
  664. +function deepEquals(a, b) {
  665. + if (a === 0) return 1 / a === 1 / b;
  666. + if (typeof a !== typeof a) return false;
  667. + if (typeof a !== "object" && typeof a !== "function") return false;
  668. + if (objectClass !== classOf()) return false;
  669. + if (objectClass === "RegExp") { }
  670. +}
  671. +
  672. +function assertEquals() {
  673. + if (!deepEquals()) {
  674. + fail(prettyPrinted(), undefined, undefined);
  675. + }
  676. +}
  677. +
  678. +({y: {}, x: 0.42});
  679. +
  680. +function gaga() {
  681. + return {gx: bar.arguments[0], hx: baz.arguments[0]};
  682. +}
  683. +
  684. +function baz() {
  685. + return gaga();
  686. +}
  687. +
  688. +function bar(obj) {
  689. + return baz(obj.y);
  690. +}
  691. +
  692. +function foo() {
  693. + bar({y: {}, x: 42});
  694. + try { assertEquals() } catch (e) {}
  695. + try { assertEquals() } catch (e) {}
  696. + assertEquals();
  697. +}
  698. +
  699. +%PrepareFunctionForOptimization(prettyPrinted);
  700. +%PrepareFunctionForOptimization(formatFailureText);
  701. +%PrepareFunctionForOptimization(fail);
  702. +%PrepareFunctionForOptimization(deepEquals);
  703. +%PrepareFunctionForOptimization(assertEquals);
  704. +%PrepareFunctionForOptimization(gaga);
  705. +%PrepareFunctionForOptimization(baz);
  706. +%PrepareFunctionForOptimization(bar);
  707. +%PrepareFunctionForOptimization(foo);
  708. +try { foo() } catch (e) {}
  709. +%OptimizeFunctionOnNextCall(foo);
  710. +try { foo() } catch (e) {}
  711. +%PrepareFunctionForOptimization(prettyPrinted);
  712. +%PrepareFunctionForOptimization(formatFailureText);
  713. +%PrepareFunctionForOptimization(fail);
  714. +%PrepareFunctionForOptimization(deepEquals);
  715. +%PrepareFunctionForOptimization(assertEquals);
  716. +%PrepareFunctionForOptimization(gaga);
  717. +%PrepareFunctionForOptimization(baz);
  718. +%PrepareFunctionForOptimization(bar);
  719. +%PrepareFunctionForOptimization(foo);
  720. +%OptimizeFunctionOnNextCall(foo);
  721. +try { foo() } catch (e) {}