
Commit e34de77

meta: merge node/master into node-chakracore/master
Merge 8599465 as of 2017-12-23. This commit was automatically generated.
For any problems, please contact jackhorton.

Reviewed-By: Taylor Woll <tawoll@ntdev.microsoft.com>
2 parents: 86c1bc3 + 8599465


63 files changed: +1422, −804 lines

COLLABORATOR_GUIDE.md

Lines changed: 1 addition & 0 deletions
@@ -142,6 +142,7 @@ test should *fail* before the change, and *pass* after the change.
 All pull requests that modify executable code should be subjected to
 continuous integration tests on the
 [project CI server](https://ci.nodejs.org/).
+The pull request should have a CI status indicator if possible.

 #### Useful CI Jobs

common.gypi

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@

   # Reset this number to 0 on major V8 upgrades.
   # Increment by one for each non-official patch applied to deps/v8.
-  'v8_embedder_string': '-node.4',
+  'v8_embedder_string': '-node.5',

   # Enable disassembler for `--print-code` v8 options
   'v8_enable_disassembler': 1,
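The embedder string is appended to the V8 version that Node reports at runtime, so this bump is user-visible. A minimal C++ sketch of the composition, using example version numbers that are not taken from this tree:

#include <iostream>
#include <string>

int main() {
  // Example values only; the real version components live in
  // deps/v8/include/v8-version.h and are stitched together by the build.
  const std::string v8_version = "6.2.414.46";       // hypothetical base version
  const std::string v8_embedder_string = "-node.5";  // the value set by this commit
  // Comparable to what `node -p process.versions.v8` would report.
  std::cout << v8_version + v8_embedder_string << "\n";  // prints 6.2.414.46-node.5
}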

deps/v8/src/builtins/arm/builtins-arm.cc

Lines changed: 3 additions & 10 deletions
@@ -782,22 +782,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ cmp(
           optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-    // Checking whether the queued function is ready for install is
-    // optional, since we come across interrupts and stack checks elsewhere.
-    // However, not checking may delay installing ready functions, and
-    // always checking would be quite expensive. A good compromise is to
-    // first check against stack limit as a cue for an interrupt signal.
-    __ LoadRoot(scratch2, Heap::kStackLimitRootIndex);
-    __ cmp(sp, Operand(scratch2));
-    __ b(hs, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }
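The seven ports below carry the identical change: the speculative attempt to install finished optimized code, cued by a stack-limit check, is removed, and the InOptimizationQueue case now always falls through to the unoptimized entry. A minimal C++ sketch of the before/after control flow, using hypothetical stand-ins rather than V8's real MacroAssembler API:

#include <iostream>

// Hypothetical stand-ins; these names and helpers are illustrative,
// not V8's actual API.
enum class OptimizationMarker { kNone, kInOptimizationQueue };

// Stand-in for the deleted "sp below stack limit" probe.
bool StackLimitHit() { return false; }

// Before: when the function sat in the optimization queue, an imminent
// interrupt (stack limit hit) was used as a cheap cue to try installing
// already-finished optimized code via Runtime::kTryInstallOptimizedCode.
void DispatchBefore(OptimizationMarker marker) {
  if (marker == OptimizationMarker::kInOptimizationQueue) {
    if (StackLimitHit()) {
      std::cout << "tail-call Runtime::kTryInstallOptimizedCode\n";
      return;
    }
    std::cout << "fall through to the unoptimized entry\n";
  }
}

// After: always fall through; a later interrupt updates the code slot.
void DispatchAfter(OptimizationMarker marker) {
  if (marker == OptimizationMarker::kInOptimizationQueue) {
    std::cout << "fall through to the unoptimized entry\n";
  }
}

int main() {
  DispatchBefore(OptimizationMarker::kInOptimizationQueue);
  DispatchAfter(OptimizationMarker::kInOptimizationQueue);
}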

deps/v8/src/builtins/arm64/builtins-arm64.cc

Lines changed: 3 additions & 10 deletions
@@ -788,22 +788,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ Cmp(
           optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
-    __ B(hs, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ B(&fallthrough);
   }
 }

deps/v8/src/builtins/ia32/builtins-ia32.cc

Lines changed: 3 additions & 12 deletions
@@ -698,24 +698,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ cmp(
           optimized_code_entry,
           Immediate(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
       __ Assert(equal, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    ExternalReference stack_limit =
-        ExternalReference::address_of_stack_limit(masm->isolate());
-    __ cmp(esp, Operand::StaticVariable(stack_limit));
-    __ j(above_equal, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }

deps/v8/src/builtins/mips/builtins-mips.cc

Lines changed: 3 additions & 10 deletions
@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ Assert(
           eq, kExpectedOptimizationSentinel, optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ LoadRoot(at, Heap::kStackLimitRootIndex);
-    __ Branch(&fallthrough, hs, sp, Operand(at));
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }

deps/v8/src/builtins/mips64/builtins-mips64.cc

Lines changed: 3 additions & 10 deletions
@@ -760,21 +760,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ Assert(
           eq, kExpectedOptimizationSentinel, optimized_code_entry,
           Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ LoadRoot(t0, Heap::kStackLimitRootIndex);
-    __ Branch(&fallthrough, hs, sp, Operand(t0));
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }

deps/v8/src/builtins/ppc/builtins-ppc.cc

Lines changed: 3 additions & 11 deletions
@@ -780,23 +780,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ CmpSmiLiteral(
           optimized_code_entry,
           Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ LoadRoot(ip, Heap::kStackLimitRootIndex);
-    __ cmpl(sp, ip);
-    __ bge(&fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ b(&fallthrough);
   }
 }

deps/v8/src/builtins/s390/builtins-s390.cc

Lines changed: 3 additions & 10 deletions
@@ -783,22 +783,15 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ CmpSmiLiteral(
           optimized_code_entry,
           Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
       __ Assert(eq, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
-    __ bge(&fallthrough, Label::kNear);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ b(&fallthrough, Label::kNear);
   }
 }

deps/v8/src/builtins/x64/builtins-x64.cc

Lines changed: 3 additions & 10 deletions
@@ -781,21 +781,14 @@ static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
                         Runtime::kCompileOptimized_Concurrent);

   {
-    // Otherwise, the marker is InOptimizationQueue.
+    // Otherwise, the marker is InOptimizationQueue, so fall through hoping
+    // that an interrupt will eventually update the slot with optimized code.
     if (FLAG_debug_code) {
       __ SmiCompare(optimized_code_entry,
                     Smi::FromEnum(OptimizationMarker::kInOptimizationQueue));
       __ Assert(equal, kExpectedOptimizationSentinel);
     }
-
-    // Checking whether the queued function is ready for install is optional,
-    // since we come across interrupts and stack checks elsewhere. However,
-    // not checking may delay installing ready functions, and always checking
-    // would be quite expensive. A good compromise is to first check against
-    // stack limit as a cue for an interrupt signal.
-    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
-    __ j(above_equal, &fallthrough);
-    GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
+    __ jmp(&fallthrough);
   }
 }
