@@ -665,13 +665,19 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     }
 
     // Only flush code for functions.
-    if (shared_info->code()->kind() != Code::FUNCTION) return false;
+    if (shared_info->code()->kind() != Code::FUNCTION) {
+      return false;
+    }
 
     // Function must be lazy compilable.
-    if (!shared_info->allows_lazy_compilation()) return false;
+    if (!shared_info->allows_lazy_compilation()) {
+      return false;
+    }
 
     // If this is a full script wrapped in a function we do not flush the code.
-    if (shared_info->is_toplevel()) return false;
+    if (shared_info->is_toplevel()) {
+      return false;
+    }
 
     // Age this shared function info.
     if (shared_info->code_age() < kCodeAgeThreshold) {
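
Taken together, the three braced early returns plus the age check below spell out the complete flushing heuristic. A minimal, self-contained sketch of the combined predicate, assuming stand-in types and an illustrative threshold value (not V8's actual IsFlushable):

// Minimal model of the flushing decision above; SharedInfo and the
// threshold value are stand-ins, not V8's actual types or constants.
struct SharedInfo {
  bool is_function_code;         // code()->kind() == Code::FUNCTION
  bool allows_lazy_compilation;  // can be recompiled from source on demand
  bool is_toplevel;              // a full script wrapped in a function
  int code_age;                  // grows while the code goes unused
};

const int kCodeAgeThresholdModel = 5;  // assumed value, for illustration only

bool CanFlushCode(const SharedInfo& info) {
  if (!info.is_function_code) return false;         // only flush functions
  if (!info.allows_lazy_compilation) return false;  // must be recompilable
  if (info.is_toplevel) return false;               // never flush scripts
  // The real code also ages the function here as a side effect.
  return info.code_age >= kCodeAgeThresholdModel;
}
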
@@ -864,21 +870,7 @@ class StaticMarkingVisitor : public StaticVisitorBase {
     collector->MarkObject(jsfunction->unchecked_shared()->unchecked_code());
 
     if (jsfunction->unchecked_code()->kind() == Code::OPTIMIZED_FUNCTION) {
-      // For optimized functions we should retain both the non-optimized
-      // version of its code and the non-optimized version of all inlined
-      // functions. This is required to support bailing out of inlined code.
-      DeoptimizationInputData* data =
-          reinterpret_cast<DeoptimizationInputData*>(
-              jsfunction->unchecked_code()->unchecked_deoptimization_data());
-
-      FixedArray* literals = data->UncheckedLiteralArray();
-
-      for (int i = 0, count = data->InlinedFunctionCount()->value();
-           i < count;
-           i++) {
-        JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
-        collector->MarkObject(inlined->unchecked_shared()->unchecked_code());
-      }
+      collector->MarkInlinedFunctionsCode(jsfunction->unchecked_code());
     }
   }
 
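
The loop deleted here reappears below as MarkCompactCollector::MarkInlinedFunctionsCode, so the visitor and the stack walk now share one implementation. A self-contained sketch of the layout that loop relies on, with stand-in types in place of V8's DeoptimizationInputData:

#include <vector>

struct Code;
struct JSFunctionModel { Code* unoptimized_code; };

// Stand-in for DeoptimizationInputData: per the loop above, the first
// InlinedFunctionCount() entries of the literal array hold the JSFunctions
// that were inlined into the optimized code.
struct DeoptDataModel {
  std::vector<JSFunctionModel*> literals;
  int inlined_function_count;
};

// Mirrors MarkInlinedFunctionsCode: retain the non-optimized code of every
// inlined function so bailing out from inlined code remains possible.
void RetainInlinedCode(const DeoptDataModel& data, void (*mark)(Code*)) {
  for (int i = 0; i < data.inlined_function_count; i++) {
    mark(data.literals[i]->unoptimized_code);
  }
}
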
@@ -994,9 +986,7 @@ class CodeMarkingVisitor : public ThreadVisitor {
       : collector_(collector) {}
 
   void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
-    for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
-      collector_->MarkObject(it.frame()->unchecked_code());
-    }
+    collector_->PrepareThreadForCodeFlushing(isolate, top);
   }
 
  private:
@@ -1027,6 +1017,42 @@ class SharedFunctionInfoMarkingVisitor : public ObjectVisitor {
 };
 
 
+void MarkCompactCollector::MarkInlinedFunctionsCode(Code* code) {
+  // For optimized functions we should retain both the non-optimized version
+  // of its code and the non-optimized version of all inlined functions.
+  // This is required to support bailing out from inlined code.
+  DeoptimizationInputData* data =
+      reinterpret_cast<DeoptimizationInputData*>(
+          code->unchecked_deoptimization_data());
+
+  FixedArray* literals = data->UncheckedLiteralArray();
+
+  for (int i = 0, count = data->InlinedFunctionCount()->value();
+       i < count;
+       i++) {
+    JSFunction* inlined = reinterpret_cast<JSFunction*>(literals->get(i));
+    MarkObject(inlined->unchecked_shared()->unchecked_code());
+  }
+}
+
+
+void MarkCompactCollector::PrepareThreadForCodeFlushing(Isolate* isolate,
+                                                        ThreadLocalTop* top) {
+  for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
+    // Note: for a frame that has a pending lazy deoptimization,
+    // StackFrame::unchecked_code will return the non-optimized code object
+    // for the outermost function and StackFrame::LookupCode will return the
+    // actual optimized code object.
+    StackFrame* frame = it.frame();
+    Code* code = frame->unchecked_code();
+    MarkObject(code);
+    if (frame->is_optimized()) {
+      MarkInlinedFunctionsCode(frame->LookupCode());
+    }
+  }
+}
+
+
 void MarkCompactCollector::PrepareForCodeFlushing() {
   ASSERT(heap() == Isolate::Current()->heap());
 
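
The note inside PrepareThreadForCodeFlushing is the crux of this change: for a frame with a pending lazy deoptimization, unchecked_code() and LookupCode() return different code objects, so both must be marked. A self-contained model of that walk, using stand-in Frame and Code types rather than V8's StackFrame API:

#include <vector>

struct Code { bool marked = false; };

struct FrameModel {
  Code* reported_code;   // what unchecked_code() reports; with a pending lazy
                         // deoptimization this is the non-optimized code
  Code* actual_code;     // what LookupCode() finds: the optimized code object
  bool is_optimized;
};

void Mark(Code* code) { code->marked = true; }

void PrepareStackForCodeFlushing(const std::vector<FrameModel>& stack) {
  for (const FrameModel& frame : stack) {
    Mark(frame.reported_code);
    if (frame.is_optimized) {
      // The real method additionally marks the non-optimized code of every
      // function inlined into this optimized code object.
      Mark(frame.actual_code);
    }
  }
}

Keeping the helper on MarkCompactCollector lets both the per-thread visitor above and PrepareForCodeFlushing below reuse it, which is exactly what the remaining hunk wires up.
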
@@ -1050,9 +1076,8 @@ void MarkCompactCollector::PrepareForCodeFlushing() {
 
   // Make sure we are not referencing the code from the stack.
   ASSERT(this == heap()->mark_compact_collector());
-  for (StackFrameIterator it; !it.done(); it.Advance()) {
-    MarkObject(it.frame()->unchecked_code());
-  }
+  PrepareThreadForCodeFlushing(heap()->isolate(),
+                               heap()->isolate()->thread_local_top());
 
   // Iterate the archived stacks in all threads to check if
   // the code is referenced.