aboutsummaryrefslogtreecommitdiffstats
path: root/mps/code
diff options
context:
space:
mode:
authorDavid Lovemore2012-09-03 17:05:51 +0100
committerDavid Lovemore2012-09-03 17:05:51 +0100
commit886e96ba8e0bc2fbda89ce7c8e417ff5c5df23a8 (patch)
treece26936b76bfa4f1638ea1678fd8718598d65710 /mps/code
parent274979e8cbb97f74cc448f5e6e8cdbcaf8d033f9 (diff)
downloademacs-886e96ba8e0bc2fbda89ce7c8e417ff5c5df23a8.tar.gz
emacs-886e96ba8e0bc2fbda89ce7c8e417ff5c5df23a8.zip
Fix weak band assert. We now trace weak segments as weak on a fault if we are in the weak band.
Copied from Perforce Change: 179210 ServerID: perforce.ravenbrook.com
Diffstat (limited to 'mps/code')
-rw-r--r--mps/code/mpm.h1
-rw-r--r--mps/code/pool.c10
-rw-r--r--mps/code/poolabs.c7
-rw-r--r--mps/code/trace.c89
4 files changed, 89 insertions, 18 deletions
diff --git a/mps/code/mpm.h b/mps/code/mpm.h
index 263cb795e5d..d9e379a3537 100644
--- a/mps/code/mpm.h
+++ b/mps/code/mpm.h
@@ -378,6 +378,7 @@ extern void TraceStart(Trace trace, double mortality,
378 double finishingTime); 378 double finishingTime);
379extern Size TracePoll(Globals globals); 379extern Size TracePoll(Globals globals);
380 380
381extern Rank TraceRankForAccess(Arena arena, Seg seg);
381extern void TraceSegAccess(Arena arena, Seg seg, AccessSet mode); 382extern void TraceSegAccess(Arena arena, Seg seg, AccessSet mode);
382extern Res TraceFix(ScanState ss, Ref *refIO); 383extern Res TraceFix(ScanState ss, Ref *refIO);
383extern Res TraceFixEmergency(ScanState ss, Ref *refIO); 384extern Res TraceFixEmergency(ScanState ss, Ref *refIO);
diff --git a/mps/code/pool.c b/mps/code/pool.c
index 55e70ff1462..ad96054f7f0 100644
--- a/mps/code/pool.c
+++ b/mps/code/pool.c
@@ -393,12 +393,10 @@ Res PoolScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
393 /* The segment must belong to the pool. */ 393 /* The segment must belong to the pool. */
394 AVER(pool == SegPool(seg)); 394 AVER(pool == SegPool(seg));
395 395
396 /* We actually want to check that the rank we are scanning at */ 396 /* We check that either ss->rank is in the segment's
397 /* (ss->rank) is at least as big as all the ranks in */ 397 * ranks, or that ss->rank is exact. The check is more complicated if
398 /* the segment (SegRankSet(seg)). It is tricky to check that, */ 398 * we actually have multiple ranks in a seg.
399 /* so we only check that either ss->rank is in the segment's */ 399 * See <code/trace.c#scan.conservative> */
400 /* ranks, or that ss->rank is exact. */
401 /* See <code/trace.c#scan.conservative> */
402 AVER(ss->rank == RankEXACT || RankSetIsMember(SegRankSet(seg), ss->rank)); 400 AVER(ss->rank == RankEXACT || RankSetIsMember(SegRankSet(seg), ss->rank));
403 401
404 /* Should only scan segments which contain grey objects. */ 402 /* Should only scan segments which contain grey objects. */
diff --git a/mps/code/poolabs.c b/mps/code/poolabs.c
index 80b7f5323be..9834feb711d 100644
--- a/mps/code/poolabs.c
+++ b/mps/code/poolabs.c
@@ -413,9 +413,12 @@ Res PoolSingleAccess(Pool pool, Seg seg, Addr addr,
413 /* Check that the reference is aligned to a word boundary */ 413 /* Check that the reference is aligned to a word boundary */
414 /* (we assume it is not a reference otherwise). */ 414 /* (we assume it is not a reference otherwise). */
415 if(WordIsAligned((Word)ref, sizeof(Word))) { 415 if(WordIsAligned((Word)ref, sizeof(Word))) {
416 /* See the note in TraceSegAccess about using RankEXACT here */ 416 Rank rank;
417 /* See the note in TraceRankForAccess */
417 /* (<code/trace.c#scan.conservative>). */ 418 /* (<code/trace.c#scan.conservative>). */
418 TraceScanSingleRef(arena->flippedTraces, RankEXACT, arena, 419
420 rank = TraceRankForAccess(arena, seg);
421 TraceScanSingleRef(arena->flippedTraces, rank, arena,
419 seg, (Ref *)addr); 422 seg, (Ref *)addr);
420 } 423 }
421 } 424 }
diff --git a/mps/code/trace.c b/mps/code/trace.c
index 0f8d84ba3ac..3fddaec8d33 100644
--- a/mps/code/trace.c
+++ b/mps/code/trace.c
@@ -827,7 +827,75 @@ static void traceReclaim(Trace trace)
827 (void)TraceIdMessagesCreate(arena, trace->ti); 827 (void)TraceIdMessagesCreate(arena, trace->ti);
828} 828}
829 829
830/* TraceRankForAccess -- Returns rank to scan at if we hit a barrier.
831 *
832 * We assume a single trace as otherwise we need to implement rank
833 * filters on scanning.
834 *
835 * .scan.conservative: It's safe to scan at EXACT unless the band is
836 * WEAK and in that case the segment should be weak.
837 *
838 * If the trace band is EXACT then we scan EXACT. This might prevent
839 * finalisation messages and may preserve objects pointed to only by weak
840 * references but tough luck -- the mutator wants to look.
841 *
842 * If the trace band is FINAL and the segment is FINAL, we scan it FINAL.
843 * Any objects not yet preserved deserve to die, and we're only giving
844 * them a temporary reprieve. All the objects on the segment should be FINAL,
845 * otherwise they might get sent finalization messages.
846 *
847 * If the trace band is FINAL, and the segment is not FINAL, we scan at EXACT.
848 * This is safe to do for FINAL and WEAK references.
849 *
850 * If the trace band is WEAK then the segment must be weak only, and we
851 * scan at WEAK. All other segments for this trace should be scanned by now.
852 * We must scan at WEAK to avoid bringing any objects back to life.
853 *
854 * See the message <http://info.ravenbrook.com/mail/2012/08/30/16-46-42/0.txt>
855 * for a description of these semantics.
856 */
857Rank TraceRankForAccess(Arena arena, Seg seg)
858{
859 TraceSet ts;
860 Trace trace;
861 TraceId ti;
862 Rank band;
863 RankSet rankSet;
830 864
865 AVERT(Arena, arena);
866 AVERT(Seg, seg);
867
868 band = RankAMBIG; /* initialize band to avoid warning */
869 ts = arena->flippedTraces;
870 AVER(TraceSetIsSingle(ts));
871 TRACE_SET_ITER(ti, trace, ts, arena)
872 band = traceBand(trace);
873 TRACE_SET_ITER_END(ti, trace, ts, arena);
874 rankSet = SegRankSet(seg);
875 switch(band) {
876 case RankAMBIG:
877 NOTREACHED;
878 break;
879 case RankEXACT:
880 return RankEXACT;
881 case RankFINAL:
882 if(rankSet == RankSetSingle(RankFINAL)) {
883 return RankFINAL;
884 }
885 /* It's safe to scan at exact in the final band so do so if there are
886 * any non-final references. */
887 return RankEXACT;
888 case RankWEAK:
889 AVER(rankSet == RankSetSingle(RankWEAK));
890 return RankWEAK;
891 default:
892 NOTREACHED;
893 break;
894 }
895 NOTREACHED;
896 return RankEXACT;
897}
898
831/* traceFindGrey -- find a grey segment 899/* traceFindGrey -- find a grey segment
832 * 900 *
833 * This function finds the next segment to scan. It does this according 901 * This function finds the next segment to scan. It does this according
@@ -870,7 +938,10 @@ static void traceReclaim(Trace trace)
870 * whilst working in this band. That's what we check, although we 938 * whilst working in this band. That's what we check, although we
871 * expect to have to change the check if we introduce more ranks, or 939 * expect to have to change the check if we introduce more ranks, or
872 * start changing the semantics of them. A flag is used to implement 940 * start changing the semantics of them. A flag is used to implement
873 * this check. 941 * this check. See <http://info.ravenbrook.com/project/mps/issue/job001658/>.
942 *
943 * For further discussion on the semantics of rank based tracing see
944 * <http://info.ravenbrook.com/mail/2007/06/25/11-35-57/0.txt>
874 */ 945 */
875 946
876static Bool traceFindGrey(Seg *segReturn, Rank *rankReturn, 947static Bool traceFindGrey(Seg *segReturn, Rank *rankReturn,
@@ -1131,8 +1202,6 @@ static void traceScanSeg(TraceSet ts, Rank rank, Arena arena, Seg seg)
1131 1202
1132void TraceSegAccess(Arena arena, Seg seg, AccessSet mode) 1203void TraceSegAccess(Arena arena, Seg seg, AccessSet mode)
1133{ 1204{
1134 TraceId ti;
1135
1136 AVERT(Arena, arena); 1205 AVERT(Arena, arena);
1137 AVERT(Seg, seg); 1206 AVERT(Seg, seg);
1138 1207
@@ -1149,13 +1218,15 @@ void TraceSegAccess(Arena arena, Seg seg, AccessSet mode)
1149 EVENT_PPU(TraceAccess, arena, seg, mode); 1218 EVENT_PPU(TraceAccess, arena, seg, mode);
1150 1219
1151 if((mode & SegSM(seg) & AccessREAD) != 0) { /* read barrier? */ 1220 if((mode & SegSM(seg) & AccessREAD) != 0) { /* read barrier? */
1221 Trace trace;
1222 TraceId ti;
1223 Rank rank;
1224
1152 /* Pick set of traces to scan for: */ 1225 /* Pick set of traces to scan for: */
1153 TraceSet traces = arena->flippedTraces; 1226 TraceSet traces = arena->flippedTraces;
1154 1227
1155 /* .scan.conservative: At the moment we scan at RankEXACT. Really */ 1228 rank = TraceRankForAccess(arena, seg);
1156 /* we should be scanning at the "phase" of the trace, which is the */ 1229 traceScanSeg(traces, rank, arena, seg);
1157 /* minimum rank of all grey segments. (see request.mps.170160) */
1158 traceScanSeg(traces, RankEXACT, arena, seg);
1159 1230
1160 /* The pool should've done the job of removing the greyness that */ 1231 /* The pool should've done the job of removing the greyness that */
1161 /* was causing the segment to be protected, so that the mutator */ 1232 /* was causing the segment to be protected, so that the mutator */
@@ -1163,8 +1234,6 @@ void TraceSegAccess(Arena arena, Seg seg, AccessSet mode)
1163 AVER(TraceSetInter(SegGrey(seg), traces) == TraceSetEMPTY); 1234 AVER(TraceSetInter(SegGrey(seg), traces) == TraceSetEMPTY);
1164 1235
1165 STATISTIC_STAT({ 1236 STATISTIC_STAT({
1166 Trace trace;
1167
1168 TRACE_SET_ITER(ti, trace, traces, arena) 1237 TRACE_SET_ITER(ti, trace, traces, arena)
1169 ++trace->readBarrierHitCount; 1238 ++trace->readBarrierHitCount;
1170 TRACE_SET_ITER_END(ti, trace, traces, arena); 1239 TRACE_SET_ITER_END(ti, trace, traces, arena);