aboutsummaryrefslogtreecommitdiffstats
path: root/mps/code/trace.c
diff options
context:
space:
mode:
authorRichard Brooksby2012-09-06 17:17:18 +0100
committerRichard Brooksby2012-09-06 17:17:18 +0100
commit858e4ac0ac8ee684f48f0edd9d80ae28b17aee53 (patch)
tree5034519c869b370df2c87394c03f7f30e78945b9 /mps/code/trace.c
parent383335816d888b5f28fe7b034106dc2056f56620 (diff)
downloademacs-858e4ac0ac8ee684f48f0edd9d80ae28b17aee53.tar.gz
emacs-858e4ac0ac8ee684f48f0edd9d80ae28b17aee53.zip
Partial merge of branch/2012-07-23/cet-transform, excluding cet-specific parts.
Copied from Perforce Change: 179309 ServerID: perforce.ravenbrook.com
Diffstat (limited to 'mps/code/trace.c')
-rw-r--r--mps/code/trace.c291
1 files changed, 173 insertions, 118 deletions
diff --git a/mps/code/trace.c b/mps/code/trace.c
index 8ff9bf8bdf1..9771797337d 100644
--- a/mps/code/trace.c
+++ b/mps/code/trace.c
@@ -39,6 +39,7 @@ Bool ScanStateCheck(ScanState ss)
39 39
40 CHECKS(ScanState, ss); 40 CHECKS(ScanState, ss);
41 CHECKL(FUNCHECK(ss->fix)); 41 CHECKL(FUNCHECK(ss->fix));
42 /* Can't check ss->fixClosure. */
42 CHECKL(ss->zoneShift == ss->arena->zoneShift); 43 CHECKL(ss->zoneShift == ss->arena->zoneShift);
43 white = ZoneSetEMPTY; 44 white = ZoneSetEMPTY;
44 TRACE_SET_ITER(ti, trace, ss->traces, ss->arena) 45 TRACE_SET_ITER(ti, trace, ss->traces, ss->arena)
@@ -69,12 +70,28 @@ void ScanStateInit(ScanState ss, TraceSet ts, Arena arena,
69 AVER(RankCheck(rank)); 70 AVER(RankCheck(rank));
70 /* white is arbitrary and can't be checked */ 71 /* white is arbitrary and can't be checked */
71 72
72 ss->fix = TraceFix; 73 /* NOTE: We can only currently support scanning for a set of traces with
73 TRACE_SET_ITER(ti, trace, ts, arena) 74 the same fix method and closure. To remove this restriction,
74 if(trace->emergency) { 75 it would be necessary to dispatch to the fix methods of sets of traces
75 ss->fix = TraceFixEmergency; 76 in TraceFix. */
77 ss->fix = NULL;
78 ss->fixClosure = NULL;
79 TRACE_SET_ITER(ti, trace, ts, arena) {
80 if (ss->fix == NULL) {
81 ss->fix = trace->fix;
82 ss->fixClosure = trace->fixClosure;
83 } else {
84 AVER(ss->fix == trace->fix);
85 AVER(ss->fixClosure == trace->fixClosure);
76 } 86 }
77 TRACE_SET_ITER_END(ti, trace, ts, arena); 87 } TRACE_SET_ITER_END(ti, trace, ts, arena);
88 AVER(ss->fix != NULL);
89
90 /* If the fix method is the normal GC fix, then we optimise the test for
91 whether it's an emergency or not by updating the dispatch here, once. */
92 if (ss->fix == PoolFix && ArenaEmergency(arena))
93 ss->fix = PoolFixEmergency;
94
78 ss->rank = rank; 95 ss->rank = rank;
79 ss->traces = ts; 96 ss->traces = ts;
80 ss->zoneShift = arena->zoneShift; 97 ss->zoneShift = arena->zoneShift;
@@ -174,10 +191,11 @@ Bool TraceCheck(Trace trace)
174 if(trace->state == TraceFLIPPED) { 191 if(trace->state == TraceFLIPPED) {
175 CHECKL(RankCheck(trace->band)); 192 CHECKL(RankCheck(trace->band));
176 } 193 }
177 CHECKL(BoolCheck(trace->emergency));
178 if(trace->chain != NULL) { 194 if(trace->chain != NULL) {
179 CHECKU(Chain, trace->chain); 195 CHECKU(Chain, trace->chain);
180 } 196 }
197 CHECKL(FUNCHECK(trace->fix));
198 /* Can't check trace->fixClosure. */
181 199
182 /* @@@@ checks for counts missing */ 200 /* @@@@ checks for counts missing */
183 201
@@ -303,24 +321,6 @@ static void traceSetUpdateCounts(TraceSet ts, Arena arena, ScanState ss,
303} 321}
304 322
305 323
306/* traceSetSignalEmergency -- move a set of traces into emergency mode. */
307
308static void traceSetSignalEmergency(TraceSet ts, Arena arena)
309{
310 TraceId ti;
311 Trace trace;
312
313 DIAG_SINGLEF(( "traceSetSignalEmergency",
314 "traceSet: $B", (WriteFB)ts, NULL ));
315
316 TRACE_SET_ITER(ti, trace, ts, arena)
317 trace->emergency = TRUE;
318 TRACE_SET_ITER_END(ti, trace, ts, arena);
319
320 return;
321}
322
323
324/* traceSetWhiteUnion 324/* traceSetWhiteUnion
325 * 325 *
326 * Returns a ZoneSet describing the union of the white sets of all the 326 * Returns a ZoneSet describing the union of the white sets of all the
@@ -360,7 +360,7 @@ Res TraceAddWhite(Trace trace, Seg seg)
360 if(res != ResOK) 360 if(res != ResOK)
361 return res; 361 return res;
362 362
363 /* Add the segment to the approximation of the white set the */ 363 /* Add the segment to the approximation of the white set if the */
364 /* pool made it white. */ 364 /* pool made it white. */
365 if(TraceSetIsMember(SegWhite(seg), trace)) { 365 if(TraceSetIsMember(SegWhite(seg), trace)) {
366 trace->white = ZoneSetUnion(trace->white, ZoneSetOfSeg(trace->arena, seg)); 366 trace->white = ZoneSetUnion(trace->white, ZoneSetOfSeg(trace->arena, seg));
@@ -474,23 +474,23 @@ static Res traceScanRootRes(TraceSet ts, Rank rank, Arena arena, Root root)
474 474
475/* traceScanRoot 475/* traceScanRoot
476 * 476 *
477 * Scan a root without fail. The traces may enter emergency mode to 477 * Scan a root, entering emergency mode on allocation failure.
478 * ensure this. */ 478 */
479 479
480static void traceScanRoot(TraceSet ts, Rank rank, Arena arena, Root root) 480static Res traceScanRoot(TraceSet ts, Rank rank, Arena arena, Root root)
481{ 481{
482 Res res; 482 Res res;
483 483
484 res = traceScanRootRes(ts, rank, arena, root); 484 res = traceScanRootRes(ts, rank, arena, root);
485 if(res != ResOK) { 485
486 AVER(ResIsAllocFailure(res)); 486 if (ResIsAllocFailure(res)) {
487 traceSetSignalEmergency(ts, arena); 487 ArenaSetEmergency(arena, TRUE);
488 res = traceScanRootRes(ts, rank, arena, root); 488 res = traceScanRootRes(ts, rank, arena, root);
489 /* Should be OK in emergency mode */ 489 /* Should be OK in emergency mode */
490 AVER(!ResIsAllocFailure(res));
490 } 491 }
491 AVER(ResOK == res);
492 492
493 return; 493 return res;
494} 494}
495 495
496 496
@@ -505,6 +505,7 @@ struct rootFlipClosureStruct {
505static Res rootFlip(Root root, void *p) 505static Res rootFlip(Root root, void *p)
506{ 506{
507 struct rootFlipClosureStruct *rf = (struct rootFlipClosureStruct *)p; 507 struct rootFlipClosureStruct *rf = (struct rootFlipClosureStruct *)p;
508 Res res;
508 509
509 AVERT(Root, root); 510 AVERT(Root, root);
510 AVER(p != NULL); 511 AVER(p != NULL);
@@ -514,18 +515,47 @@ static Res rootFlip(Root root, void *p)
514 515
515 AVER(RootRank(root) <= RankEXACT); /* see .root.rank */ 516 AVER(RootRank(root) <= RankEXACT); /* see .root.rank */
516 517
517 if(RootRank(root) == rf->rank) 518 if(RootRank(root) == rf->rank) {
518 traceScanRoot(rf->ts, rf->rank, rf->arena, root); 519 res = traceScanRoot(rf->ts, rf->rank, rf->arena, root);
520 if (res != ResOK)
521 return res;
522 }
519 523
520 return ResOK; 524 return ResOK;
521} 525}
522 526
523static void traceFlip(Trace trace) 527
528/* traceFlip -- flip the mutator from grey to black w.r.t. a trace
529 *
530 * The main job of traceFlip is to scan references which can't be protected
531 * from the mutator, changing the colour of the mutator from grey to black
532 * with respect to a trace. The mutator threads are suspended while this
533 is happening, and the mutator perceives an instantaneous change in all
534 * the references, enforced by the shield (barrier) system.
535 *
536 * NOTE: We don't have a way to shield the roots, so they are all scanned
537 * here. This is a coincidence. There is no particular reason that the
538 * roots have to be scanned at flip time. (The thread registers are unlikely
539 * ever to be protectable on stock hardware, however.)
540 *
541 * NOTE: Ambiguous references may only exist in roots, because we can't
542 * shield the exact roots and defer them for later scanning (after ambiguous
543 * heap references).
544 *
545 * NOTE: We don't support weak or final roots because we can't shield them
546 * and defer scanning until later. See above.
547 *
548 * If roots and segments were more similar, we could melt a lot of these
549 * problems.
550 */
551
552static Res traceFlip(Trace trace)
524{ 553{
525 Ring node, nextNode; 554 Ring node, nextNode;
526 Arena arena; 555 Arena arena;
527 Rank rank; 556 Rank rank;
528 struct rootFlipClosureStruct rfc; 557 struct rootFlipClosureStruct rfc;
558 Res res;
529 559
530 AVERT(Trace, trace); 560 AVERT(Trace, trace);
531 rfc.ts = TraceSetSingle(trace); 561 rfc.ts = TraceSetSingle(trace);
@@ -555,11 +585,10 @@ static void traceFlip(Trace trace)
555 /* higher ranking roots than data in pools. */ 585 /* higher ranking roots than data in pools. */
556 586
557 for(rank = RankAMBIG; rank <= RankEXACT; ++rank) { 587 for(rank = RankAMBIG; rank <= RankEXACT; ++rank) {
558 Res res;
559
560 rfc.rank = rank; 588 rfc.rank = rank;
561 res = RootsIterate(ArenaGlobals(arena), rootFlip, (void *)&rfc); 589 res = RootsIterate(ArenaGlobals(arena), rootFlip, (void *)&rfc);
562 AVER(res == ResOK); 590 if (res != ResOK)
591 goto failRootFlip;
563 } 592 }
564 593
565 /* .flip.alloc: Allocation needs to become black now. While we flip */ 594 /* .flip.alloc: Allocation needs to become black now. While we flip */
@@ -595,8 +624,11 @@ static void traceFlip(Trace trace)
595 EVENT2(TraceFlipEnd, trace, arena); 624 EVENT2(TraceFlipEnd, trace, arena);
596 625
597 ShieldResume(arena); 626 ShieldResume(arena);
627 return ResOK;
598 628
599 return; 629failRootFlip:
630 ShieldResume(arena);
631 return res;
600} 632}
601 633
602/* traceCopySizes -- preserve size information for later use 634/* traceCopySizes -- preserve size information for later use
@@ -668,7 +700,8 @@ found:
668 trace->ti = ti; 700 trace->ti = ti;
669 trace->state = TraceINIT; 701 trace->state = TraceINIT;
670 trace->band = RankAMBIG; /* Required to be the earliest rank. */ 702 trace->band = RankAMBIG; /* Required to be the earliest rank. */
671 trace->emergency = FALSE; 703 trace->fix = PoolFix;
704 trace->fixClosure = NULL;
672 trace->chain = NULL; 705 trace->chain = NULL;
673 STATISTIC(trace->preTraceArenaReserved = ArenaReserved(arena)); 706 STATISTIC(trace->preTraceArenaReserved = ArenaReserved(arena));
674 trace->condemned = (Size)0; /* nothing condemned yet */ 707 trace->condemned = (Size)0; /* nothing condemned yet */
@@ -908,7 +941,7 @@ Rank TraceRankForAccess(Arena arena, Seg seg)
908 * 941 *
909 * .check.ambig.not: RankAMBIG segments never appear on the grey ring. 942 * .check.ambig.not: RankAMBIG segments never appear on the grey ring.
910 * The current tracer cannot support ambiguous reference except as 943 * The current tracer cannot support ambiguous reference except as
911 * roots, so it's a buf if we ever find any. This behaviour is not set 944 * roots, so it's a bug if we ever find any. This behaviour is not set
912 * in stone, it's possible to imagine changing the tracer so that we can 945 * in stone, it's possible to imagine changing the tracer so that we can
913 * support ambiguous objects one day. For example, a fully conservative 946 * support ambiguous objects one day. For example, a fully conservative
914 * non-moving mode. 947 * non-moving mode.
@@ -1180,23 +1213,23 @@ static Res traceScanSegRes(TraceSet ts, Rank rank, Arena arena, Seg seg)
1180 1213
1181/* traceScanSeg 1214/* traceScanSeg
1182 * 1215 *
1183 * Scans a segment without fail. May put the traces into emergency mode 1216 * Scans a segment, switching to emergency mode if there is an allocation
1184 * to ensure this. */ 1217 * failure.
1218 */
1185 1219
1186static void traceScanSeg(TraceSet ts, Rank rank, Arena arena, Seg seg) 1220static Res traceScanSeg(TraceSet ts, Rank rank, Arena arena, Seg seg)
1187{ 1221{
1188 Res res; 1222 Res res;
1189 1223
1190 res = traceScanSegRes(ts, rank, arena, seg); 1224 res = traceScanSegRes(ts, rank, arena, seg);
1191 if(res != ResOK) { 1225 if(ResIsAllocFailure(res)) {
1192 AVER(ResIsAllocFailure(res)); 1226 ArenaSetEmergency(arena, TRUE);
1193 traceSetSignalEmergency(ts, arena);
1194 res = traceScanSegRes(ts, rank, arena, seg); 1227 res = traceScanSegRes(ts, rank, arena, seg);
1195 /* Should be OK in emergency mode. */ 1228 /* Should be OK in emergency mode. */
1229 AVER(!ResIsAllocFailure(res));
1196 } 1230 }
1197 AVER(ResOK == res);
1198 1231
1199 return; 1232 return res;
1200} 1233}
1201 1234
1202 1235
@@ -1204,6 +1237,8 @@ static void traceScanSeg(TraceSet ts, Rank rank, Arena arena, Seg seg)
1204 1237
1205void TraceSegAccess(Arena arena, Seg seg, AccessSet mode) 1238void TraceSegAccess(Arena arena, Seg seg, AccessSet mode)
1206{ 1239{
1240 Res res;
1241
1207 AVERT(Arena, arena); 1242 AVERT(Arena, arena);
1208 AVERT(Seg, seg); 1243 AVERT(Seg, seg);
1209 1244
@@ -1226,9 +1261,13 @@ void TraceSegAccess(Arena arena, Seg seg, AccessSet mode)
1226 1261
1227 /* Pick set of traces to scan for: */ 1262 /* Pick set of traces to scan for: */
1228 TraceSet traces = arena->flippedTraces; 1263 TraceSet traces = arena->flippedTraces;
1229
1230 rank = TraceRankForAccess(arena, seg); 1264 rank = TraceRankForAccess(arena, seg);
1231 traceScanSeg(traces, rank, arena, seg); 1265 res = traceScanSeg(traces, rank, arena, seg);
1266
1267 /* Allocation failures should be handled by emergency mode, and we don't
1268 expect any other kind of failure in a normal GC that causes access
1269 faults. */
1270 AVER(res == ResOK);
1232 1271
1233 /* The pool should've done the job of removing the greyness that */ 1272 /* The pool should've done the job of removing the greyness that */
1234 /* was causing the segment to be protected, so that the mutator */ 1273 /* was causing the segment to be protected, so that the mutator */
@@ -1254,20 +1293,31 @@ void TraceSegAccess(Arena arena, Seg seg, AccessSet mode)
1254} 1293}
1255 1294
1256 1295
1257/* TraceFix -- fix a reference */ 1296/* TraceFix2 -- second stage of fixing a reference
1297 *
1298 * TraceFix is on the critical path. A one-instruction difference in the
1299 * early parts of this code will have a significant impact on overall run
1300 * time. The priority is to eliminate irrelevant references early and fast
1301 * using the colour information stored in the tract table.
1302 */
1258 1303
1259Res TraceFix(ScanState ss, Ref *refIO) 1304static Res TraceFix2(ScanState ss, Ref *refIO)
1260{ 1305{
1261 Ref ref; 1306 Ref ref;
1262 Tract tract; 1307 Tract tract;
1263 Pool pool;
1264 1308
1309 /* Special AVER macros are used on the critical path. */
1265 /* See <design/trace/#fix.noaver> */ 1310 /* See <design/trace/#fix.noaver> */
1266 AVERT_CRITICAL(ScanState, ss); 1311 AVERT_CRITICAL(ScanState, ss);
1267 AVER_CRITICAL(refIO != NULL); 1312 AVER_CRITICAL(refIO != NULL);
1268 1313
1269 ref = *refIO; 1314 ref = *refIO;
1270 1315
1316 /* The zone test should already have been passed by MPS_FIX1 in mps.h. */
1317 AVER_CRITICAL(ZoneSetInter(ss->white,
1318 ZoneSetAdd(ss->arena, ZoneSetEMPTY, ref)) !=
1319 ZoneSetEMPTY);
1320
1271 STATISTIC(++ss->fixRefCount); 1321 STATISTIC(++ss->fixRefCount);
1272 EVENT4(TraceFix, ss, refIO, ref, ss->rank); 1322 EVENT4(TraceFix, ss, refIO, ref, ss->rank);
1273 1323
@@ -1277,17 +1327,18 @@ Res TraceFix(ScanState ss, Ref *refIO)
1277 Seg seg; 1327 Seg seg;
1278 if(TRACT_SEG(&seg, tract)) { 1328 if(TRACT_SEG(&seg, tract)) {
1279 Res res; 1329 Res res;
1330 Pool pool;
1280 STATISTIC(++ss->segRefCount); 1331 STATISTIC(++ss->segRefCount);
1281 STATISTIC(++ss->whiteSegRefCount); 1332 STATISTIC(++ss->whiteSegRefCount);
1282 EVENT1(TraceFixSeg, seg); 1333 EVENT1(TraceFixSeg, seg);
1283 EVENT0(TraceFixWhite); 1334 EVENT0(TraceFixWhite);
1284 pool = TractPool(tract); 1335 pool = TractPool(tract);
1285 /* Could move the rank switch here from the class-specific */ 1336 res = (*ss->fix)(pool, ss, seg, refIO);
1286 /* fix methods. */
1287 res = PoolFix(pool, ss, seg, refIO);
1288 if(res != ResOK) { 1337 if(res != ResOK) {
1289 /* Fix protocol (de facto): if Fix fails, ref must be unchanged */ 1338 /* PoolFixEmergency should never fail. */
1290 /* Justification for this restriction: 1339 AVER_CRITICAL(ss->fix != PoolFixEmergency);
1340 /* Fix protocol (de facto): if Fix fails, ref must be unchanged
1341 * Justification for this restriction:
1291 * A: it simplifies; 1342 * A: it simplifies;
1292 * B: it's reasonable (given what may cause Fix to fail); 1343 * B: it's reasonable (given what may cause Fix to fail);
1293 * C: the code (here) already assumes this: it returns without 1344 * C: the code (here) already assumes this: it returns without
@@ -1296,6 +1347,14 @@ Res TraceFix(ScanState ss, Ref *refIO)
1296 AVER(*refIO == ref); 1347 AVER(*refIO == ref);
1297 return res; 1348 return res;
1298 } 1349 }
1350 } else {
1351 /* Only tracts with segments ought to have been condemned. */
1352 /* SegOfAddr FALSE => a ref into a non-seg Tract (poolmv etc) */
1353 /* .notwhite: ...But it should NOT be white.
1354 * [I assert this both from logic, and from inspection of the
1355 * current condemn code. RHSK 2010-11-30]
1356 */
1357 NOTREACHED;
1299 } 1358 }
1300 } else { 1359 } else {
1301 /* Tract isn't white. Don't compute seg for non-statistical */ 1360 /* Tract isn't white. Don't compute seg for non-statistical */
@@ -1322,56 +1381,18 @@ Res TraceFix(ScanState ss, Ref *refIO)
1322} 1381}
1323 1382
1324 1383
1325/* TraceFixEmergency -- fix a reference in emergency mode */ 1384/* mps_fix2 -- external interface to TraceFix
1385 *
1386 * We rely on compiler inlining to make this equivalent to TraceFix, because
1387 * the name "TraceFix" is pervasive in the MPS. That's also why this
1388 * function is in trace.c and not mpsi.c.
1389 */
1326 1390
1327Res TraceFixEmergency(ScanState ss, Ref *refIO) 1391mps_res_t mps_fix2(mps_ss_t mps_ss, mps_addr_t *mps_ref_io)
1328{ 1392{
1329 Ref ref; 1393 ScanState ss = (ScanState)mps_ss;
1330 Tract tract; 1394 Ref *refIO = (Ref *)mps_ref_io;
1331 Pool pool; 1395 return TraceFix2(ss, refIO);
1332
1333 AVERT(ScanState, ss);
1334 AVER(refIO != NULL);
1335
1336 ref = *refIO;
1337
1338 STATISTIC(++ss->fixRefCount);
1339 EVENT4(TraceFix, ss, refIO, ref, ss->rank);
1340
1341 TRACT_OF_ADDR(&tract, ss->arena, ref);
1342 if(tract) {
1343 if(TraceSetInter(TractWhite(tract), ss->traces) != TraceSetEMPTY) {
1344 Seg seg;
1345 if(TRACT_SEG(&seg, tract)) {
1346 STATISTIC(++ss->segRefCount);
1347 STATISTIC(++ss->whiteSegRefCount);
1348 EVENT1(TraceFixSeg, seg);
1349 EVENT0(TraceFixWhite);
1350 pool = TractPool(tract);
1351 PoolFixEmergency(pool, ss, seg, refIO);
1352 }
1353 } else {
1354 /* Tract isn't white. Don't compute seg for non-statistical */
1355 /* variety. See <design/trace/#fix.tractofaddr> */
1356 STATISTIC_STAT
1357 ({
1358 Seg seg;
1359 if(TRACT_SEG(&seg, tract)) {
1360 ++ss->segRefCount;
1361 EVENT1(TraceFixSeg, seg);
1362 }
1363 });
1364 }
1365 } else {
1366 /* See <design/trace/#exact.legal> */
1367 AVER(ss->rank < RankEXACT ||
1368 !ArenaIsReservedAddr(ss->arena, ref));
1369 }
1370
1371 /* See <design/trace/#fix.fixed.all> */
1372 ss->fixedSummary = RefSetAdd(ss->arena, ss->fixedSummary, *refIO);
1373
1374 return ResOK;
1375} 1396}
1376 1397
1377 1398
@@ -1430,7 +1451,7 @@ void TraceScanSingleRef(TraceSet ts, Rank rank, Arena arena,
1430 1451
1431 res = traceScanSingleRefRes(ts, rank, arena, seg, refIO); 1452 res = traceScanSingleRefRes(ts, rank, arena, seg, refIO);
1432 if(res != ResOK) { 1453 if(res != ResOK) {
1433 traceSetSignalEmergency(ts, arena); 1454 ArenaSetEmergency(arena, TRUE);
1434 res = traceScanSingleRefRes(ts, rank, arena, seg, refIO); 1455 res = traceScanSingleRefRes(ts, rank, arena, seg, refIO);
1435 /* Ought to be OK in emergency mode now. */ 1456 /* Ought to be OK in emergency mode now. */
1436 } 1457 }
@@ -1652,7 +1673,18 @@ static void TraceStartGenDesc_diag(GenDesc desc, Bool top, Index i)
1652 } 1673 }
1653} 1674}
1654 1675
1655void TraceStart(Trace trace, double mortality, double finishingTime) 1676
1677/* TraceStart -- start a trace whose white set has been established
1678 *
1679 * The main job of TraceStart is to set up the grey list for a trace. The
1680 * trace is first created with TraceCreate, objects are whitened, then
1681 * TraceStart is called to initialise the tracing process.
1682 *
1683 * NOTE: At present, TraceStart also flips the mutator, so there is no
1684 * grey-mutator tracing.
1685 */
1686
1687Res TraceStart(Trace trace, double mortality, double finishingTime)
1656{ 1688{
1657 Arena arena; 1689 Arena arena;
1658 Res res; 1690 Res res;
@@ -1783,9 +1815,7 @@ void TraceStart(Trace trace, double mortality, double finishingTime)
1783 TracePostStartMessage(trace); 1815 TracePostStartMessage(trace);
1784 1816
1785 /* All traces must flip at beginning at the moment. */ 1817 /* All traces must flip at beginning at the moment. */
1786 traceFlip(trace); 1818 return traceFlip(trace);
1787
1788 return;
1789} 1819}
1790 1820
1791 1821
@@ -1801,6 +1831,7 @@ void TraceStart(Trace trace, double mortality, double finishingTime)
1801void TraceQuantum(Trace trace) 1831void TraceQuantum(Trace trace)
1802{ 1832{
1803 Size pollEnd; 1833 Size pollEnd;
1834 Arena arena = trace->arena;
1804 1835
1805 pollEnd = traceWorkClock(trace) + trace->rate; 1836 pollEnd = traceWorkClock(trace) + trace->rate;
1806 do { 1837 do {
@@ -1810,13 +1841,16 @@ void TraceQuantum(Trace trace)
1810 NOTREACHED; 1841 NOTREACHED;
1811 break; 1842 break;
1812 case TraceFLIPPED: { 1843 case TraceFLIPPED: {
1813 Arena arena = trace->arena;
1814 Seg seg; 1844 Seg seg;
1815 Rank rank; 1845 Rank rank;
1816 1846
1817 if(traceFindGrey(&seg, &rank, arena, trace->ti)) { 1847 if(traceFindGrey(&seg, &rank, arena, trace->ti)) {
1848 Res res;
1818 AVER((SegPool(seg)->class->attr & AttrSCAN) != 0); 1849 AVER((SegPool(seg)->class->attr & AttrSCAN) != 0);
1819 traceScanSeg(TraceSetSingle(trace), rank, arena, seg); 1850 res = traceScanSeg(TraceSetSingle(trace), rank, arena, seg);
1851 /* Allocation failures should be handled by emergency mode, and we
1852 don't expect any other error in a normal GC trace. */
1853 AVER(res == ResOK);
1820 } else { 1854 } else {
1821 trace->state = TraceRECLAIM; 1855 trace->state = TraceRECLAIM;
1822 } 1856 }
@@ -1830,7 +1864,7 @@ void TraceQuantum(Trace trace)
1830 break; 1864 break;
1831 } 1865 }
1832 } while(trace->state != TraceFINISHED 1866 } while(trace->state != TraceFINISHED
1833 && (trace->emergency || traceWorkClock(trace) < pollEnd)); 1867 && (ArenaEmergency(arena) || traceWorkClock(trace) < pollEnd));
1834} 1868}
1835 1869
1836/* TraceStartCollectAll: start a trace which condemns everything in 1870/* TraceStartCollectAll: start a trace which condemns everything in
@@ -1859,12 +1893,25 @@ Res TraceStartCollectAll(Trace *traceReturn, Arena arena, int why)
1859 /* Run out of time, should really try a smaller collection. @@@@ */ 1893 /* Run out of time, should really try a smaller collection. @@@@ */
1860 finishingTime = 0.0; 1894 finishingTime = 0.0;
1861 } 1895 }
1862 TraceStart(trace, TraceTopGenMortality, finishingTime); 1896 res = TraceStart(trace, TraceTopGenMortality, finishingTime);
1897 if (res != ResOK)
1898 goto failStart;
1863 *traceReturn = trace; 1899 *traceReturn = trace;
1864 return ResOK; 1900 return ResOK;
1865 1901
1902failStart:
1903 /* TODO: We can't back-out from a failed TraceStart that has
1904 already done some scanning, so this error path is somewhat bogus if it
1905 destroys the trace. In the current system, TraceStartCollectAll is
1906 only used for a normal GC, so TraceStart should not fail and this case
1907 should never be reached. There's a chance the mutator will survive
1908 if the assertion isn't hit, so drop through anyway. */
1909 NOTREACHED;
1866failCondemn: 1910failCondemn:
1867 TraceDestroy(trace); 1911 TraceDestroy(trace);
1912 /* We don't know how long it'll be before another collection. Make sure
1913 the next one starts in normal mode. */
1914 ArenaSetEmergency(arena, FALSE);
1868 return res; 1915 return res;
1869} 1916}
1870 1917
@@ -1933,7 +1980,9 @@ Size TracePoll(Globals globals)
1933 goto failCondemn; 1980 goto failCondemn;
1934 trace->chain = firstChain; 1981 trace->chain = firstChain;
1935 ChainStartGC(firstChain, trace); 1982 ChainStartGC(firstChain, trace);
1936 TraceStart(trace, mortality, trace->condemned * TraceWorkFactor); 1983 res = TraceStart(trace, mortality, trace->condemned * TraceWorkFactor);
1984 /* We don't expect normal GC traces to fail to start. */
1985 AVER(res == ResOK);
1937 scannedSize = traceWorkClock(trace); 1986 scannedSize = traceWorkClock(trace);
1938 } 1987 }
1939 } /* (dynamicDeferral > 0.0) */ 1988 } /* (dynamicDeferral > 0.0) */
@@ -1950,12 +1999,18 @@ Size TracePoll(Globals globals)
1950 scannedSize = traceWorkClock(trace) - oldScanned; 1999 scannedSize = traceWorkClock(trace) - oldScanned;
1951 if(trace->state == TraceFINISHED) { 2000 if(trace->state == TraceFINISHED) {
1952 TraceDestroy(trace); 2001 TraceDestroy(trace);
2002 /* A trace finished, and hopefully reclaimed some memory, so clear any
2003 emergency. */
2004 ArenaSetEmergency(arena, FALSE);
1953 } 2005 }
1954 } 2006 }
1955 return scannedSize; 2007 return scannedSize;
1956 2008
1957failCondemn: 2009failCondemn:
1958 TraceDestroy(trace); 2010 TraceDestroy(trace);
2011 /* This is an unlikely case, but clear the emergency flag so the next attempt
2012 starts normally. */
2013 ArenaSetEmergency(arena, FALSE);
1959failStart: 2014failStart:
1960 return (Size)0; 2015 return (Size)0;
1961} 2016}