aboutsummaryrefslogtreecommitdiffstats
path: root/mps/code/pool.c
diff options
context:
space:
mode:
authorDavid Lovemore2012-08-30 13:48:41 +0100
committerDavid Lovemore2012-08-30 13:48:41 +0100
commited8cd9432d6200bbd8e4f833fcd7b18e9f2af18c (patch)
tree2bea397e4d75c998b4b7e0db053c387a15d5ae61 /mps/code/pool.c
parentc986c195abc96dcffc6a33a19b05ef47e154fd83 (diff)
downloademacs-ed8cd9432d6200bbd8e4f833fcd7b18e9f2af18c.tar.gz
emacs-ed8cd9432d6200bbd8e4f833fcd7b18e9f2af18c.zip
In tracescanareatagged use the alignments of pools in the condemned set to determine mask.
Copied from Perforce Change: 179117 ServerID: perforce.ravenbrook.com
Diffstat (limited to 'mps/code/pool.c')
-rw-r--r--mps/code/pool.c2
1 file changed, 2 insertions(+), 0 deletions(-)
diff --git a/mps/code/pool.c b/mps/code/pool.c
index e2578cf3497..55e70ff1462 100644
--- a/mps/code/pool.c
+++ b/mps/code/pool.c
@@ -305,6 +305,8 @@ Res PoolAlloc(Addr *pReturn, Pool pool, Size size,
   /* .hasaddr.critical: The PoolHasAddr check is expensive, and in */
   /* allocation-bound programs this is on the critical path. */
   AVER_CRITICAL(PoolHasAddr(pool, *pReturn));
+  /* All allocations should be aligned to the pool's alignment */
+  AVER_CRITICAL(AddrIsAligned(*pReturn, pool->alignment));
 
   /* All PoolAllocs should advance the allocation clock, so we count */
   /* it all in the fillMutatorSize field. */