diff options
| author | Nick Barnes | 2001-10-31 14:40:56 +0000 |
|---|---|---|
| committer | Nick Barnes | 2001-10-31 14:40:56 +0000 |
| commit | 7acfca905d76140f4cc0b09c9a12de237de364cd (patch) | |
| tree | 3ed8babfa3a73d30f29e08ca5d5adcda4ca4e826 /mps/code/shield.c | |
| parent | b7ce4893f9902d57cd67ac9a92fa6c3d5a8fc833 (diff) | |
| download | emacs-7acfca905d76140f4cc0b09c9a12de237de364cd.tar.gz emacs-7acfca905d76140f4cc0b09c9a12de237de364cd.zip | |
Branch imports for masters.
Copied from Perforce
Change: 23678
ServerID: perforce.ravenbrook.com
Diffstat (limited to 'mps/code/shield.c')
| -rw-r--r-- | mps/code/shield.c | 305 |
1 files changed, 305 insertions, 0 deletions
diff --git a/mps/code/shield.c b/mps/code/shield.c new file mode 100644 index 00000000000..c472e48d71e --- /dev/null +++ b/mps/code/shield.c | |||
| @@ -0,0 +1,305 @@ | |||
| 1 | /* impl.c.shield: SHIELD IMPLEMENTATION | ||
| 2 | * | ||
| 3 | * $HopeName: MMsrc!shield.c(trunk.15) $ | ||
| 4 | * Copyright (C) 1997 Harlequin Limited. All rights reserved. | ||
| 5 | * | ||
| 6 | * See: idea.shield, design.mps.shield. | ||
| 7 | * | ||
| 8 | * This implementation of the shield avoids suspending threads for | ||
| 9 | * as long as possible. When threads are suspended, it maintains a | ||
| 10 | * cache of covered segments where the desired and actual protection | ||
| 11 | * do not match. This cache is flushed on leaving the shield. | ||
| 12 | * | ||
| 13 | * | ||
| 14 | * Definitions | ||
| 15 | * | ||
| 16 | * .def.synced: a seg is synced if the prot and shield modes are the | ||
| 17 | * same, and unsynced otherwise. | ||
| 18 | * .def.depth: the depth of a segment is defined as | ||
| 19 | * depth == #exposes - #covers + I(in cache), where | ||
| 20 | * #exposes = the total number of times the seg has been exposed | ||
| 21 | * #covers = the total number of times the seg has been covered | ||
| 22 | * I(in cache) = 1 if the segment is in the cache | ||
| 23 | * 0 otherwise | ||
| 24 | * The cache is initially empty and cover should not be called | ||
| 25 | * without a matching expose, so this figure should always be | ||
| 26 | * non-negative. | ||
| 27 | * .def.total.depth: The total depth is the sum of the depth over | ||
| 28 | * all segments | ||
| 29 | * .def.outside: being outside the shield is being between calls | ||
| 30 | * to leave and enter, and similarly .def.inside: being inside the | ||
| 31 | * shield is being between calls to enter and leave. | ||
| 32 | * .def.suspended: suspended is true iff the threads are suspended | ||
| 33 | * | ||
| 34 | * | ||
| 35 | * Properties | ||
| 36 | * | ||
| 37 | * .prop.outside.running: The mutator may not be suspended while | ||
| 38 | * outside the shield. | ||
| 39 | * .prop.mutator.access: An attempt by the mutator to access | ||
| 40 | * shielded memory must cause an ArenaAccess. | ||
| 41 | * .prop.inside.access: Inside the shield it must be possible to access | ||
| 42 | * all unshielded segments and all exposed segments. | ||
| 43 | * | ||
| 44 | * | ||
| 45 | * Invariants | ||
| 46 | * | ||
| 47 | * These invariants are maintained by the code. | ||
| 48 | * | ||
| 49 | * .inv.outside.running: The mutator is running while outside the | ||
| 50 | * shield. | ||
| 51 | * .inv.unsynced.suspended: If any segment is not synced, | ||
| 52 | * the mutator is suspended. | ||
| 53 | * .inv.unsynced.depth: All unsynced segments have positive depth. | ||
| 54 | * .inv.outside.depth: The total depth is zero while outside the shield. | ||
| 55 | * .inv.prot.shield: The prot mode is never more than the shield mode. | ||
| 56 | * .inv.expose.prot: An exposed seg is not protected. | ||
| 57 | * | ||
| 58 | * Hints at proofs of properties from invariants | ||
| 59 | * | ||
| 60 | * inv.outside.running directly ensures prop.outside.running. | ||
| 61 | * | ||
| 62 | * As the depth of a segment cannot be negative | ||
| 63 | * total depth == 0 => for all segments, depth == 0 | ||
| 64 | * => all segs are synced (by .inv.unsynced.depth) | ||
| 65 | * | ||
| 66 | * If the mutator is running then all segs must be synced | ||
| 67 | * (.inv.unsynced.suspended). This means that the hardware protection | ||
| 68 | * (prot mode) must reflect the software protection (shield mode). | ||
| 69 | * Hence all shielded memory will be hardware protected while the | ||
| 70 | * mutator is running. This ensures .prop.mutator.access. | ||
| 71 | * | ||
| 72 | * inv.prot.shield and inv.expose.prot ensure prop.inside.access. | ||
| 73 | */ | ||
| 74 | |||
| 75 | #include "mpm.h" | ||
| 76 | |||
| 77 | SRCID(shield, "$HopeName: MMsrc!shield.c(trunk.15) $"); | ||
| 78 | |||
| 79 | |||
| 80 | void (ShieldSuspend)(Arena arena) | ||
| 81 | { | ||
| 82 | AVERT(Arena, arena); | ||
| 83 | AVER(arena->insideShield); | ||
| 84 | |||
| 85 | if (!arena->suspended) { | ||
| 86 | ThreadRingSuspend(ArenaThreadRing(arena)); | ||
| 87 | arena->suspended = TRUE; | ||
| 88 | } | ||
| 89 | } | ||
| 90 | |||
| 91 | |||
/* ShieldResume -- note that the mutator may be resumed
 *
 * Does not restart the threads itself: the suspended flag stays set,
 * and the threads are actually restarted by ShieldLeave after the
 * cache has been flushed and shDepth has returned to zero.
 */
void (ShieldResume)(Arena arena)
{
  AVERT(Arena, arena);
  AVER(arena->insideShield);
  AVER(arena->suspended);
  /* It is only correct to actually resume the mutator here if shDepth is 0 */
}
| 99 | |||
| 100 | |||
| 101 | /* This ensures actual prot mode does not include mode */ | ||
| 102 | static void protLower(Arena arena, Seg seg, AccessSet mode) | ||
| 103 | { | ||
| 104 | /* design.mps.trace.fix.noaver */ | ||
| 105 | AVERT_CRITICAL(Arena, arena); | ||
| 106 | UNUSED(arena); | ||
| 107 | AVERT_CRITICAL(Seg, seg); | ||
| 108 | |||
| 109 | if (SegPM(seg) & mode) { | ||
| 110 | SegSetPM(seg, SegPM(seg) & ~mode); | ||
| 111 | ProtSet(SegBase(seg), SegLimit(seg), SegPM(seg)); | ||
| 112 | } | ||
| 113 | } | ||
| 114 | |||
| 115 | |||
| 116 | static void sync(Arena arena, Seg seg) | ||
| 117 | { | ||
| 118 | AVERT(Arena, arena); | ||
| 119 | AVERT(Seg, seg); | ||
| 120 | |||
| 121 | if (SegPM(seg) != SegSM(seg)) { | ||
| 122 | ProtSet(SegBase(seg), SegLimit(seg), SegSM(seg)); | ||
| 123 | SegSetPM(seg, SegSM(seg)); | ||
| 124 | /* inv.prot.shield */ | ||
| 125 | } | ||
| 126 | } | ||
| 127 | |||
| 128 | |||
| 129 | static void flush(Arena arena, Size i) | ||
| 130 | { | ||
| 131 | Seg seg; | ||
| 132 | AVERT(Arena, arena); | ||
| 133 | AVER(i < arena->shCacheLimit); | ||
| 134 | |||
| 135 | seg = arena->shCache[i]; | ||
| 136 | if (seg == NULL) return; | ||
| 137 | AVERT(Seg, seg); | ||
| 138 | |||
| 139 | AVER(arena->shDepth > 0); | ||
| 140 | AVER(SegDepth(seg) > 0); | ||
| 141 | --arena->shDepth; | ||
| 142 | SegSetDepth(seg, SegDepth(seg) - 1); | ||
| 143 | |||
| 144 | if (SegDepth(seg) == 0) | ||
| 145 | sync(arena, seg); | ||
| 146 | |||
| 147 | arena->shCache[i] = NULL; | ||
| 148 | } | ||
| 149 | |||
| 150 | |||
/* cache -- re-establish the shield invariants for seg
 *
 * If the segment is out of sync, either sync it, or ensure
 * depth > 0, and the arena is suspended
 * (.inv.unsynced.suspended, .inv.unsynced.depth).
 */
static void cache(Arena arena, Seg seg)
{
  /* design.mps.trace.fix.noaver */
  AVERT_CRITICAL(Arena, arena);
  AVERT_CRITICAL(Seg, seg);

  if (SegSM(seg) == SegPM(seg)) return;  /* already synced, nothing to do */
  if (SegDepth(seg) > 0) {
    /* unsynced with positive depth: the mutator must be stopped, */
    /* .inv.unsynced.suspended */
    ShieldSuspend(arena);
    return;
  }
  if (ShieldCacheSIZE == 0 || !arena->suspended)
    sync(arena, seg);
  else {
    /* Defer the sync: entering the cache counts as one unit of */
    /* depth (.def.depth). */
    SegSetDepth(seg, SegDepth(seg) + 1);
    ++arena->shDepth;
    AVER(arena->shDepth > 0);
    AVER(SegDepth(seg) > 0);
    AVER(arena->shCacheLimit <= ShieldCacheSIZE);
    AVER(arena->shCacheI < arena->shCacheLimit);
    /* Evict any previous occupant of this slot before reusing it. */
    flush(arena, arena->shCacheI);
    arena->shCache[arena->shCacheI] = seg;
    ++arena->shCacheI;
    /* Advance the insertion point circularly; shCacheLimit marks the */
    /* in-use portion of the cache and grows until it reaches full size. */
    if (arena->shCacheI == ShieldCacheSIZE)
      arena->shCacheI = 0;
    if (arena->shCacheI == arena->shCacheLimit)
      ++arena->shCacheLimit;
  }
}
| 183 | |||
| 184 | |||
/* ShieldRaise -- add mode to the shield mode of seg
 *
 * The hardware protection is brought up to date, or its update
 * deferred via the cache, by the call to cache().
 */
void (ShieldRaise) (Arena arena, Seg seg, AccessSet mode)
{
  /* .seg.broken: Seg's shield invariants may not be true at */
  /* this point (this function is called to enforce them) so we */
  /* can't check seg. Nor can we check arena as that checks the */
  /* segs in the cache. */

  AVER((SegSM(seg) & mode) == AccessSetEMPTY);  /* mode not already raised */
  SegSetSM(seg, SegSM(seg) | mode); /* inv.prot.shield preserved */

  /* ensure inv.unsynced.suspended & inv.unsynced.depth */
  cache(arena, seg);
  AVERT(Arena, arena);
  AVERT(Seg, seg);
}
| 200 | |||
| 201 | |||
/* ShieldLower -- remove mode from the shield mode of seg,
 * lowering the hardware protection to match (protLower).
 */
void (ShieldLower)(Arena arena, Seg seg, AccessSet mode)
{
  /* Don't check seg or arena, see .seg.broken */
  AVER((SegSM(seg) & mode) == mode);  /* mode must have been raised */
  /* synced(seg) is not changed by the following
   * preserving inv.unsynced.suspended
   * Also inv.prot.shield preserved
   */
  SegSetSM(seg, SegSM(seg) & ~mode);
  protLower(arena, seg, mode);
  AVERT(Arena, arena);
  AVERT(Seg, seg);
}
| 215 | |||
| 216 | |||
| 217 | void (ShieldEnter)(Arena arena) | ||
| 218 | { | ||
| 219 | Size i; | ||
| 220 | |||
| 221 | AVERT(Arena, arena); | ||
| 222 | AVER(!arena->insideShield); | ||
| 223 | AVER(arena->shDepth == 0); | ||
| 224 | AVER(!arena->suspended); | ||
| 225 | AVER(arena->shCacheLimit <= ShieldCacheSIZE); | ||
| 226 | AVER(arena->shCacheI < arena->shCacheLimit); | ||
| 227 | for(i = 0; i < arena->shCacheLimit; i++) | ||
| 228 | AVER(arena->shCache[i] == NULL); | ||
| 229 | |||
| 230 | arena->shCacheI = (Size)0; | ||
| 231 | arena->shCacheLimit = (Size)1; | ||
| 232 | arena->insideShield = TRUE; | ||
| 233 | } | ||
| 234 | |||
| 235 | |||
| 236 | /* .shield.flush: Flush empties the shield cache. | ||
| 237 | * This needs to be called before segments are destroyed as there | ||
| 238 | * may be references to them in the cache. | ||
| 239 | */ | ||
| 240 | void (ShieldFlush)(Arena arena) | ||
| 241 | { | ||
| 242 | Size i; | ||
| 243 | |||
| 244 | for(i = 0; i < arena->shCacheLimit; ++i) { | ||
| 245 | if (arena->shDepth == 0) | ||
| 246 | break; | ||
| 247 | flush(arena, i); | ||
| 248 | } | ||
| 249 | } | ||
| 250 | |||
| 251 | |||
/* ShieldLeave -- leave the shield, .def.outside
 *
 * Flushes the cache (restoring .inv.outside.depth) and restarts the
 * mutator threads if they were suspended (restoring
 * .inv.outside.running).
 */
void (ShieldLeave)(Arena arena)
{
  AVERT(Arena, arena);
  AVER(arena->insideShield);

  ShieldFlush(arena);
  /* Cache is empty so inv.outside.depth holds */
  AVER(arena->shDepth == 0);

  /* Ensuring the mutator is running at this point
   * guarantees inv.outside.running */
  if (arena->suspended) {
    ThreadRingResume(ArenaThreadRing(arena));
    arena->suspended = FALSE;
  }
  arena->insideShield = FALSE;
}
| 269 | |||
| 270 | |||
/* ShieldExpose -- give the MPS access to seg, incrementing its depth
 * (.def.depth)
 *
 * If the segment is currently hardware-protected, the mutator is
 * suspended before the protection is removed, preserving
 * .prop.mutator.access.
 */
void (ShieldExpose)(Arena arena, Seg seg)
{
  AccessSet mode = AccessREAD | AccessWRITE;
  /* design.mps.trace.fix.noaver */
  AVERT_CRITICAL(Arena, arena);
  AVER_CRITICAL(arena->insideShield);

  SegSetDepth(seg, SegDepth(seg) + 1);
  ++arena->shDepth;
  /* design.mps.trace.fix.noaver */
  AVER_CRITICAL(arena->shDepth > 0);
  AVER_CRITICAL(SegDepth(seg) > 0);
  /* Suspend before unprotecting, so the mutator never runs while */
  /* the seg is unsynced, .inv.unsynced.suspended. */
  if (SegPM(seg) & mode)
    ShieldSuspend(arena);

  /* This ensures inv.expose.prot */
  protLower(arena, seg, mode);
}
| 289 | |||
| 290 | |||
/* ShieldCover -- end access to seg, matching an earlier ShieldExpose
 *
 * Decrements the depth (.def.depth) and re-establishes the shield
 * invariants for seg via cache(), which either re-protects the seg
 * or defers the protection through the cache.
 */
void (ShieldCover)(Arena arena, Seg seg)
{
  /* design.mps.trace.fix.noaver */
  AVERT_CRITICAL(Arena, arena);
  AVERT_CRITICAL(Seg, seg);
  /* an exposed seg is not protected, .inv.expose.prot */
  AVER_CRITICAL(SegPM(seg) == AccessSetEMPTY);

  AVER_CRITICAL(arena->shDepth > 0);
  AVER_CRITICAL(SegDepth(seg) > 0);
  SegSetDepth(seg, SegDepth(seg) - 1);
  --arena->shDepth;

  /* ensure inv.unsynced.depth */
  cache(arena, seg);
}