From 1ece8c4ccb23e7cda087e366f97f139cdb4ea948 Mon Sep 17 00:00:00 2001
From: David Lovemore
Date: Mon, 10 Sep 2012 15:17:38 +0100
Subject: [PATCH] Remove whiteMinAlign handling code which was used to
 calculate a mask for scanning in TraceScanAreaTagged. For now we use
 sizeof(Word)-1, which will work for Dylan and Configura.
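
As an illustration only (not part of this change), here is a minimal
standalone sketch of how a low-bit mask such as sizeof(Word)-1 separates
word-aligned candidate references from tagged non-references during an
ambiguous scan. The helper name maybe_reference and the Word typedef below
are stand-ins for illustration, not the MPS API:

    #include <stdint.h>
    #include <stdio.h>

    typedef uintptr_t Word;              /* stand-in for the MPS Word type */

    /* A word can only be a reference if its low bits are clear, i.e. it is
     * aligned to sizeof(Word); tagged values fail this test and are skipped. */
    static int maybe_reference(Word w, Word mask)
    {
      return (w & mask) == 0;
    }

    int main(void)
    {
      Word mask = sizeof(Word) - 1;      /* e.g. 7 on a 64-bit target */
      printf("%d\n", maybe_reference((Word)0x1000, mask));  /* 1: aligned */
      printf("%d\n", maybe_reference((Word)0x1001, mask));  /* 0: tagged */
      return 0;
    }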
Copied from Perforce
Change: 179393
ServerID: perforce.ravenbrook.com
---
mps/code/mpmst.h | 1 -
mps/code/trace.c | 36 ++++++++++--------------------------
2 files changed, 10 insertions(+), 27 deletions(-)
diff --git a/mps/code/mpmst.h b/mps/code/mpmst.h
index 6f23fab8052..5c21c60a6a0 100644
--- a/mps/code/mpmst.h
+++ b/mps/code/mpmst.h
@@ -474,7 +474,6 @@ typedef struct TraceStruct {
TraceId ti; /* index into TraceSets */
Arena arena; /* owning arena */
int why; /* why the trace began */
- Align whiteMinAlign; /* minimum alignment of references in white set */
ZoneSet white; /* zones in the white set */
ZoneSet mayMove; /* zones containing possibly moving objs */
TraceState state; /* current state of trace */
diff --git a/mps/code/trace.c b/mps/code/trace.c
index ffe800e151e..bb4d4f1cd99 100644
--- a/mps/code/trace.c
+++ b/mps/code/trace.c
@@ -155,7 +155,6 @@ Bool TraceCheck(Trace trace)
CHECKL(TraceIdCheck(trace->ti));
CHECKL(trace == &trace->arena->trace[trace->ti]);
CHECKL(TraceSetIsMember(trace->arena->busyTraces, trace));
- CHECKL(AlignCheck(trace->whiteMinAlign));
CHECKL(ZoneSetSub(trace->mayMove, trace->white));
/* Use trace->state to check more invariants. */
switch(trace->state) {
@@ -369,10 +368,6 @@ Res TraceAddWhite(Trace trace, Seg seg)
trace->mayMove = ZoneSetUnion(trace->mayMove,
ZoneSetOfSeg(trace->arena, seg));
}
- /* This is used to eliminate unaligned references in TraceScanAreaTagged */
- if(pool->alignment < trace->whiteMinAlign) {
- trace->whiteMinAlign = pool->alignment;
- }
}
return ResOK;
@@ -694,7 +689,6 @@ found:
trace->arena = arena;
trace->why = why;
- trace->whiteMinAlign = (Align)1 << (MPS_WORD_WIDTH - 1);
trace->white = ZoneSetEMPTY;
trace->mayMove = ZoneSetEMPTY;
trace->ti = ti;
@@ -1494,36 +1488,26 @@ Res TraceScanArea(ScanState ss, Addr *base, Addr *limit)
/* TraceScanAreaTagged -- scan contiguous area of tagged references
*
- * This is as TraceScanArea except words are only fixed they are tagged
- * as zero according to the minimum alignment of the condemned set.
- */
+ * .tagging: This is as TraceScanArea except that words are only fixed if
+ * they are tagged as zero according to the alignment of a Word.
+ *
+ * See also PoolSingleAccess.
+ *
+ * TODO: Generalise the handling of tags so that pools can decide how
+ * their objects are tagged. This may use the user-defined format
+ * to describe how tags are done. */
Res TraceScanAreaTagged(ScanState ss, Addr *base, Addr *limit)
{
- TraceSet ts;
- TraceId ti;
- Trace trace;
- Arena arena;
Word mask;
- AVERT(ScanState, ss);
-
- /* This calculation of the mask could be moved to ScanStateInit
- * but there is little point as we probably only do a couple of ambiguous
- * scan per thread per flip. */
/* NOTE: An optimisation that may be worth considering is setting some of the
 * top bits in the mask as an early catch of addresses outside the arena.
 * This might help slightly on 64-bit Windows. However, these are picked up
 * soon afterwards by later checks. The bottom bits are more important
 * to check as we ignore them in AMCFix, so a non-reference could
 * otherwise end up pinning an object. */
- mask = (Word)-1;
- ts = ss->traces;
- arena = ss->arena;
- TRACE_SET_ITER(ti, trace, ts, arena)
- AVER(WordIsP2(trace->whiteMinAlign));
- mask = mask & (trace->whiteMinAlign - 1);
- TRACE_SET_ITER_END(ti, trace, ts, arena);
-
+ mask = sizeof(Word) - 1;
+ AVER(WordIsP2(mask + 1));
return TraceScanAreaMasked(ss, base, limit, mask);
}
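
For context, a rough sketch (an assumption about the shape, not the actual
MPS implementation) of what a masked area scan such as TraceScanAreaMasked
does with the mask: every word in [base, limit) whose masked low bits are
zero is treated as a possible reference and fixed, everything else is
skipped. The names scan_area_masked, fix_ref, Addr and Word here are
illustrative stand-ins:

    #include <stdint.h>

    typedef uintptr_t Word;                  /* stand-in for the MPS Word type */
    typedef void *Addr;                      /* stand-in for the MPS Addr type */

    /* Hypothetical stand-in for fixing one possible reference slot. */
    static void fix_ref(Addr *p) { (void)p; }

    /* Scan [base, limit), fixing only words whose masked low bits are zero. */
    static void scan_area_masked(Addr *base, Addr *limit, Word mask)
    {
      Addr *p;
      for (p = base; p < limit; ++p) {
        Word w = (Word)*p;
        if ((w & mask) != 0)
          continue;                          /* tagged or unaligned: skip */
        fix_ref(p);                          /* aligned: candidate reference */
      }
    }

    int main(void)
    {
      Addr slots[2];
      slots[0] = (Addr)(Word)0x1000;         /* aligned: passed to fix_ref */
      slots[1] = (Addr)(Word)0x1001;         /* tagged: skipped */
      scan_area_masked(slots, slots + 2, sizeof(Word) - 1);
      return 0;
    }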