1
Fork 0
mirror of git://git.sv.gnu.org/emacs.git synced 2026-03-23 07:12:12 -07:00

Move scan method from pool class to segment class.

Copied from Perforce
 Change: 193012
 ServerID: perforce.ravenbrook.com
This commit is contained in:
Gareth Rees 2017-03-29 19:17:17 +01:00
parent db5db0a9f3
commit 00a4ade456
21 changed files with 276 additions and 317 deletions

View file

@ -223,7 +223,6 @@ extern void PoolFree(Pool pool, Addr old, Size size);
extern Res PoolTraceBegin(Pool pool, Trace trace);
extern Res PoolAccess(Pool pool, Seg seg, Addr addr,
AccessSet mode, MutatorContext context);
extern Res PoolScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg);
extern Res PoolFix(Pool pool, ScanState ss, Seg seg, Addr *refIO);
extern Res PoolFixEmergency(Pool pool, ScanState ss, Seg seg, Addr *refIO);
extern void PoolWalk(Pool pool, Seg seg, FormattedObjectsVisitor f,
@ -655,6 +654,7 @@ extern Res SegSplit(Seg *segLoReturn, Seg *segHiReturn, Seg seg, Addr at);
extern Res SegWhiten(Seg seg, Trace trace);
extern void SegGreyen(Seg seg, Trace trace);
extern void SegBlacken(Seg seg, TraceSet traceSet);
extern Res SegScan(Bool *totalReturn, Seg seg, ScanState ss);
extern void SegReclaim(Seg seg, Trace trace);
extern Res SegAbsDescribe(Inst seg, mps_lib_FILE *stream, Count depth);
extern Res SegDescribe(Seg seg, mps_lib_FILE *stream, Count depth);

View file

@ -60,7 +60,6 @@ typedef struct mps_pool_class_s {
PoolBufferFillMethod bufferFill; /* out-of-line reserve */
PoolBufferEmptyMethod bufferEmpty; /* out-of-line commit */
PoolAccessMethod access; /* handles read/write accesses */
PoolScanMethod scan; /* find references during tracing */
PoolFixMethod fix; /* referent reachable during tracing */
PoolFixMethod fixEmergency; /* as fix, no failure allowed */
PoolRampBeginMethod rampBegin;/* begin a ramp pattern */
@ -227,6 +226,7 @@ typedef struct SegClassStruct {
SegWhitenMethod whiten; /* whiten objects */
SegGreyenMethod greyen; /* greyen non-white objects */
SegBlackenMethod blacken; /* blacken grey objects without scanning */
SegScanMethod scan; /* find references during tracing */
SegReclaimMethod reclaim; /* reclaim dead objects after tracing */
Sig sig; /* .class.end-sig */
} SegClassStruct;

View file

@ -167,6 +167,7 @@ typedef Res (*SegSplitMethod)(Seg seg, Seg segHi,
typedef Res (*SegWhitenMethod)(Seg seg, Trace trace);
typedef void (*SegGreyenMethod)(Seg seg, Trace trace);
typedef void (*SegBlackenMethod)(Seg seg, TraceSet traceSet);
typedef Res (*SegScanMethod)(Bool *totalReturn, Seg seg, ScanState ss);
typedef void (*SegReclaimMethod)(Seg seg, Trace trace);
@ -197,8 +198,6 @@ typedef void (*PoolBufferEmptyMethod)(Pool pool, Buffer buffer,
Addr init, Addr limit);
typedef Res (*PoolAccessMethod)(Pool pool, Seg seg, Addr addr,
AccessSet mode, MutatorContext context);
typedef Res (*PoolScanMethod)(Bool *totalReturn, ScanState ss,
Pool pool, Seg seg);
typedef Res (*PoolFixMethod)(Pool pool, ScanState ss, Seg seg, Ref *refIO);
typedef void (*PoolRampBeginMethod)(Pool pool, Buffer buf, Bool collectAll);
typedef void (*PoolRampEndMethod)(Pool pool, Buffer buf);

View file

@ -48,7 +48,6 @@ Bool PoolClassCheck(PoolClass klass)
CHECKL(FUNCHECK(klass->bufferFill));
CHECKL(FUNCHECK(klass->bufferEmpty));
CHECKL(FUNCHECK(klass->access));
CHECKL(FUNCHECK(klass->scan));
CHECKL(FUNCHECK(klass->fix));
CHECKL(FUNCHECK(klass->fixEmergency));
CHECKL(FUNCHECK(klass->rampBegin));
@ -286,32 +285,6 @@ Res PoolAccess(Pool pool, Seg seg, Addr addr,
}
/* PoolScan -- scan a segment in the pool */
/* Validate the arguments, then dispatch to the pool class's scan
 * method. The class method sets *totalReturn: TRUE iff it accumulated
 * a summary of all objects on the segment, FALSE otherwise. */
Res PoolScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
{
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Pool, pool);
AVERT(Seg, seg);
/* The scan state must be operating on the same arena as the pool. */
AVER(ss->arena == pool->arena);
/* The segment must belong to the pool. */
AVER(pool == SegPool(seg));
/* We check that either ss->rank is in the segment's
* ranks, or that ss->rank is exact. The check is more complicated if
* we actually have multiple ranks in a seg.
* See <code/trace.c#scan.conservative> */
AVER(ss->rank == RankEXACT || RankSetIsMember(SegRankSet(seg), ss->rank));
/* Should only scan segments which contain grey objects. */
AVER(TraceSetInter(SegGrey(seg), ss->traces) != TraceSetEMPTY);
return Method(Pool, pool, scan)(totalReturn, ss, pool, seg);
}
/* PoolFix* -- fix a reference to an object in this pool
*
* See <design/pool/#req.fix>.

View file

@ -60,9 +60,6 @@ void PoolClassMixInScan(PoolClass klass)
{
/* Can't check klass because it's not initialized yet */
klass->access = PoolSegAccess;
/* scan is part of the scanning protocol, but there is no useful
default method */
klass->scan = PoolNoScan;
}
@ -194,7 +191,6 @@ DEFINE_CLASS(Pool, AbstractPool, klass)
klass->bufferFill = PoolNoBufferFill;
klass->bufferEmpty = PoolNoBufferEmpty;
klass->access = PoolNoAccess;
klass->scan = PoolNoScan;
klass->fix = PoolNoFix;
klass->fixEmergency = PoolNoFix;
klass->rampBegin = PoolNoRampBegin;
@ -526,16 +522,6 @@ Res PoolSingleAccess(Pool pool, Seg seg, Addr addr,
}
/* PoolNoScan -- scan method for pool classes whose segments are never
 * scanned; checks its arguments but must never actually be reached. */
Res PoolNoScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
{
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Pool, pool);
AVERT(Seg, seg);
/* Installed as a placeholder only; calling it is a protocol error. */
NOTREACHED;
return ResUNIMPL;
}
Res PoolNoFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)
{
AVERT(Pool, pool);

View file

@ -24,6 +24,7 @@ typedef Bool (*amcPinnedFunction)(AMC amc, Nailboard board, Addr base, Addr limi
/* forward declarations */
static Res amcSegWhiten(Seg seg, Trace trace);
static Res amcSegScan(Bool *totalReturn, Seg seg, ScanState ss);
static void amcSegReclaim(Seg seg, Trace trace);
static Bool amcSegHasNailboard(Seg seg);
static Nailboard amcSegNailboard(Seg seg);
@ -339,6 +340,7 @@ DEFINE_CLASS(Seg, amcSeg, klass)
klass->size = sizeof(amcSegStruct);
klass->init = AMCSegInit;
klass->whiten = amcSegWhiten;
klass->scan = amcSegScan;
klass->reclaim = amcSegReclaim;
}
@ -1221,16 +1223,15 @@ static Res amcSegWhiten(Seg seg, Trace trace)
}
/* amcScanNailedRange -- make one scanning pass over a range of
/* amcSegScanNailedRange -- make one scanning pass over a range of
* addresses in a nailed segment.
*
* *totalReturn is set to FALSE if not all the objects between base and
* limit have been scanned. It is not touched otherwise.
*/
static Res amcScanNailedRange(Bool *totalReturn, Bool *moreReturn,
ScanState ss,
AMC amc, Nailboard board,
Addr base, Addr limit)
static Res amcSegScanNailedRange(Bool *totalReturn, Bool *moreReturn,
ScanState ss, AMC amc, Nailboard board,
Addr base, Addr limit)
{
Format format;
Size headerSize;
@ -1261,7 +1262,7 @@ static Res amcScanNailedRange(Bool *totalReturn, Bool *moreReturn,
}
/* amcScanNailedOnce -- make one scanning pass over a nailed segment
/* amcSegScanNailedOnce -- make one scanning pass over a nailed segment
*
* *totalReturn is set to TRUE iff all objects in segment scanned.
* *moreReturn is set to FALSE only if there are no more objects
@ -1270,8 +1271,8 @@ static Res amcScanNailedRange(Bool *totalReturn, Bool *moreReturn,
* also if during emergency fixing any new marks got added to the
* nailboard.
*/
static Res amcScanNailedOnce(Bool *totalReturn, Bool *moreReturn,
ScanState ss, Seg seg, AMC amc)
static Res amcSegScanNailedOnce(Bool *totalReturn, Bool *moreReturn,
ScanState ss, Seg seg, AMC amc)
{
Addr p, limit;
Nailboard board;
@ -1291,8 +1292,8 @@ static Res amcScanNailedOnce(Bool *totalReturn, Bool *moreReturn,
AVER(p == limit);
goto returnGood;
}
res = amcScanNailedRange(totalReturn, moreReturn,
ss, amc, board, p, limit);
res = amcSegScanNailedRange(totalReturn, moreReturn,
ss, amc, board, p, limit);
if (res != ResOK)
return res;
p = limit;
@ -1300,8 +1301,8 @@ static Res amcScanNailedOnce(Bool *totalReturn, Bool *moreReturn,
limit = SegLimit(seg);
/* @@@@ Shouldn't p be set to BufferLimit here?! */
res = amcScanNailedRange(totalReturn, moreReturn,
ss, amc, board, p, limit);
res = amcSegScanNailedRange(totalReturn, moreReturn,
ss, amc, board, p, limit);
if (res != ResOK)
return res;
@ -1313,17 +1314,17 @@ returnGood:
}
/* amcScanNailed -- scan a nailed segment */
/* amcSegScanNailed -- scan a nailed segment */
static Res amcScanNailed(Bool *totalReturn, ScanState ss, Pool pool,
Seg seg, AMC amc)
static Res amcSegScanNailed(Bool *totalReturn, ScanState ss, Pool pool,
Seg seg, AMC amc)
{
Bool total, moreScanning;
size_t loops = 0;
do {
Res res;
res = amcScanNailedOnce(&total, &moreScanning, ss, seg, amc);
res = amcSegScanNailedOnce(&total, &moreScanning, ss, seg, amc);
if(res != ResOK) {
*totalReturn = FALSE;
return res;
@ -1359,27 +1360,29 @@ static Res amcScanNailed(Bool *totalReturn, ScanState ss, Pool pool,
}
/* AMCScan -- scan a single seg, turning it black
/* amcSegScan -- scan a single seg, turning it black
*
* See <design/poolamc/#seg-scan>.
*/
static Res AMCScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
static Res amcSegScan(Bool *totalReturn, Seg seg, ScanState ss)
{
Addr base, limit;
Format format;
AMC amc = MustBeA(AMCZPool, pool);
Pool pool;
AMC amc;
Res res;
Buffer buffer;
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Seg, seg);
AVERT(ScanState, ss);
pool = SegPool(seg);
amc = MustBeA(AMCZPool, pool);
format = pool->format;
if(amcSegHasNailboard(seg)) {
return amcScanNailed(totalReturn, ss, pool, seg, amc);
return amcSegScanNailed(totalReturn, ss, pool, seg, amc);
}
EVENT3(AMCScanBegin, amc, seg, ss);
@ -2008,7 +2011,6 @@ DEFINE_CLASS(Pool, AMCPool, klass)
INHERIT_CLASS(klass, AMCPool, AMCZPool);
PoolClassMixInScan(klass);
klass->init = AMCInit;
klass->scan = AMCScan;
}

View file

@ -28,6 +28,7 @@ SRCID(poolams, "$Id$");
static void amsSegBlacken(Seg seg, TraceSet traceSet);
static Res amsSegWhiten(Seg seg, Trace trace);
static Res amsSegScan(Bool *totalReturn, Seg seg, ScanState ss);
static void amsSegReclaim(Seg seg, Trace trace);
@ -611,6 +612,7 @@ DEFINE_CLASS(Seg, AMSSeg, klass)
klass->split = AMSSegSplit;
klass->whiten = amsSegWhiten;
klass->blacken = amsSegBlacken;
klass->scan = amsSegScan;
klass->reclaim = amsSegReclaim;
AVERT(SegClass, klass);
}
@ -1246,7 +1248,7 @@ static Res semSegIterate(Seg seg, AMSObjectFunction f, void *closure)
/* amsScanObject -- scan a single object
*
* This is the object function passed to semSegIterate by AMSScan. */
* This is the object function passed to semSegIterate by amsSegScan. */
struct amsScanClosureStruct {
ScanState ss;
@ -1296,29 +1298,23 @@ static Res amsScanObject(Seg seg, Index i, Addr p, Addr next, void *clos)
}
/* AMSScan -- the pool class segment scanning method
/* amsSegScan -- the segment scanning method
*
* See <design/poolams/#scan>
*/
Res AMSScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
static Res amsSegScan(Bool *totalReturn, Seg seg, ScanState ss)
{
Res res;
AMS ams;
Arena arena;
AMSSeg amsseg;
AMSSeg amsseg = MustBeA(AMSSeg, seg);
Pool pool = SegPool(seg);
AMS ams = MustBeA(AMSPool, pool);
Arena arena = PoolArena(pool);
struct amsScanClosureStruct closureStruct;
Format format;
Align alignment;
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Pool, pool);
ams = PoolAMS(pool);
AVERT(AMS, ams);
arena = PoolArena(pool);
AVERT(Seg, seg);
amsseg = Seg2AMSSeg(seg);
AVERT(AMSSeg, amsseg);
/* Check that we're not in the grey mutator phase (see */
/* <design/poolams/#not-req.grey>). */
@ -1773,7 +1769,6 @@ DEFINE_CLASS(Pool, AMSPool, klass)
klass->bufferClass = RankBufClassGet;
klass->bufferFill = AMSBufferFill;
klass->bufferEmpty = AMSBufferEmpty;
klass->scan = AMSScan;
klass->fix = AMSFix;
klass->fixEmergency = AMSFix;
klass->walk = AMSWalk;

View file

@ -172,8 +172,6 @@ extern Res AMSInitInternal(AMS ams, Arena arena, PoolClass klass,
extern void AMSFinish(Inst inst);
extern Bool AMSCheck(AMS ams);
extern Res AMSScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg);
#define AMSChain(ams) ((ams)->chain)
extern void AMSSegFreeWalk(AMSSeg amsseg, FreeBlockVisitor f, void *p);

View file

@ -51,6 +51,7 @@ SRCID(poolawl, "$Id$");
static Res awlSegWhiten(Seg seg, Trace trace);
static void awlSegGreyen(Seg seg, Trace trace);
static void awlSegBlacken(Seg seg, TraceSet traceSet);
static Res awlSegScan(Bool *totalReturn, Seg seg, ScanState ss);
static void awlSegReclaim(Seg seg, Trace trace);
@ -291,6 +292,7 @@ DEFINE_CLASS(Seg, AWLSeg, klass)
klass->whiten = awlSegWhiten;
klass->greyen = awlSegGreyen;
klass->blacken = awlSegBlacken;
klass->scan = awlSegScan;
klass->reclaim = awlSegReclaim;
}
@ -304,7 +306,7 @@ DEFINE_CLASS(Seg, AWLSeg, klass)
* AWLSegSALimit is the number of accesses for a single segment in a GC cycle.
* AWLTotalSALimit is the total number of accesses during a GC cycle.
*
* These should be set in config.h, but are here in static variables so that
* These should be set in config.h, but are here in global variables so that
* it's possible to tweak them in a debugger.
*/
@ -384,11 +386,13 @@ static void AWLNoteRefAccess(AWL awl, Seg seg, Addr addr)
AVER(addr != NULL);
awlseg->singleAccesses++; /* increment seg count of ref accesses */
if (addr == awlseg->stats.lastAccess) {
/* If this is a repeated access, increment count */
STATISTIC(awlseg->stats.sameAccesses++);
}
STATISTIC(awlseg->stats.lastAccess = addr);
STATISTIC({
if (addr == awlseg->stats.lastAccess) {
/* If this is a repeated access, increment count */
++ awlseg->stats.sameAccesses;
}
awlseg->stats.lastAccess = addr;
});
awl->succAccesses++; /* Note a new successive access */
}
@ -407,33 +411,35 @@ static void AWLNoteSegAccess(AWL awl, Seg seg, Addr addr)
/* Record a scan of a segment which wasn't provoked by an access */
static void AWLNoteScan(AWL awl, Seg seg, ScanState ss)
static void AWLNoteScan(Seg seg, ScanState ss)
{
AWLSeg awlseg = MustBeA(AWLSeg, seg);
AVERT(AWL, awl);
UNUSED(ss);
/* .assume.mixedrank */
/* .assume.samerank */
/* If this segment has any RankWEAK references, then */
/* record statistics about whether weak splatting is being lost. */
if (RankSetIsMember(SegRankSet(seg), RankWEAK)) {
if (RankWEAK == ss->rank) {
/* This is "successful" scan at proper rank. */
STATISTIC(awl->stats.goodScans++);
if (0 < awlseg->singleAccesses) {
/* Accesses have been processed singly */
/* Record that we genuinely did save a protection-provoked scan */
STATISTIC(awl->stats.savedScans++);
STATISTIC(awl->stats.savedAccesses += awlseg->singleAccesses);
STATISTIC({
/* If this segment has any RankWEAK references, then record
* statistics about whether weak splatting is being lost. */
AWL awl = MustBeA(AWLPool, SegPool(seg));
if (RankWEAK == ss->rank) {
/* This is "successful" scan at proper rank. */
++ awl->stats.goodScans;
if (0 < awlseg->singleAccesses) {
* Accesses have been processed singly. Record that we
* genuinely did save a protection-provoked scan */
++ awl->stats.savedScans;
awl->stats.savedAccesses += awlseg->singleAccesses;
}
} else {
/* This is "failed" scan at improper rank. */
++ awl->stats.badScans;
}
} else {
/* This is "failed" scan at improper rank. */
STATISTIC(awl->stats.badScans++);
}
awlStatSegInit(awlseg);
});
/* Reinitialize the segment statistics */
awlseg->singleAccesses = 0;
STATISTIC(awlStatSegInit(awlseg));
}
}
@ -870,14 +876,14 @@ static Res awlScanObject(Arena arena, AWL awl, ScanState ss,
}
/* awlScanSinglePass -- a single scan pass over a segment */
/* awlSegScanSinglePass -- a single scan pass over a segment */
static Res awlScanSinglePass(Bool *anyScannedReturn,
ScanState ss, Pool pool,
Seg seg, Bool scanAllObjects)
static Res awlSegScanSinglePass(Bool *anyScannedReturn, ScanState ss,
Seg seg, Bool scanAllObjects)
{
AWL awl = MustBeA(AWLPool, pool);
AWLSeg awlseg = MustBeA(AWLSeg, seg);
Pool pool = SegPool(seg);
AWL awl = MustBeA(AWLPool, pool);
Arena arena = PoolArena(pool);
Buffer buffer;
Format format = pool->format;
@ -935,17 +941,17 @@ static Res awlScanSinglePass(Bool *anyScannedReturn,
}
/* AWLScan -- segment scan method for AWL */
/* awlSegScan -- segment scan method for AWL */
static Res AWLScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
static Res awlSegScan(Bool *totalReturn, Seg seg, ScanState ss)
{
AWL awl = MustBeA(AWLPool, pool);
Bool anyScanned;
Bool scanAllObjects;
Res res;
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Seg, seg);
/* If the scanner isn't going to scan all the objects then the */
/* summary of the unscanned objects must be added into the scan */
@ -962,7 +968,7 @@ static Res AWLScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
(TraceSetDiff(ss->traces, SegWhite(seg)) != TraceSetEMPTY);
do {
res = awlScanSinglePass(&anyScanned, ss, pool, seg, scanAllObjects);
res = awlSegScanSinglePass(&anyScanned, ss, seg, scanAllObjects);
if (res != ResOK) {
*totalReturn = FALSE;
return res;
@ -973,7 +979,7 @@ static Res AWLScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
} while(!scanAllObjects && anyScanned);
*totalReturn = scanAllObjects;
AWLNoteScan(awl, seg, ss);
AWLNoteScan(seg, ss);
return ResOK;
}
@ -1238,7 +1244,6 @@ DEFINE_CLASS(Pool, AWLPool, klass)
klass->bufferFill = AWLBufferFill;
klass->bufferEmpty = AWLBufferEmpty;
klass->access = AWLAccess;
klass->scan = AWLScan;
klass->fix = AWLFix;
klass->fixEmergency = AWLFix;
klass->walk = AWLWalk;

View file

@ -167,6 +167,7 @@ typedef struct MRGRefSegStruct {
DECLARE_CLASS(Seg, MRGLinkSeg, Seg);
DECLARE_CLASS(Seg, MRGRefSeg, GCSeg);
static Res mrgRefSegScan(Bool *totalReturn, Seg seg, ScanState ss);
/* MRGLinkSegCheck -- check a link segment
@ -300,6 +301,7 @@ DEFINE_CLASS(Seg, MRGRefSeg, klass)
SegClassMixInNoSplitMerge(klass); /* no support for this */
klass->size = sizeof(MRGRefSegStruct);
klass->init = MRGRefSegInit;
klass->scan = mrgRefSegScan;
}
@ -558,21 +560,22 @@ static void MRGFinalize(Arena arena, MRGLinkSeg linkseg, Index indx)
}
static Res MRGRefSegScan(ScanState ss, MRGRefSeg refseg, MRG mrg)
static Res mrgRefSegScan(Bool *totalReturn, Seg seg, ScanState ss)
{
MRGRefSeg refseg = MustBeA(MRGRefSeg, seg);
Pool pool = SegPool(seg);
MRG mrg = MustBeA(MRGPool, pool);
Res res;
Arena arena;
MRGLinkSeg linkseg;
RefPart refPart;
Index i;
Count nGuardians;
AVERT(ScanState, ss);
AVERT(MRGRefSeg, refseg);
AVERT(MRG, mrg);
arena = PoolArena(MustBeA(AbstractPool, mrg));
arena = PoolArena(pool);
linkseg = refseg->linkSeg;
nGuardians = MRGGuardiansPerSeg(mrg);
@ -588,8 +591,10 @@ static Res MRGRefSegScan(ScanState ss, MRGRefSeg refseg, MRG mrg)
/* because we are in a scan and the shield is exposed. */
if (TRACE_FIX1(ss, refPart->ref)) {
res = TRACE_FIX2(ss, &(refPart->ref));
if (res != ResOK)
if (res != ResOK) {
*totalReturn = FALSE;
return res;
}
if (ss->rank == RankFINAL && !ss->wasMarked) { /* .improve.rank */
MRGFinalize(arena, linkseg, i);
@ -600,6 +605,7 @@ static Res MRGRefSegScan(ScanState ss, MRGRefSeg refseg, MRG mrg)
}
} TRACE_SCAN_END(ss);
*totalReturn = TRUE;
return ResOK;
}
@ -821,27 +827,6 @@ static Res MRGDescribe(Inst inst, mps_lib_FILE *stream, Count depth)
}
/* MRGScan -- pool scan method for MRG (guardian) segments: delegates
 * the actual scanning to MRGRefSegScan and reports totality. */
static Res MRGScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
{
MRG mrg = MustBeA(MRGPool, pool);
MRGRefSeg refseg = MustBeA(MRGRefSeg, seg);
Res res;
AVERT(ScanState, ss);
/* Guardian ref segments hold only RankFINAL references. */
AVER(SegRankSet(seg) == RankSetSingle(RankFINAL)); /* .improve.rank */
/* Should only scan segments grey for one of the scan's traces. */
AVER(TraceSetInter(SegGrey(seg), ss->traces) != TraceSetEMPTY);
res = MRGRefSegScan(ss, refseg, mrg);
if (res != ResOK) {
/* Scan failed part-way, so not all objects were scanned. */
*totalReturn = FALSE;
return res;
}
*totalReturn = TRUE;
return ResOK;
}
DEFINE_CLASS(Pool, MRGPool, klass)
{
INHERIT_CLASS(klass, MRGPool, AbstractPool);
@ -849,7 +834,6 @@ DEFINE_CLASS(Pool, MRGPool, klass)
klass->instClassStruct.finish = MRGFinish;
klass->size = sizeof(MRGStruct);
klass->init = MRGInit;
klass->scan = MRGScan;
}

View file

@ -156,21 +156,6 @@ static Res NDescribe(Inst inst, mps_lib_FILE *stream, Count depth)
}
/* NScan -- scan method for class N */
/* NScan -- scan method for the null pool class N: scans nothing and
 * succeeds.
 * NOTE(review): *totalReturn is checked non-NULL but never assigned
 * here -- presumably deliberate for a pool with nothing to scan, but
 * worth confirming against the scan protocol. */
static Res NScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
{
PoolN poolN = MustBeA(NPool, pool);
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Seg, seg);
UNUSED(poolN); /* only needed for the MustBeA type check above */
return ResOK;
}
/* NFix -- fix method for class N */
static Res NFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)
@ -201,7 +186,6 @@ DEFINE_CLASS(Pool, NPool, klass)
klass->free = NFree;
klass->bufferFill = NBufferFill;
klass->bufferEmpty = NBufferEmpty;
klass->scan = NScan;
klass->fix = NFix;
klass->fixEmergency = NFix;
AVERT(PoolClass, klass);

View file

@ -51,6 +51,7 @@ DECLARE_CLASS(Seg, SNCSeg, GCSeg);
DECLARE_CLASS(Buffer, SNCBuf, RankBuf);
static Bool SNCCheck(SNC snc);
static void sncPopPartialSegChain(SNC snc, Buffer buf, Seg upTo);
static Res sncSegScan(Bool *totalReturn, Seg seg, ScanState ss);
/* Management of segment chains
@ -233,6 +234,7 @@ DEFINE_CLASS(Seg, SNCSeg, klass)
SegClassMixInNoSplitMerge(klass); /* no support for this (yet) */
klass->size = sizeof(SNCSegStruct);
klass->init = sncSegInit;
klass->scan = sncSegScan;
}
@ -480,24 +482,20 @@ static void SNCBufferEmpty(Pool pool, Buffer buffer,
}
static Res SNCScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)
static Res sncSegScan(Bool *totalReturn, Seg seg, ScanState ss)
{
Addr base, limit;
Format format;
SNC snc;
Res res;
AVER(totalReturn != NULL);
AVERT(ScanState, ss);
AVERT(Seg, seg);
AVERT(Pool, pool);
snc = PoolSNC(pool);
AVERT(SNC, snc);
format = pool->format;
format = SegPool(seg)->format;
base = SegBase(seg);
limit = SegBufferScanLimit(seg);
if (base < limit) {
res = FormatScan(format, ss, base, limit);
if (res != ResOK) {
@ -679,7 +677,6 @@ DEFINE_CLASS(Pool, SNCPool, klass)
klass->init = SNCInit;
klass->bufferFill = SNCBufferFill;
klass->bufferEmpty = SNCBufferEmpty;
klass->scan = SNCScan;
klass->framePush = SNCFramePush;
klass->framePop = SNCFramePop;
klass->walk = SNCWalk;

View file

@ -718,7 +718,29 @@ void SegBlacken(Seg seg, TraceSet traceSet)
}
/* PoolReclaim -- reclaim a segment in the pool */
/* SegScan -- scan a segment */
/* Validate the arguments, then dispatch to the segment class's scan
 * method, which sets *totalReturn (TRUE iff all objects on the
 * segment were scanned). */
Res SegScan(Bool *totalReturn, Seg seg, ScanState ss)
{
AVER(totalReturn != NULL);
AVERT(Seg, seg);
AVERT(ScanState, ss);
/* The scan state must be operating on the segment's arena. */
AVER(PoolArena(SegPool(seg)) == ss->arena);
/* We check that either ss->rank is in the segment's
* ranks, or that ss->rank is exact. The check is more complicated if
* we actually have multiple ranks in a seg.
* See <code/trace.c#scan.conservative> */
AVER(ss->rank == RankEXACT || RankSetIsMember(SegRankSet(seg), ss->rank));
/* Should only scan segments which contain grey objects. */
AVER(TraceSetInter(SegGrey(seg), ss->traces) != TraceSetEMPTY);
return Method(Seg, seg, scan)(totalReturn, seg, ss);
}
/* SegReclaim -- reclaim a segment */
void SegReclaim(Seg seg, Trace trace)
{
@ -1110,6 +1132,19 @@ static void segNoBlacken(Seg seg, TraceSet traceSet)
}
/* segNoScan -- scan method for non-GC segs */
static Res segNoScan(Bool *totalReturn, Seg seg, ScanState ss)
{
AVER(totalReturn != NULL);
AVERT(Seg, seg);
AVERT(ScanState, ss);
AVER(PoolArena(SegPool(seg)) == ss->arena);
/* Non-GC segments are never grey, so this must never be called. */
NOTREACHED;
return ResUNIMPL;
}
/* segNoReclaim -- reclaim method for non-GC segs */
static void segNoReclaim(Seg seg, Trace trace)
@ -1741,6 +1776,7 @@ Bool SegClassCheck(SegClass klass)
CHECKL(FUNCHECK(klass->whiten));
CHECKL(FUNCHECK(klass->greyen));
CHECKL(FUNCHECK(klass->blacken));
CHECKL(FUNCHECK(klass->scan));
CHECKL(FUNCHECK(klass->reclaim));
CHECKS(SegClass, klass);
return TRUE;
@ -1774,6 +1810,7 @@ DEFINE_CLASS(Seg, Seg, klass)
klass->whiten = segNoWhiten;
klass->greyen = segNoGreyen;
klass->blacken = segNoBlacken;
klass->scan = segNoScan;
klass->reclaim = segNoReclaim;
klass->sig = SegClassSig;
AVERT(SegClass, klass);
@ -1804,6 +1841,7 @@ DEFINE_CLASS(Seg, GCSeg, klass)
klass->whiten = gcSegWhiten;
klass->greyen = gcSegGreyen;
klass->blacken = gcSegTrivBlacken;
klass->scan = segNoScan; /* no useful default method */
klass->reclaim = segNoReclaim; /* no useful default method */
AVERT(SegClass, klass);
}

View file

@ -1109,7 +1109,7 @@ static Res traceScanSegRes(TraceSet ts, Rank rank, Arena arena, Seg seg)
/* Expose the segment to make sure we can scan it. */
ShieldExpose(arena, seg);
res = PoolScan(&wasTotal, ss, SegPool(seg), seg);
res = SegScan(&wasTotal, seg, ss);
/* Cover, regardless of result */
ShieldCover(arena, seg);

View file

@ -196,19 +196,6 @@ required to provide this method, and not doing so indicates they never
protect any memory managed by the pool. This method is called via the
generic function ``PoolAccess()``.
``typedef Res (*PoolScanMethod)(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)``
_`.method.scan`: The ``scan`` method requires that the pool scan all
the grey objects on the segment ``seg``, passing the scan state ``ss``
to ``FormatScan``. The pool may additionally accumulate a summary of
*all* the objects on the segment. If it succeeds in accumulating such
a summary it must indicate that it has done so by setting the
``*totalReturn`` parameter to ``TRUE``. Otherwise it must set
``*totalReturn`` to ``FALSE``. Pool classes are not required to
provide this method, and not doing so indicates that all instances of
this class will have no fixable or traceable references in them. This
method is called via the generic function ``PoolScan()``.
``typedef Res (*PoolFixMethod)(Pool pool, ScanState ss, Seg seg, Ref *refIO)``
_`.method.fix`: The ``fix`` method indicates that the reference

View file

@ -763,7 +763,7 @@ _`.fix.exact.grey`: The new copy must be at least as grey as the old
as it may have been grey for some other collection.
``Res AMCScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)``
``Res amcSegScan(Bool *totalReturn, Seg seg, ScanState ss)``
_`.scan`: Searches for a group which is grey for the trace and scans
it. If there aren't any, it sets the finished flag to true.

View file

@ -257,7 +257,7 @@ have been subsequently scanned and blackened.
_`.marked.fix`: The ``marksChanged`` flag is set ``TRUE`` by
``AMSFix()`` when an object is made grey.
_`.marked.scan`: ``AMSScan()`` must blacken all grey objects on the
_`.marked.scan`: ``amsSegScan()`` must blacken all grey objects on the
segment, so it must iterate over the segment until all grey objects
have been seen. Scanning an object in the segment might grey another
one (`.marked.fix`_), so the scanner iterates until this flag is
@ -462,10 +462,10 @@ index in a segment uses macros such as ``AMS_INDEX`` and
every translation -- we could cache that.
_`.grey-mutator`: To enforce the restriction set in `.not-req.grey`_
we check that all the traces are flipped in ``AMSScan()``. It would be
good to check in ``AMSFix()`` as well, but we can't do that, because
it's called during the flip, and we can't tell the difference between
the flip and the grey mutator phases with the current tracer
we check that all the traces are flipped in ``amsSegScan()``. It would
be good to check in ``AMSFix()`` as well, but we can't do that,
because it's called during the flip, and we can't tell the difference
between the flip and the grey mutator phases with the current tracer
interface.

View file

@ -287,12 +287,99 @@ allocation minus the ``lastRememberedSize`` minus 10 MiB, so the pool
becomes an increasingly good candidate for collection at a constant
(mutator allocation) rate, crossing the 0 line when there has been
10 MiB of allocation since the (beginning of the) last collection. So
it gets collected approximately every 10 MiB of allocation. Note that
it will also get collected by virtue of being in the same zone as some
AMC generation (assuming there are instantiated AMC pools), see
`.poolstruct.gen`_ above.
it gets collected approximately every 10 MiB of allocation.
``Res AWLScan(ScanState ss, Pool pool, Seg seg)``
``Res AWLFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)``
_`.fun.fix`: ``ss->wasMarked`` is set to ``TRUE`` (clear compliance
with design.mps.fix.protocol.was-marked.conservative_).
.. _design.mps.fix.protocol.was-marked.conservative: fix#protocol-was-marked-conservative
If the rank (``ss->rank``) is ``RankAMBIG`` then fix returns
immediately unless the reference is aligned to the pool alignment.
If the rank (``ss->rank``) is ``RankAMBIG`` then fix returns
immediately unless the referenced grain is allocated.
The bit in the marked table corresponding to the referenced grain will
be read. If it is already marked then fix returns. Otherwise (the
grain is unmarked), ``ss->wasMarked`` is set to ``FALSE``, the
remaining actions depend on whether the rank (``ss->rank``) is
``RankWEAK`` or not. If the rank is weak then the reference is
adjusted to 0 (see design.mps.weakness) and fix returns. If the rank
is something else then the mark bit corresponding to the referenced
grain is set, and the segment is greyed using ``TraceSegGreyen()``.
Fix returns.
``Res AWLDescribe(Pool pool, mps_lib_FILE *stream, Count depth)``
_`.fun.describe`:
Internal
........
``Res AWLSegCreate(AWLSeg *awlsegReturn, Size size)``
_`.fun.awlsegcreate`: Creates a segment of class ``AWLSegClass`` of size at least ``size``.
_`.fun.awlsegcreate.size.round`: ``size`` is rounded up to the arena
grain size before requesting the segment.
_`.fun.awlsegcreate.size.round.justify`: The arena requires that all
segment sizes are rounded up to the arena grain size.
_`.fun.awlsegcreate.where`: The segment is allocated using a
generation preference, using the generation number stored in the
``AWLStruct`` (the ``gen`` field), see `.poolstruct.gen`_ above.
``Res awlSegInit(Seg seg, Pool pool, Addr base, Size size, ArgList args)``
_`.fun.awlseginit`: Init method for ``AWLSegClass``, called for
``SegAlloc()`` whenever an ``AWLSeg`` is created (see
`.fun.awlsegcreate`_ above).
_`.fun.awlseginit.tables`: The segment's mark scanned and alloc tables
(see `.awlseg.bt`_ above) are allocated and initialised. The segment's
grains field is computed and stored.
``void awlSegFinish(Seg seg)``
_`.fun.awlsegfinish`: Finish method for ``AWLSegClass``, called from
``SegFree()``. Will free the segment's tables (see `.awlseg.bt`_).
``Bool AWLSegAlloc(Addr *baseReturn, Addr *limitReturn, AWLSeg awlseg, AWL awl, Size size)``
_`.fun.awlsegalloc`: Will search for a free block in the segment that
is at least size bytes long. The base address of the block is returned
in ``*baseReturn``, the limit of the entire free block (which must be
at least as large as ``size`` and may be bigger) is returned in
``*limitReturn``. The requested size is converted to a number of
grains, ``BTFindResRange()`` is called to find a run of this length in
the alloc bit-table (`.awlseg.alloc`_). The return results (if it is
successful) from ``BTFindResRange()`` are in terms of grains, they are
converted back to addresses before returning the relevant values from
this function.
``Res awlSegWhiten(Seg seg, Trace trace)``
_`.fun.whiten`: The current design only permits each segment to be
condemned for one trace (see `.awlseg.mark`_). This function checks
that the segment is not white for any trace (``seg->white ==
TraceSetEMPTY``). The segment's mark bit-table is reset, and the
whiteness of the seg (``seg->white``) has the current trace added to
it.
``void awlSegGrey(Seg seg, Trace trace)``
_`.fun.grey`: If the segment is not white for this trace, the
segment's mark table is set to all 1s and the segment is recorded as
being grey.
``Res awlSegScan(Bool *totalReturn, Seg seg, ScanState ss)``
_`.fun.scan`:
@ -391,121 +478,12 @@ _`.fun.scan.pass.more`: At the end of a pass the finished flag is
examined.
_`.fun.scan.pass.more.not`: If the finished flag is set then we are
done (see `.fun.scan.overview.finished-flag`_ above), ``AWLScan()``
done (see `.fun.scan.overview.finished-flag`_ above), ``awlSegScan()``
returns.
_`.fun.scan.pass.more.so`: Otherwise (the finished flag is reset) we
perform another pass (see `.fun.scan.pass`_ above).
``Res AWLFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)``
_`.fun.fix`: ``ss->wasMarked`` is set to ``TRUE`` (clear compliance
with design.mps.fix.protocol.was-marked.conservative_).
.. _design.mps.fix.protocol.was-marked.conservative: fix#protocol-was-marked-conservative
If the rank (``ss->rank``) is ``RankAMBIG`` then fix returns
immediately unless the reference is aligned to the pool alignment.
If the rank (``ss->rank``) is ``RankAMBIG`` then fix returns
immediately unless the referenced grain is allocated.
The bit in the marked table corresponding to the referenced grain will
be read. If it is already marked then fix returns. Otherwise (the
grain is unmarked), ``ss->wasMarked`` is set to ``FALSE``, the
remaining actions depend on whether the rank (``ss->rank``) is
``RankWEAK`` or not. If the rank is weak then the reference is
adjusted to 0 (see design.mps.weakness) and fix returns. If the rank
is something else then the mark bit corresponding to the referenced
grain is set, and the segment is greyed using ``TraceSegGreyen()``.
Fix returns.
``Res AWLDescribe(Pool pool, mps_lib_FILE *stream, Count depth)``
_`.fun.describe`:
Internal
........
``Res AWLSegCreate(AWLSeg *awlsegReturn, Size size)``
_`.fun.awlsegcreate`: Creates a segment of class ``AWLSegClass`` of size at least ``size``.
_`.fun.awlsegcreate.size.round`: ``size`` is rounded up to the arena
grain size before requesting the segment.
_`.fun.awlsegcreate.size.round.justify`: The arena requires that all
segment sizes are rounded up to the arena grain size.
_`.fun.awlsegcreate.where`: The segment is allocated using a
generation preference, using the generation number stored in the
``AWLStruct`` (the ``gen`` field), see `.poolstruct.gen`_ above.
``Res awlSegInit(Seg seg, Pool pool, Addr base, Size size, ArgList args)``
_`.fun.awlseginit`: Init method for ``AWLSegClass``, called for
``SegAlloc()`` whenever an ``AWLSeg`` is created (see
`.fun.awlsegcreate`_ above).
_`.fun.awlseginit.tables`: The segment's mark, scanned, and alloc tables
(see `.awlseg.bt`_ above) are allocated and initialised. The segment's
grains field is computed and stored.
``void awlSegFinish(Seg seg)``
_`.fun.awlsegfinish`: Finish method for ``AWLSegClass``, called from
``SegFree()``. Will free the segment's tables (see `.awlseg.bt`_).
``Bool AWLSegAlloc(Addr *baseReturn, Addr *limitReturn, AWLSeg awlseg, AWL awl, Size size)``
_`.fun.awlsegalloc`: Will search for a free block in the segment that
is at least ``size`` bytes long. The base address of the block is
returned in ``*baseReturn``, the limit of the entire free block (which
must be at least as large as ``size`` and may be bigger) is returned in
``*limitReturn``. The requested size is converted to a number of
grains, ``BTFindResRange()`` is called to find a run of this length in
the alloc bit-table (`.awlseg.alloc`_). The return results (if it is
successful) from ``BTFindResRange()`` are in terms of grains, they are
converted back to addresses before returning the relevant values from
this function.
``Res awlSegWhiten(Seg seg, Trace trace)``
_`.fun.whiten`: The current design only permits each segment to be
condemned for one trace (see `.awlseg.mark`_). This function checks
that the segment is not white for any trace (``seg->white ==
TraceSetEMPTY``). The segment's mark bit-table is reset, and the
whiteness of the seg (``seg->white``) has the current trace added to
it.
``void awlSegGrey(Seg seg, Trace trace)``
_`.fun.grey`: If the segment is not white for this trace, the
segment's mark table is set to all 1s and the segment is recorded as
being grey.
``Bool AWLDependentObject(Addr *objReturn, Addr parent)``
_`.fun.dependent-object`: This function abstracts the association
between an object and its linked dependent (see `.req.obj-format`_).
It currently assumes that objects are Dylan Object formatted according
to design.dylan.container (see analysis.mps.poolawl.dependent.abstract
for suggested improvements). An object has a dependent object iff the
second word of the object, that is, ``((Word *)parent)[1]``, is
non-``NULL``. The dependent object is the object referenced by the
second word and must be a valid object.
This function assumes objects are in Dylan Object Format (see
design.dylan.container). It will check that the first word looks like
a Dylan wrapper pointer. It will check that the wrapper indicates that
the wrapper has a reasonable format (namely at least one fixed field).
If the second word is ``NULL`` it will return ``FALSE``. If the second
word is non-``NULL`` then the contents of it will be assigned to
``*objReturn``, and it will return ``TRUE``.
``void awlSegReclaim(Seg seg, Trace trace)``
_`.fun.reclaim`: This iterates over all allocated objects in the
@ -528,6 +506,25 @@ objects. Now reclaim doesn't need to check that the objects are
allocated before skipping them. There may be a corresponding change
for scan as well.
``Bool AWLDependentObject(Addr *objReturn, Addr parent)``
_`.fun.dependent-object`: This function abstracts the association
between an object and its linked dependent (see `.req.obj-format`_).
It currently assumes that objects are Dylan Object formatted according
to design.dylan.container (see analysis.mps.poolawl.dependent.abstract
for suggested improvements). An object has a dependent object iff the
second word of the object, that is, ``((Word *)parent)[1]``, is
non-``NULL``. The dependent object is the object referenced by the
second word and must be a valid object.
This function assumes objects are in Dylan Object Format (see
design.dylan.container). It will check that the first word looks like
a Dylan wrapper pointer. It will check that the wrapper indicates that
the wrapper has a reasonable format (namely at least one fixed field).
If the second word is ``NULL`` it will return ``FALSE``. If the second
word is non-``NULL`` then the contents of it will be assigned to
``*objReturn``, and it will return ``TRUE``.
Test
----

View file

@ -432,9 +432,9 @@ to grow very quickly.
_`.finish`: Iterate over all the segments, returning all the segments
to the arena.
``Res MRGScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)``
``Res mrgRefSegScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg)``
_`.scan`: ``MRGScan()`` scans a segment.
_`.scan`: ``mrgRefSegScan()`` scans a segment of guardians.
_`.scan.trivial`: Scan will do nothing (that is, return immediately)
if the tracing rank is anything other than final.
@ -451,10 +451,10 @@ scanning is detrimental, it will only delay finalization. If the rank
is higher than final there is nothing to do, the pool only contains
final references.
_`.scan.guardians`: ``MRGScan()`` will iterate over all guardians in
the segment. Every guardian's reference will be fixed (_`.scan.free`:
note that guardians that are on the free list have ``NULL`` in their
reference part).
_`.scan.guardians`: ``mrgRefSegScan()`` will iterate over all
guardians in the segment. Every guardian's reference will be fixed
(_`.scan.free`: note that guardians that are on the free list have
``NULL`` in their reference part).
_`.scan.wasold`: If the object referred to had not been fixed
previously (that is, was unmarked) then the object is not referenced
@ -508,7 +508,7 @@ arena. A suggested strategy for this is as follows:
- Add a free segment ring to the pool.
- In ``MRGRefSegScan()``, if the segment is entirely free, don't scan
- In ``mrgRefSegScan()``, if the segment is entirely free, don't scan
it, but instead detach its links from the free ring, and move the
segment to the free segment ring.

View file

@ -270,6 +270,19 @@ method, and not doing so indicates that all instances of this class
will have no fixable or traceable references in them. This method is
called via the generic function ``SegBlacken()``.
``typedef Res (*SegScanMethod)(Bool *totalReturn, Seg seg, ScanState ss)``
_`.method.scan`: The ``scan`` method scans all the grey objects on the
segment ``seg``, passing the scan state ``ss`` to ``FormatScan``. The
segment may additionally accumulate a summary of *all* its objects. If
it succeeds in accumulating such a summary it must indicate that it
has done so by setting the ``*totalReturn`` parameter to ``TRUE``.
Otherwise it must set ``*totalReturn`` to ``FALSE``. Segment classes
are not required to provide this method, and not doing so indicates
that all instances of this class will have no fixable or traceable
references in them. This method is called via the generic function
``SegScan()``.
``typedef void (*SegReclaimMethod)(Seg seg, Trace trace)``
_`.method.reclaim`: The ``reclaim`` method indicates that any

View file

@ -93,8 +93,9 @@ Args Locals Function
3 5 ``TraceSegAccess()``
4 1 ``traceScanSeg()``
4 8 ``traceScanSegRes()``
4 0 ``PoolScan()``
4 5 ``AMCScan()``
4 0 ``SegScan()``
4 5 ``amcSegScan()``
4 0 ``FormatScan()``
3 ≤64 ``format->scan()``
4 15 ``AMCFix()``
4 5 ``BufferFill()``
@ -111,7 +112,7 @@ Args Locals Function
3 7 ``SplaySplay()``
4 8 ``SplaySplitDown()``
3 0 ``SplayZig()``
109 ≤190 **Total**
113 ≤190 **Total**
==== ====== ========================
We expect that a compiler will often be able to share stack space