1
Fork 0
mirror of git://git.sv.gnu.org/emacs.git synced 2026-01-07 04:10:27 -08:00

Replacing segfirst/segnext loops with tree traversals in condemn and reclaim.

Copied from Perforce
 Change: 190080
 ServerID: perforce.ravenbrook.com
This commit is contained in:
Richard Brooksby 2016-02-28 09:44:26 +00:00
parent db8c72a427
commit d72b8a6539
11 changed files with 162 additions and 82 deletions

View file

@ -223,7 +223,7 @@ extern Res (PoolFix)(Pool pool, ScanState ss, Seg seg, Addr *refIO);
#define PoolFix(pool, ss, seg, refIO) \
((*(pool)->fix)(pool, ss, seg, refIO))
extern Res PoolFixEmergency(Pool pool, ScanState ss, Seg seg, Addr *refIO);
extern void PoolReclaim(Pool pool, Trace trace, Seg seg);
extern Bool PoolReclaim(Pool pool, Trace trace, Seg seg);
extern void PoolTraceEnd(Pool pool, Trace trace);
extern Res PoolAddrObject(Addr *pReturn, Pool pool, Seg seg, Addr addr);
extern void PoolWalk(Pool pool, Seg seg, FormattedObjectsVisitor f,
@ -267,7 +267,7 @@ extern void PoolNoBlacken(Pool pool, TraceSet traceSet, Seg seg);
extern void PoolTrivBlacken(Pool pool, TraceSet traceSet, Seg seg);
extern Res PoolNoScan(Bool *totalReturn, ScanState ss, Pool pool, Seg seg);
extern Res PoolNoFix(Pool pool, ScanState ss, Seg seg, Ref *refIO);
extern void PoolNoReclaim(Pool pool, Trace trace, Seg seg);
extern Bool PoolNoReclaim(Pool pool, Trace trace, Seg seg);
extern void PoolTrivTraceEnd(Pool pool, Trace trace);
extern void PoolNoRampBegin(Pool pool, Buffer buf, Bool collectAll);
extern void PoolTrivRampBegin(Pool pool, Buffer buf, Bool collectAll);
@ -695,6 +695,9 @@ extern Res SegAlloc(Seg *segReturn, SegClass class, LocusPref pref,
ArgList args);
extern void SegFree(Seg seg);
extern Bool SegOfAddr(Seg *segReturn, Arena arena, Addr addr);
typedef Bool (*SegVisitor)(Seg seg, void *closure);
extern Bool SegTraverse(Arena arena, SegVisitor visit, void *closure);
extern void SegTraverseAndDelete(Arena arena, SegVisitor visit, void *closure);
extern Bool SegFirst(Seg *segReturn, Arena arena);
extern Bool SegNext(Seg *segReturn, Arena arena, Seg seg);
extern Bool SegNextOfRing(Seg *segReturn, Arena arena, Pool pool, Ring next);

View file

@ -218,7 +218,7 @@ typedef Res (*PoolFixMethod)(Pool pool, ScanState ss, Seg seg,
Ref *refIO);
typedef Res (*PoolFixEmergencyMethod)(Pool pool, ScanState ss,
Seg seg, Ref *refIO);
typedef void (*PoolReclaimMethod)(Pool pool, Trace trace, Seg seg);
typedef Bool (*PoolReclaimMethod)(Pool pool, Trace trace, Seg seg);
typedef void (*PoolTraceEndMethod)(Pool pool, Trace trace);
typedef void (*PoolRampBeginMethod)(Pool pool, Buffer buf, Bool collectAll);
typedef void (*PoolRampEndMethod)(Pool pool, Buffer buf);

View file

@ -433,7 +433,7 @@ Res PoolFixEmergency(Pool pool, ScanState ss, Seg seg, Addr *refIO)
/* PoolReclaim -- reclaim a segment in the pool */
void PoolReclaim(Pool pool, Trace trace, Seg seg)
Bool PoolReclaim(Pool pool, Trace trace, Seg seg)
{
AVERT_CRITICAL(Pool, pool);
AVERT_CRITICAL(Trace, trace);
@ -446,7 +446,7 @@ void PoolReclaim(Pool pool, Trace trace, Seg seg)
/* Should only be reclaiming segments which are still white. */
AVER_CRITICAL(TraceSetIsMember(SegWhite(seg), trace));
(*pool->class->reclaim)(pool, trace, seg);
return (*pool->class->reclaim)(pool, trace, seg);
}

View file

@ -529,12 +529,13 @@ Res PoolNoFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)
return ResUNIMPL;
}
void PoolNoReclaim(Pool pool, Trace trace, Seg seg)
/* PoolNoReclaim -- reclaim method for pool classes that can never be
 * condemned, so this must never actually be reached (NOTREACHED fires
 * if it is).  Returns FALSE, meaning "segment not freed", to satisfy
 * the PoolReclaimMethod contract even in checked varieties where
 * NOTREACHED does not abort.
 */
Bool PoolNoReclaim(Pool pool, Trace trace, Seg seg)
{
  AVERT(Pool, pool);
  AVERT(Trace, trace);
  AVERT(Seg, seg);
  NOTREACHED;
  return FALSE;
}
void PoolTrivTraceEnd(Pool pool, Trace trace)

View file

@ -1691,7 +1691,7 @@ returnRes:
/* amcReclaimNailed -- reclaim what you can from a nailed segment */
static void amcReclaimNailed(Pool pool, Trace trace, Seg seg)
static Bool amcReclaimNailed(Pool pool, Trace trace, Seg seg)
{
Addr p, limit;
Arena arena;
@ -1791,7 +1791,10 @@ static void amcReclaimNailed(Pool pool, Trace trace, Seg seg)
AVER(SegBuffer(seg) == NULL);
PoolGenFree(&gen->pgen, seg, 0, SegSize(seg), 0, Seg2amcSeg(seg)->deferred);
return TRUE;
}
return FALSE;
}
@ -1799,7 +1802,7 @@ static void amcReclaimNailed(Pool pool, Trace trace, Seg seg)
*
* See <design/poolamc/#reclaim>.
*/
static void AMCReclaim(Pool pool, Trace trace, Seg seg)
static Bool AMCReclaim(Pool pool, Trace trace, Seg seg)
{
AMC amc;
amcGen gen;
@ -1827,8 +1830,7 @@ static void AMCReclaim(Pool pool, Trace trace, Seg seg)
}
if(SegNailed(seg) != TraceSetEMPTY) {
amcReclaimNailed(pool, trace, seg);
return;
return amcReclaimNailed(pool, trace, seg);
}
/* We may not free a buffered seg. (But all buffered + condemned */
@ -1838,6 +1840,8 @@ static void AMCReclaim(Pool pool, Trace trace, Seg seg)
trace->reclaimSize += SegSize(seg);
PoolGenFree(&gen->pgen, seg, 0, SegSize(seg), 0, Seg2amcSeg(seg)->deferred);
return TRUE;
}

View file

@ -1580,7 +1580,7 @@ static void AMSBlacken(Pool pool, TraceSet traceSet, Seg seg)
/* AMSReclaim -- the pool class reclamation method */
static void AMSReclaim(Pool pool, Trace trace, Seg seg)
static Bool AMSReclaim(Pool pool, Trace trace, Seg seg)
{
AMS ams;
AMSSeg amsseg;
@ -1642,13 +1642,17 @@ static void AMSReclaim(Pool pool, Trace trace, Seg seg)
amsseg->colourTablesInUse = FALSE;
SegSetWhite(seg, TraceSetDel(SegWhite(seg), trace));
if (amsseg->freeGrains == grains && SegBuffer(seg) == NULL)
if (amsseg->freeGrains == grains && SegBuffer(seg) == NULL) {
/* No survivors */
PoolGenFree(&ams->pgen, seg,
AMSGrainsSize(ams, amsseg->freeGrains),
AMSGrainsSize(ams, amsseg->oldGrains),
AMSGrainsSize(ams, amsseg->newGrains),
FALSE);
return TRUE;
}
return FALSE;
}

View file

@ -1104,7 +1104,7 @@ static Res AWLFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)
/* AWLReclaim -- reclaim dead objects in an AWL segment */
static void AWLReclaim(Pool pool, Trace trace, Seg seg)
static Bool AWLReclaim(Pool pool, Trace trace, Seg seg)
{
Addr base;
AWL awl;
@ -1179,13 +1179,17 @@ static void AWLReclaim(Pool pool, Trace trace, Seg seg)
trace->preservedInPlaceSize += preservedInPlaceSize;
SegSetWhite(seg, TraceSetDel(SegWhite(seg), trace));
if (awlseg->freeGrains == awlseg->grains && buffer == NULL)
if (awlseg->freeGrains == awlseg->grains && buffer == NULL) {
/* No survivors */
PoolGenFree(&awl->pgen, seg,
AWLGrainsSize(awl, awlseg->freeGrains),
AWLGrainsSize(awl, awlseg->oldGrains),
AWLGrainsSize(awl, awlseg->newGrains),
FALSE);
return TRUE;
}
return FALSE;
}

View file

@ -310,7 +310,7 @@ static Res loSegCreate(LOSeg *loSegReturn, Pool pool, Size size,
* Could consider implementing this using Walk.
*/
static void loSegReclaim(LOSeg loseg, Trace trace)
static Bool loSegReclaim(LOSeg loseg, Trace trace)
{
Addr p, base, limit;
Bool marked;
@ -390,12 +390,16 @@ static void loSegReclaim(LOSeg loseg, Trace trace)
SegSetWhite(seg, TraceSetDel(SegWhite(seg), trace));
if (!marked)
if (!marked) {
PoolGenFree(&lo->pgen, seg,
LOGrainsSize(lo, loseg->freeGrains),
LOGrainsSize(lo, loseg->oldGrains),
LOGrainsSize(lo, loseg->newGrains),
FALSE);
return TRUE;
}
return FALSE;
}
/* This walks over _all_ objects in the heap, whether they are */
@ -777,7 +781,7 @@ static Res LOFix(Pool pool, ScanState ss, Seg seg, Ref *refIO)
}
static void LOReclaim(Pool pool, Trace trace, Seg seg)
static Bool LOReclaim(Pool pool, Trace trace, Seg seg)
{
LO lo;
LOSeg loseg;
@ -791,7 +795,7 @@ static void LOReclaim(Pool pool, Trace trace, Seg seg)
AVER(TraceSetIsMember(SegWhite(seg), trace));
loseg = SegLOSeg(seg);
loSegReclaim(loseg, trace);
return loSegReclaim(loseg, trace);
}

View file

@ -465,6 +465,41 @@ Bool SegOfAddr(Seg *segReturn, Arena arena, Addr addr)
}
/* SegTraverse -- visit all segments in the arena in address order */
/* SegTraverseClosure -- carries the caller's segment visitor and its
 * closure through the tree-visitor interface of TreeTraverse.
 */
typedef struct SegTraverseClosureStruct {
  SegVisitor visit;   /* segment visitor supplied by the caller */
  void *closure;      /* opaque closure forwarded to that visitor */
} SegTraverseClosureStruct, *SegTraverseClosure;

/* segTraverseVisit -- adapt a SegVisitor to the tree visitor
 * signature: recover the segment from its tree node and delegate.
 */
static Bool segTraverseVisit(Tree tree, void *closure)
{
  SegTraverseClosure stc = closure;
  Seg seg = segOfTree(tree);
  return stc->visit(seg, stc->closure);
}
/* SegTraverse -- visit all segments in the arena in address order.
 *
 * Wraps the caller's visitor in a SegTraverseClosure and walks the
 * arena's segment splay tree.  Propagates TreeTraverse's result --
 * presumably FALSE when the visitor aborted the walk; confirm against
 * TreeTraverse's documented contract.
 */
Bool SegTraverse(Arena arena, SegVisitor visit, void *closure)
{
  SegTraverseClosureStruct stc;

  stc.visit = visit;
  stc.closure = closure;

  return TreeTraverse(SplayTreeRoot(ArenaSegSplay(arena)),
                      SegCompare, SegKey, segTraverseVisit, &stc);
}
/* SegTraverseAndDelete -- visit all segments, removing some from the
 * segment tree.
 *
 * Like SegTraverse, but uses TreeTraverseAndDelete, which takes the
 * address of the tree root so it can unlink nodes during the walk --
 * presumably those for which the visitor returns TRUE (see its use
 * with reclaim visitors); confirm against TreeTraverseAndDelete.
 */
void SegTraverseAndDelete(Arena arena, SegVisitor visit, void *closure)
{
  SegTraverseClosureStruct stc;

  stc.visit = visit;
  stc.closure = closure;

  TreeTraverseAndDelete(&SplayTreeRoot(ArenaSegSplay(arena)),
                        segTraverseVisit, &stc);
}
/* SegFirst -- return the first seg in the arena
*
* This is used to start an iteration over all segs in the arena.

View file

@ -33,7 +33,7 @@ typedef struct SplayTreeStruct {
Tree root;
} SplayTreeStruct;
#define SplayTreeRoot(splay) RVALUE((splay)->root)
/* SplayTreeRoot -- the root node of a splay tree.  Deliberately not
 * wrapped in RVALUE: it must be usable as an lvalue so that callers
 * can pass &SplayTreeRoot(...) to functions that update the root
 * in place (e.g. TreeTraverseAndDelete in SegTraverseAndDelete). */
#define SplayTreeRoot(splay) ((splay)->root)
#define SplayTreeIsEmpty(splay) (SplayTreeRoot(splay) == TreeEMPTY)
extern Bool SplayTreeCheck(SplayTree splay);

View file

@ -415,41 +415,66 @@ failDefine:
* because some pools still use TraceAddWhite for the condemned set.
*
* @@@@ This function would be more efficient if there were a cheaper
* way to select the segments in a particular zone set. */
* way to select the segments in a particular zone set.
*/
/* TraceCondemnZonesClosure -- closure for traceCondemnZonesVisit */

typedef struct TraceCondemnZonesClosureStruct {
  Trace trace;           /* trace whose white set is being built */
  ZoneSet condemnedSet;  /* zones requested for condemnation */
  Res res;               /* failure code if the traversal stops early */
  Bool haveWhiteSegs;    /* TRUE iff ANY segment has been whitened */
} TraceCondemnZonesClosureStruct, *TraceCondemnZonesClosure;

/* traceCondemnZonesVisit -- whiten one segment if it lies entirely
 * within the condemned zone set.
 *
 * Visitor for SegTraverse.  Returns FALSE, aborting the traversal,
 * only when TraceAddWhite fails; the failure code is recorded in
 * tcz->res for the caller.
 */
static Bool traceCondemnZonesVisit(Seg seg, void *closure)
{
  TraceCondemnZonesClosure tcz = closure;
  Trace trace = tcz->trace;
  Arena arena = tcz->trace->arena;
  ZoneSet condemnedSet = tcz->condemnedSet;

  /* Segment should be black now. */
  AVER(!TraceSetIsMember(SegGrey(seg), trace));
  AVER(!TraceSetIsMember(SegWhite(seg), trace));

  /* A segment can only be white if it is GC-able. */
  /* This is indicated by the pool having the GC attribute */
  /* We only condemn segments that fall entirely within */
  /* the requested zone set. Otherwise, we would bloat the */
  /* foundation to no gain. Note that this doesn't exclude */
  /* any segments from which the condemned set was derived, */
  if(PoolHasAttr(SegPool(seg), AttrGC)
     && ZoneSetSuper(condemnedSet, ZoneSetOfSeg(arena, seg)))
  {
    Res res = TraceAddWhite(trace, seg);
    if(res != ResOK) {
      tcz->res = res;
      return FALSE;
    }
    /* Accumulate: only ever set the flag, never clear it.  (The
       previous code copied a function-local flag into the closure on
       every visit, clobbering the result of earlier visits, so the
       flag ended up describing only the last segment seen.) */
    tcz->haveWhiteSegs = TRUE;
  }
  return TRUE;
}
Res TraceCondemnZones(Trace trace, ZoneSet condemnedSet)
{
Seg seg;
Arena arena;
Res res;
TraceCondemnZonesClosureStruct tczStruct;
AVERT(Trace, trace);
AVER(condemnedSet != ZoneSetEMPTY);
AVER(trace->state == TraceINIT);
AVER(trace->white == ZoneSetEMPTY);
arena = trace->arena;
if(SegFirst(&seg, arena)) {
do {
/* Segment should be black now. */
AVER(!TraceSetIsMember(SegGrey(seg), trace));
AVER(!TraceSetIsMember(SegWhite(seg), trace));
/* A segment can only be white if it is GC-able. */
/* This is indicated by the pool having the GC attribute */
/* We only condemn segments that fall entirely within */
/* the requested zone set. Otherwise, we would bloat the */
/* foundation to no gain. Note that this doesn't exclude */
/* any segments from which the condemned set was derived, */
if(PoolHasAttr(SegPool(seg), AttrGC)
&& ZoneSetSuper(condemnedSet, ZoneSetOfSeg(arena, seg)))
{
res = TraceAddWhite(trace, seg);
if(res != ResOK)
goto failBegin;
}
} while (SegNext(&seg, arena, seg));
tczStruct.trace = trace;
tczStruct.condemnedSet = condemnedSet;
tczStruct.haveWhiteSegs = FALSE;
tczStruct.res = ResOK;
if (!SegTraverse(trace->arena, traceCondemnZonesVisit, &tczStruct)) {
AVER(tczStruct.res != ResOK);
AVER(TraceIsEmpty(trace)); /* See .whiten.fail. */
return tczStruct.res;
}
EVENT3(TraceCondemnZones, trace, condemnedSet, trace->white);
@ -458,10 +483,6 @@ Res TraceCondemnZones(Trace trace, ZoneSet condemnedSet)
AVER(ZoneSetSuper(condemnedSet, trace->white));
return ResOK;
failBegin:
AVER(TraceIsEmpty(trace)); /* See .whiten.fail. */
return res;
}
@ -890,53 +911,57 @@ void TraceDestroyFinished(Trace trace)
/* traceReclaim -- reclaim the remaining objects white for this trace */
/* traceReclaimVisit -- reclaim one segment if it is white for this
 * trace.
 *
 * Visitor for SegTraverseAndDelete.  Returns TRUE iff the pool's
 * reclaim method freed the whole segment -- presumably so that the
 * traversal removes the now-dangling node from the segment tree;
 * confirm against TreeTraverseAndDelete's contract.  Note the order
 * below: the trailing checks on seg are only valid on the FALSE path,
 * where the segment still exists.
 */
static Bool traceReclaimVisit(Seg seg, void *closure)
{
  Trace trace = closure;
  Pool pool;

  AVERT_CRITICAL(Trace, trace);
  AVERT_CRITICAL(Seg, seg);

  /* There shouldn't be any grey stuff left for this trace. */
  AVER_CRITICAL(!TraceSetIsMember(SegGrey(seg), trace));

  /* Segments not white for this trace need no reclaiming and must
     not be deleted from the tree. */
  if (!TraceSetIsMember(SegWhite(seg), trace))
    return FALSE;

  pool = SegPool(seg);
  AVER_CRITICAL(PoolHasAttr(pool, AttrGC));
  STATISTIC(++trace->reclaimCount);

  if (PoolReclaim(pool, trace, seg))
    return TRUE;  /* segment was freed by the pool's reclaim method */

  /* If the segment still exists, it should no longer be white. */
  /* TODO: The code from the class-specific reclaim methods to
     unwhiten the segment could in fact be moved here. */
  AVERT_CRITICAL(Seg, seg);
  AVER_CRITICAL(!TraceSetIsMember(SegWhite(seg), trace));
  return FALSE;
}
static void traceReclaim(Trace trace)
{
Arena arena;
Seg seg;
Ring node, nextNode;
AVERT(Trace, trace);
AVER(trace->state == TraceRECLAIM);
EVENT1(TraceReclaim, trace);
arena = trace->arena;
if(SegFirst(&seg, arena)) {
Pool pool;
Ring next;
do {
Addr base = SegBase(seg);
pool = SegPool(seg);
next = RingNext(SegPoolRing(seg));
/* There shouldn't be any grey stuff left for this trace. */
AVER_CRITICAL(!TraceSetIsMember(SegGrey(seg), trace));
EVENT1(TraceReclaim, trace);
if(TraceSetIsMember(SegWhite(seg), trace)) {
AVER_CRITICAL(PoolHasAttr(pool, AttrGC));
STATISTIC(++trace->reclaimCount);
PoolReclaim(pool, trace, seg);
/* If the segment still exists, it should no longer be white. */
/* Note that the seg returned by this SegOfAddr may not be */
/* the same as the one above, but in that case it's new and */
/* still shouldn't be white for this trace. */
/* The code from the class-specific reclaim methods to */
/* unwhiten the segment could in fact be moved here. */
{
Seg nonWhiteSeg = NULL; /* prevents compiler warning */
AVER_CRITICAL(!(SegOfAddr(&nonWhiteSeg, arena, base)
&& TraceSetIsMember(SegWhite(nonWhiteSeg), trace)));
UNUSED(nonWhiteSeg); /* <code/mpm.c#check.unused> */
}
}
} while(SegNextOfRing(&seg, arena, pool, next));
}
/* TODO: This isn't very nice, as it rebalances the segment splay
tree and destroys any optimisation discovered by splaying. */
SegTraverseAndDelete(arena, traceReclaimVisit, trace);
trace->state = TraceFINISHED;
arena = trace->arena;
/* Call each pool's TraceEnd method -- do end-of-trace work */
RING_FOR(node, &ArenaGlobals(arena)->poolRing, nextNode) {
RING_FOR(node, ArenaPoolRing(arena), nextNode) {
Pool pool = RING_ELT(Pool, arenaRing, node);
PoolTraceEnd(pool, trace);
}