gc: use proportional instead of fixed threshold

The problem with a fixed count threshold is that it results in
performance and memory overheads whose proportions vary with the
usage pattern of each script.

E.g. if a script keeps using the same 1M objects, then a threshold of
10K objects results in GC cycles which free only about 1% of the
objects, which is hugely wasteful in terms of performance. On the
other hand, if a script only uses 100 objects, then a threshold of 10K
means it uses 100 times more memory than it actually needs before GC
triggers.

Now the threshold is a target memory-usage factor (relative to the
minimum needed memory) which the GC aims for. This keeps the
proportions of the GC impact constant.

The default aims at a memory usage factor of 5, i.e. 80% garbage
and 20% remaining on each GC cycle.
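
A minimal standalone sketch of that arithmetic (the JS_GCFACTOR name
and the remaining * factor computation come from the diffs below;
next_threshold() is just an illustrative helper, not a mujs function):

#include <stdio.h>

#define JS_GCFACTOR 5.0	/* memory overhead factor, as defined in jsi.h below */

/* After a GC cycle that leaves `remaining` live objects, the next cycle
 * triggers once the total tracked-object count exceeds remaining * factor. */
static unsigned int next_threshold(unsigned int remaining)
{
	return (unsigned int)(remaining * JS_GCFACTOR);
}

int main(void)
{
	/* 100 survivors and a factor of 5.0 -> next GC at 500 objects,
	 * i.e. roughly 400 garbage (80%) and 100 still live (20%). */
	printf("next GC at %u objects\n", next_threshold(100));
	return 0;
}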

The factor is only a target/goal because the actual overhead is not
known until GC completes. However, most scripts exhibit behavior that
is consistent enough that the real overhead stays within 10% of the
goal even when the usage pattern changes over time.

Within the v8 bench test suite, the actual GC threshold count varies
between ~50K and ~500K. Only one test (raytrace.js) stabilizes below
10K (the previous fixed default), at about 9K, and its score decreases
by ~5%.

The splay.js score increases about 12-fold (or 50-fold compared to the
previous commit, which counts properties), other tests improve quite a
bit less or not at all, and the overall score increases by nearly 40%.

Also, change the count type from int to unsigned int to get twice the
range. Preferably we should make it even bigger, maybe uint64_t.

For instance, the splay.js v8 bench test surpasses a million
non-garbage allocations within a few seconds (that's why its score
increased so much with a proportional overhead), and the default GC
threshold is then 5 times that number.
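
Putting the pieces together, here is a hedged sketch of the
counter/threshold lifecycle (field and macro names are borrowed from
the diffs below; collect_garbage() is a toy stand-in, not the real
collector, and simply pretends that 1000 objects stay live):

#include <stdio.h>

#define JS_GCFACTOR 5.0

struct state {
	unsigned int gccounter;	/* objects currently tracked (live + garbage) */
	unsigned int gcthresh;	/* trigger point computed after the last GC */
};

/* Toy collector: pretend the script keeps 1000 objects live. */
static unsigned int collect_garbage(struct state *J)
{
	(void)J;
	return 1000;
}

static void maybe_gc(struct state *J)
{
	if (J->gccounter > J->gcthresh) {	/* same check as in the run loop */
		unsigned int remaining = collect_garbage(J);
		J->gccounter = remaining;	/* keep counting from the survivors */
		J->gcthresh = remaining * JS_GCFACTOR;	/* proportional threshold */
	}
}

int main(void)
{
	struct state J = { 0, 0 };
	int i;
	for (i = 0; i < 1000000; i++) {	/* simulate a million allocations */
		++J.gccounter;
		maybe_gc(&J);
	}
	/* With 1000 survivors and a factor of 5.0 the threshold settles at 5000:
	 * each cycle then frees ~4000 garbage objects (80%) and keeps 1000 (20%). */
	printf("threshold settles at %u\n", J.gcthresh);
	return 0;
}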

Avi Halachmi (:avih), 2020-03-28 16:06:17 +03:00
committed by Tor Andersson
parent c4c1524e97
commit b5eccea611
4 changed files with 24 additions and 10 deletions

jsgc.c (17 lines changed)

@@ -130,8 +130,8 @@ void js_gc(js_State *J, int report)
 js_Object *obj, *nextobj, **prevnextobj;
 js_String *str, *nextstr, **prevnextstr;
 js_Environment *env, *nextenv, **prevnextenv;
-int nenv = 0, nfun = 0, nobj = 0, nstr = 0, nprop = 0;
-int genv = 0, gfun = 0, gobj = 0, gstr = 0, gprop = 0;
+unsigned int nenv = 0, nfun = 0, nobj = 0, nstr = 0, nprop = 0;
+unsigned int genv = 0, gfun = 0, gobj = 0, gstr = 0, gprop = 0;
 int mark;
 int i;
@@ -141,8 +141,6 @@ void js_gc(js_State *J, int report)
 return;
 }
-J->gccounter = 0;
 mark = J->gcmark = J->gcmark == 1 ? 2 : 1;
 /* Add initial roots. */
@@ -238,10 +236,17 @@ void js_gc(js_State *J, int report)
 ++nstr;
 }
+unsigned int ntot = nenv + nfun + nobj + nstr + nprop;
+unsigned int gtot = genv + gfun + gobj + gstr + gprop;
+unsigned int remaining = ntot - gtot;
+J->gccounter = remaining;
+J->gcthresh = remaining * JS_GCFACTOR;
 if (report) {
 char buf[256];
-snprintf(buf, sizeof buf, "garbage collected: %d/%d envs, %d/%d funs, %d/%d objs, %d/%d props, %d/%d strs",
-genv, nenv, gfun, nfun, gobj, nobj, gprop, nprop, gstr, nstr);
+snprintf(buf, sizeof buf, "garbage collected (%d%%): %d/%d envs, %d/%d funs, %d/%d objs, %d/%d props, %d/%d strs",
+100*gtot/ntot, genv, nenv, gfun, nfun, gobj, nobj, gprop, nprop, gstr, nstr);
 js_report(J, buf);
 }
 }

jsi.h (14 lines changed)

@@ -78,8 +78,16 @@ typedef struct js_StackTrace js_StackTrace;
 #ifndef JS_TRYLIMIT
 #define JS_TRYLIMIT 64 /* exception stack size */
 #endif
-#ifndef JS_GCLIMIT
-#define JS_GCLIMIT 10000 /* run gc cycle every N allocations */
+#ifndef JS_GCFACTOR
+/*
+ * GC will try to trigger when memory usage is this value times the minimum
+ * needed memory. E.g. if there are 100 remaining objects after GC and this
+ * value is 5.0, then the next GC will trigger when the overall number is 500.
+ * I.e. a value of 5.0 aims at 80% garbage, 20% remain-used on each GC.
+ * The bigger the value the less impact GC has on overall performance, but more
+ * memory is used and individual GC pauses are longer (but fewer).
+ */
+#define JS_GCFACTOR 5.0 /* memory overhead factor >= 1.0 */
 #endif
 #ifndef JS_ASTLIMIT
 #define JS_ASTLIMIT 100 /* max nested expressions */
@@ -233,7 +241,7 @@ struct js_State
 /* garbage collector list */
 int gcpause;
 int gcmark;
-int gccounter;
+unsigned int gccounter, gcthresh;
 js_Environment *gcenv;
 js_Function *gcfun;
 js_Object *gcobj;


@@ -1356,7 +1356,7 @@ static void jsR_run(js_State *J, js_Function *F)
 J->strict = F->strict;
 while (1) {
-if (J->gccounter > JS_GCLIMIT)
+if (J->gccounter > J->gcthresh)
 js_gc(J, 0);
 J->trace[J->tracetop].line = *pc++;


@@ -285,6 +285,7 @@ js_State *js_newstate(js_Alloc alloc, void *actx, int flags)
 J->gcmark = 1;
 J->nextref = 0;
+J->gcthresh = 0; /* reaches stability within ~ 2-5 GC cycles */
 J->R = jsV_newobject(J, JS_COBJECT, NULL);
 J->G = jsV_newobject(J, JS_COBJECT, NULL);