Skip to content

Commit 2b6cfaa

Browse files
Fix object_visits calculation
1 parent 2a04eb4 commit 2b6cfaa

1 file changed

Lines changed: 69 additions & 36 deletions

File tree

Python/gc.c

Lines changed: 69 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -525,12 +525,18 @@ update_refs(PyGC_Head *containers)
525525
return candidates;
526526
}
527527

528+
/* Argument bundle for the visit_decref() traversal callback: the parent
 * object (used to identify the traversed object in assertion output) and
 * the per-generation stats block so the callback can count visits. */
struct visit_decref_context {
    PyObject *parent;                    /* object whose referents are being traversed */
    struct gc_generation_stats *stats;   /* accumulates object_visits */
};
532+
528533
/* A traversal callback for subtract_refs. */
529534
static int
530-
visit_decref(PyObject *op, void *parent)
535+
visit_decref(PyObject *op, void *arg)
531536
{
532-
OBJECT_STAT_INC(object_visits);
533-
_PyObject_ASSERT(_PyObject_CAST(parent), !_PyObject_IsFreed(op));
537+
struct visit_decref_context *ctx = (struct visit_decref_context *)arg;
538+
ctx->stats->object_visits += 1;
539+
_PyObject_ASSERT(ctx->parent, !_PyObject_IsFreed(op));
534540

535541
if (_PyObject_IS_GC(op)) {
536542
PyGC_Head *gc = AS_GC(op);
@@ -577,25 +583,35 @@ _PyGC_VisitFrameStack(_PyInterpreterFrame *frame, visitproc visit, void *arg)
577583
* reachable from outside containers, and so can't be collected.
578584
*/
579585
static void
subtract_refs(PyGC_Head *containers, struct gc_generation_stats *stats)
{
    traverseproc traverse;
    PyGC_Head *gc = GC_NEXT(containers);
    /* Walk every tracked object in the list and traverse its referents
     * with the visit_decref callback. */
    for (; gc != containers; gc = GC_NEXT(gc)) {
        PyObject *op = FROM_GC(gc);
        traverse = Py_TYPE(op)->tp_traverse;
        /* Per-object context: `parent` identifies `op` in assertion
         * failures; `stats` lets the callback bump object_visits. */
        struct visit_decref_context ctx = {
            .parent = op,
            .stats = stats
        };
        (void) traverse(op,
                        visit_decref,
                        &ctx);
    }
}
592602

603+
/* Argument bundle shared by the visit_reachable() and visit_move()
 * traversal callbacks: the list head the callback operates on, plus the
 * per-generation stats block for visit counting. */
struct visit_reachable_context {
    PyGC_Head *head;                     /* list the callback reads/moves objects relative to */
    struct gc_generation_stats *stats;   /* accumulates object_visits */
};
607+
593608
/* A traversal callback for move_unreachable. */
594609
static int
595610
visit_reachable(PyObject *op, void *arg)
596611
{
597-
PyGC_Head *reachable = arg;
598-
OBJECT_STAT_INC(object_visits);
612+
struct visit_reachable_context *ctx = (struct visit_reachable_context *)arg;
613+
ctx->stats->object_visits += 1;
614+
PyGC_Head *reachable = ctx->head;
599615
if (!_PyObject_IS_GC(op)) {
600616
return 0;
601617
}
@@ -667,7 +683,7 @@ visit_reachable(PyObject *op, void *arg)
667683
 * So we can not use gc_list_* functions for unreachable until we remove the flag.
668684
*/
669685
static void
670-
move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
686+
move_unreachable(PyGC_Head *young, PyGC_Head *unreachable, struct gc_generation_stats *stats)
671687
{
672688
// previous elem in the young list, used to restore gc_prev.
673689
PyGC_Head *prev = young;
@@ -682,6 +698,11 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
682698
* or to the right have been scanned yet.
683699
*/
684700

701+
struct visit_reachable_context ctx = {
702+
.head = young,
703+
.stats = stats
704+
};
705+
685706
validate_consistent_old_space(young);
686707
/* Record which old space we are in, and set NEXT_MASK_UNREACHABLE bit for convenience */
687708
uintptr_t flags = NEXT_MASK_UNREACHABLE | (gc->_gc_next & _PyGC_NEXT_MASK_OLD_SPACE_1);
@@ -703,7 +724,7 @@ move_unreachable(PyGC_Head *young, PyGC_Head *unreachable)
703724
// young->_gc_prev == gc. Don't do gc = GC_NEXT(gc) before!
704725
(void) traverse(op,
705726
visit_reachable,
706-
(void *)young);
727+
&ctx);
707728
// relink gc_prev to prev element.
708729
_PyGCHead_SET_PREV(gc, prev);
709730
// gc is not COLLECTING state after here.
@@ -831,8 +852,9 @@ clear_unreachable_mask(PyGC_Head *unreachable)
831852
static int
832853
visit_move(PyObject *op, void *arg)
833854
{
834-
PyGC_Head *tolist = arg;
835-
OBJECT_STAT_INC(object_visits);
855+
struct visit_reachable_context *ctx = (struct visit_reachable_context *)arg;
856+
PyGC_Head *tolist = ctx->head;
857+
ctx->stats->object_visits += 1;
836858
if (_PyObject_IS_GC(op)) {
837859
PyGC_Head *gc = AS_GC(op);
838860
if (gc_is_collecting(gc)) {
@@ -847,16 +869,20 @@ visit_move(PyObject *op, void *arg)
847869
* into finalizers set.
848870
*/
849871
static void
move_legacy_finalizer_reachable(PyGC_Head *finalizers, struct gc_generation_stats *stats)
{
    /* Reuses the reachable-context shape: visit_move() reads ->head as the
     * destination list and bumps ->stats->object_visits per visited object. */
    struct visit_reachable_context ctx = {
        .head = finalizers,
        .stats = stats
    };
    traverseproc traverse;
    PyGC_Head *gc = GC_NEXT(finalizers);
    for (; gc != finalizers; gc = GC_NEXT(gc)) {
        /* Note that the finalizers list may grow during this. */
        traverse = Py_TYPE(FROM_GC(gc))->tp_traverse;
        (void) traverse(FROM_GC(gc),
                        visit_move,
                        &ctx);
    }
}
862888

@@ -1244,15 +1270,15 @@ flag is cleared (for example, by using 'clear_unreachable_mask' function or
12441270
by a call to 'move_legacy_finalizers'), the 'unreachable' list is not a normal
12451271
list and we can not use most gc_list_* functions for it. */
12461272
static inline Py_ssize_t
1247-
deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) {
1273+
deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable, struct gc_generation_stats *stats) {
12481274
validate_list(base, collecting_clear_unreachable_clear);
12491275
/* Using ob_refcnt and gc_refs, calculate which objects in the
12501276
* container set are reachable from outside the set (i.e., have a
12511277
* refcount greater than 0 when all the references within the
12521278
* set are taken into account).
12531279
*/
12541280
Py_ssize_t candidates = update_refs(base); // gc_prev is used for gc_refs
1255-
subtract_refs(base);
1281+
subtract_refs(base, stats);
12561282

12571283
/* Leave everything reachable from outside base in base, and move
12581284
* everything else (in base) to unreachable.
@@ -1289,7 +1315,7 @@ deduce_unreachable(PyGC_Head *base, PyGC_Head *unreachable) {
12891315
* the reachable objects instead. But this is a one-time cost, probably not
12901316
* worth complicating the code to speed just a little.
12911317
*/
1292-
move_unreachable(base, unreachable); // gc_prev is pointer again
1318+
move_unreachable(base, unreachable, stats); // gc_prev is pointer again
12931319
validate_list(base, collecting_clear_unreachable_clear);
12941320
validate_list(unreachable, collecting_set_unreachable_set);
12951321
return candidates;
@@ -1310,7 +1336,8 @@ PREV_MARK_COLLECTING set, but the objects in this set are going to be removed so
13101336
we can skip the expense of clearing the flag to avoid extra iteration. */
13111337
static inline void
13121338
handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable,
1313-
PyGC_Head *old_generation)
1339+
PyGC_Head *old_generation,
1340+
struct gc_generation_stats *stats)
13141341
{
13151342
// Remove the PREV_MASK_COLLECTING from unreachable
13161343
// to prepare it for a new call to 'deduce_unreachable'
@@ -1320,7 +1347,7 @@ handle_resurrected_objects(PyGC_Head *unreachable, PyGC_Head* still_unreachable,
13201347
// have the PREV_MARK_COLLECTING set, but the objects are going to be
13211348
// removed so we can skip the expense of clearing the flag.
13221349
PyGC_Head* resurrected = unreachable;
1323-
deduce_unreachable(resurrected, still_unreachable);
1350+
deduce_unreachable(resurrected, still_unreachable, stats);
13241351
clear_unreachable_mask(still_unreachable);
13251352

13261353
// Move the resurrected objects to the old generation for future collection.
@@ -1432,14 +1459,15 @@ struct container_and_flag {
14321459
PyGC_Head *container;
14331460
int visited_space;
14341461
intptr_t size;
1462+
struct gc_generation_stats *stats;
14351463
};
14361464

14371465
/* A traversal callback for adding to container */
14381466
static int
14391467
visit_add_to_container(PyObject *op, void *arg)
14401468
{
1441-
OBJECT_STAT_INC(object_visits);
14421469
struct container_and_flag *cf = (struct container_and_flag *)arg;
1470+
cf->stats->object_visits += 1;
14431471
int visited = cf->visited_space;
14441472
assert(visited == get_gc_state()->visited_space);
14451473
if (!_Py_IsImmortal(op) && _PyObject_IS_GC(op)) {
@@ -1455,12 +1483,16 @@ visit_add_to_container(PyObject *op, void *arg)
14551483
}
14561484

14571485
static intptr_t
1458-
expand_region_transitively_reachable(PyGC_Head *container, PyGC_Head *gc, GCState *gcstate)
1486+
expand_region_transitively_reachable(PyGC_Head *container,
1487+
PyGC_Head *gc,
1488+
GCState *gcstate,
1489+
struct gc_generation_stats *stats)
14591490
{
14601491
struct container_and_flag arg = {
14611492
.container = container,
14621493
.visited_space = gcstate->visited_space,
1463-
.size = 0
1494+
.size = 0,
1495+
.stats = stats
14641496
};
14651497
assert(GC_NEXT(gc) == container);
14661498
while (gc != container) {
@@ -1529,13 +1561,14 @@ move_to_reachable(PyObject *op, PyGC_Head *reachable, int visited_space)
15291561
}
15301562

15311563
static intptr_t
1532-
mark_all_reachable(PyGC_Head *reachable, PyGC_Head *visited, int visited_space)
1564+
mark_all_reachable(PyGC_Head *reachable, PyGC_Head *visited, int visited_space, struct gc_generation_stats *stats)
15331565
{
15341566
// Transitively traverse all objects from reachable, until empty
15351567
struct container_and_flag arg = {
15361568
.container = reachable,
15371569
.visited_space = visited_space,
1538-
.size = 0
1570+
.size = 0,
1571+
.stats = stats
15391572
};
15401573
while (!gc_list_is_empty(reachable)) {
15411574
PyGC_Head *gc = _PyGCHead_NEXT(reachable);
@@ -1552,7 +1585,7 @@ mark_all_reachable(PyGC_Head *reachable, PyGC_Head *visited, int visited_space)
15521585
}
15531586

15541587
static intptr_t
1555-
mark_stacks(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, bool start)
1588+
mark_stacks(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, bool start, struct gc_generation_stats *stats)
15561589
{
15571590
PyGC_Head reachable;
15581591
gc_list_init(&reachable);
@@ -1605,13 +1638,13 @@ mark_stacks(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, b
16051638
ts = PyThreadState_Next(ts);
16061639
HEAD_UNLOCK(runtime);
16071640
}
1608-
objects_marked += mark_all_reachable(&reachable, visited, visited_space);
1641+
objects_marked += mark_all_reachable(&reachable, visited, visited_space, stats);
16091642
assert(gc_list_is_empty(&reachable));
16101643
return objects_marked;
16111644
}
16121645

16131646
static intptr_t
1614-
mark_global_roots(PyInterpreterState *interp, PyGC_Head *visited, int visited_space)
1647+
mark_global_roots(PyInterpreterState *interp, PyGC_Head *visited, int visited_space, struct gc_generation_stats *stats)
16151648
{
16161649
PyGC_Head reachable;
16171650
gc_list_init(&reachable);
@@ -1628,19 +1661,19 @@ mark_global_roots(PyInterpreterState *interp, PyGC_Head *visited, int visited_sp
16281661
objects_marked += move_to_reachable(types->for_extensions.initialized[i].tp_dict, &reachable, visited_space);
16291662
objects_marked += move_to_reachable(types->for_extensions.initialized[i].tp_subclasses, &reachable, visited_space);
16301663
}
1631-
objects_marked += mark_all_reachable(&reachable, visited, visited_space);
1664+
objects_marked += mark_all_reachable(&reachable, visited, visited_space, stats);
16321665
assert(gc_list_is_empty(&reachable));
16331666
return objects_marked;
16341667
}
16351668

16361669
static intptr_t
1637-
mark_at_start(PyThreadState *tstate)
1670+
mark_at_start(PyThreadState *tstate, struct gc_generation_stats *stats)
16381671
{
16391672
// TO DO -- Make this incremental
16401673
GCState *gcstate = &tstate->interp->gc;
16411674
PyGC_Head *visited = &gcstate->old[gcstate->visited_space].head;
1642-
Py_ssize_t objects_marked = mark_global_roots(tstate->interp, visited, gcstate->visited_space);
1643-
objects_marked += mark_stacks(tstate->interp, visited, gcstate->visited_space, true);
1675+
Py_ssize_t objects_marked = mark_global_roots(tstate->interp, visited, gcstate->visited_space, stats);
1676+
objects_marked += mark_stacks(tstate->interp, visited, gcstate->visited_space, true, stats);
16441677
gcstate->work_to_do -= objects_marked;
16451678
gcstate->phase = GC_PHASE_COLLECT;
16461679
validate_spaces(gcstate);
@@ -1686,7 +1719,7 @@ gc_collect_increment(PyThreadState *tstate, struct gc_generation_stats *stats)
16861719
}
16871720
untrack_tuples(&gcstate->young.head);
16881721
if (gcstate->phase == GC_PHASE_MARK) {
1689-
Py_ssize_t objects_marked = mark_at_start(tstate);
1722+
Py_ssize_t objects_marked = mark_at_start(tstate, stats);
16901723
stats->objects_transitively_reachable += objects_marked;
16911724
stats->candidates += objects_marked;
16921725
gcstate->work_to_do -= objects_marked;
@@ -1701,7 +1734,7 @@ gc_collect_increment(PyThreadState *tstate, struct gc_generation_stats *stats)
17011734
if (scale_factor < 2) {
17021735
scale_factor = 2;
17031736
}
1704-
intptr_t objects_marked = mark_stacks(tstate->interp, visited, gcstate->visited_space, false);
1737+
intptr_t objects_marked = mark_stacks(tstate->interp, visited, gcstate->visited_space, false, stats);
17051738
stats->objects_transitively_reachable += objects_marked;
17061739
gcstate->work_to_do -= objects_marked;
17071740
gc_list_set_space(&gcstate->young.head, gcstate->visited_space);
@@ -1717,7 +1750,7 @@ gc_collect_increment(PyThreadState *tstate, struct gc_generation_stats *stats)
17171750
increment_size++;
17181751
assert(!_Py_IsImmortal(FROM_GC(gc)));
17191752
gc_set_old_space(gc, gcstate->visited_space);
1720-
increment_size += expand_region_transitively_reachable(&increment, gc, gcstate);
1753+
increment_size += expand_region_transitively_reachable(&increment, gc, gcstate, stats);
17211754
}
17221755
stats->objects_not_transitively_reachable += increment_size;
17231756
validate_list(&increment, collecting_clear_unreachable_clear);
@@ -1781,7 +1814,7 @@ gc_collect_region(PyThreadState *tstate,
17811814
assert(!_PyErr_Occurred(tstate));
17821815

17831816
gc_list_init(&unreachable);
1784-
stats->candidates = deduce_unreachable(from, &unreachable);
1817+
stats->candidates = deduce_unreachable(from, &unreachable, stats);
17851818
validate_consistent_old_space(from);
17861819
untrack_tuples(from);
17871820

@@ -1803,7 +1836,7 @@ gc_collect_region(PyThreadState *tstate,
18031836
* unreachable objects reachable *from* those are also uncollectable,
18041837
* and we move those into the finalizers list too.
18051838
*/
1806-
move_legacy_finalizer_reachable(&finalizers);
1839+
move_legacy_finalizer_reachable(&finalizers, stats);
18071840
validate_list(&finalizers, collecting_clear_unreachable_clear);
18081841
validate_list(&unreachable, collecting_set_unreachable_clear);
18091842
/* Print debugging information. */
@@ -1826,7 +1859,7 @@ gc_collect_region(PyThreadState *tstate,
18261859
* objects that are still unreachable */
18271860
PyGC_Head final_unreachable;
18281861
gc_list_init(&final_unreachable);
1829-
handle_resurrected_objects(&unreachable, &final_unreachable, to);
1862+
handle_resurrected_objects(&unreachable, &final_unreachable, to, stats);
18301863

18311864
/* Clear weakrefs to objects in the unreachable set. See the comments
18321865
* above handle_weakref_callbacks() for details.

0 commit comments

Comments
 (0)