1/* BEGIN LICENSE BLOCK
2 * Version: CMPL 1.1
3 *
4 * The contents of this file are subject to the Cisco-style Mozilla Public
5 * License Version 1.1 (the "License"); you may not use this file except
6 * in compliance with the License.  You may obtain a copy of the License
7 * at www.eclipse-clp.org/license.
8 *
9 * Software distributed under the License is distributed on an "AS IS"
10 * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied.  See
11 * the License for the specific language governing rights and limitations
12 * under the License.
13 *
14 * The Original Code is  The ECLiPSe Constraint Logic Programming System.
15 * The Initial Developer of the Original Code is  Cisco Systems, Inc.
16 * Portions created by the Initial Developer are
17 * Copyright (C) 1988-2006 Cisco Systems, Inc.  All Rights Reserved.
18 *
19 * Contributor(s):
20 *
21 * END LICENSE BLOCK */
22
23/*
24 * SEPIA SOURCE FILE
25 *
26 * $Id: gc_stacks.c,v 1.6 2013/03/17 12:09:59 jschimpf Exp $
27 *
28 * IDENTIFICATION	gc_stacks.c
29 *
30 * DESCRIPTION		SEPIA stack garbage collector
31 *			Please refer to report IR-LP-13-26
32 *
 * CHANGE NOTE:	Due to the general design, marking twice from the same
 *		root pword is not allowed. Normally this is no problem,
 *		since the traversal algorithm guarantees that every root
 *		is visited only once during marking (e.g. choicepoints).
 *		Where a single traversal cannot be guaranteed (e.g. marking
 *		from "old" locations, as done in mark_from_trail(), or
 *		marking environments multiple times in different states
 *		of activity), we use ALREADY_MARKED_FROM bits to remember
 *		that a root has already been used for marking.
 *		Note that this requires all trailed items (except those
 *		that are only trailed via simple TRAILED_WORD32 value trails)
 *		to have tags! This is the reason that abstract machine
 *		"registers" like WL, POSTED, etc. have tags.
46 *
47 * CONTENTS:	Stack garbage collector
48 *
49 *			collect_stacks()
50 *
51 *		Stack overflow handling routines
52 *
53 *			trail_ov()
54 *			global_ov()
55 *			final_overflow()
56 *			local_ov()
57 *			control_ov()
58 *
59 *		Traversal functions for dictionary collector
60 *
61 *			mark_dids_from_pwords()
62 *			mark_dids_from_stacks()
63 *
64 *
 * AUTHOR		DATE	REASON
 * Joachim Schimpf	880706	Created file.
67 *
68 */
69
70#define DEBUG_GC
71#define INCR_GC_LIMIT	16
72
73/*
74 * INCLUDES:
75 */
76
77#include "config.h"
78#include "os_support.h"
79#include "sepia.h"
80#include "types.h"
81#include "embed.h"
82#include "error.h"
83#include "mem.h"
84#include "dict.h"
85#include "ec_io.h"
86#include "opcode.h"
87#include "emu_export.h"
88
89/*
90 * extern declarations
91 */
92
93/*
94 * global variables
95 */
96
97uword
98#ifdef DEBUG_GC
99	stop_at_ = MAX_U_WORD,
100#endif
	incremental_ = 0,	/* number of consecutive incremental GCs */
102	collections_ = 0,	/* statistics	*/
103	average_area_ = 0,
104	collection_time_ = 0;
105
106double	average_ratio_ = 1.0,
107	total_garbage_ = 0;
108
109
110/*
111 * static functions
112 */
113
114static void
115	make_choicepoint(word ar),
116	pop_choicepoint(void),
117	non_marking_reference(pword **ref),
118	mark_from_trail(control_ptr GCB),
119	_mark_from_global_variables(void),
120	mark_from(word tag, pword *ref, int ref_in_segment),
121	compact_and_update(void),
122	compact_trail(register pword **garbage_list),
123	reset_env_marks(control_ptr GCB),
124	update_trail_ptrs(control_ptr GCB),
125	ov_reset(void);
126
127static pword
128	** early_untrail(control_ptr GCB, register pword **tr, control_ptr fp, pword **garbage_list, word *trail_garbage),
129	** mark_from_control_frames(control_ptr GCB, word *trail_garb_count);
130
131
132/*
133 * macros
134 */
135
136#define Chp_Tg(b)	(((b).top - 1)->frame.chp->tg)
137#define Chp_Tt(b)	(((b).top - 1)->frame.chp->tt)
138#define Chp_Sp(b)	(((b).top - 1)->frame.chp->sp)
139#define Chp_E(b)	(((b).top - 1)->frame.chp->e)
140
141#define PrevEnv(e)	(*(pword **)(e))
142
143/* this macro assumes that GCTG = Chp_Tg(GCB) */
144#define InCurrentSegment(ptr) \
145	((ptr) >= GCTG && (ptr) < TG)
146
147#define Set_Bit(mask,pw)	(pw)->tag.kernel |= (mask);
148#define Clr_Bit(mask,pw)	(pw)->tag.kernel &= ~(mask);
149
150#define Marked(tag)		((tag) & MARK)
151#define IsLink(tag)		((tag) & LINK)
152
153#define TMIN		TUNIV
154#define TMAX		TBUFFER
155
156/*
157 * Caution: MARK and LINK bits are sometimes used for other purposes.
158 * This should not lead to conflicts, but be careful when changing things!
159 * ALREADY_MARKED_FROM is the same as MARK, but only used on tags of pwords
160 * outside the collection segment, which are never MARKed, so this is safe.
161 * MARK_FULL_DE is the same as LINK, but only used in the combination
162 * MARK_FULL_DE|TSUSP in the tag-argument of mark_from().  This is ok
163 * since valid tags never have the LINK bit set.
164 */
165#define MARK_FULL_DE		LINK
166#define ALREADY_MARKED_FROM	MARK
167
168#define AlreadyMarkedFrom(tag)	((tag) & ALREADY_MARKED_FROM)
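
/*
 * Illustrative sketch only (disabled, not part of the collector): how the
 * bit macros above are meant to cooperate on a root pword lying outside
 * the collection segment.  The root pointer and the enclosing function
 * are hypothetical.
 */
#if 0
static void
_already_marked_from_sketch(pword *root)   /* root outside the collection segment */
{
    if (!AlreadyMarkedFrom(root->tag.kernel))
    {
	Set_Bit(ALREADY_MARKED_FROM, root);	/* remember the first visit */
	/* ... use root for marking, exactly once ... */
    }
    /* the second trail traversal (early_untrail) clears the bit again: */
    Clr_Bit(ALREADY_MARKED_FROM, root);
}
#endif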
169
170/*
 * This macro is meant to be applied only to a pword that is known
 * to be still unmarked.
173 */
174
175#define Mark_from(tag, ref, in_seg) \
176{\
177    if (ISPointer(tag))\
178	mark_from(tag,ref,in_seg);\
179}
180
181#define Mark_from_pointer(tag, ref, in_seg) \
182{\
183    mark_from((word) (tag),(pword *)(ref),in_seg);\
184}
185
186
187#define PointerToLink(oldtag,ptr) \
188    ((oldtag) & MARK | (word)ptr >> 2 | LINK)
189
190#define PointerToMarkedLink(ptr) \
191    ((word)ptr >> 2 | (MARK|LINK))
192
193
194#define LinkToPointer(link) \
195    (pword *)((link) & SIGN_BIT | ((link) << 2 & ~SIGN_BIT))
196
197
198#define Into_Reloc_Chain_Nonmarking(target, ref) \
199{\
200    (ref)->val.all = (target)->tag.all;\
201    (target)->tag.all = PointerToLink((target)->tag.all,ref);\
202}
203
204#define Into_Reloc_Chain(target, ref) \
205{\
206    (ref)->val.all = (target)->tag.all;\
207    (target)->tag.all = PointerToMarkedLink(ref);\
208}
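
/*
 * (Sketch of the relocation chain idea, for orientation: every reference
 * to a target pword is linked into a chain headed by the target's tag
 * word.  Linking in a reference saves the target's current tag word in
 * the reference's value word and stores a LINKed pointer back to the
 * reference in the target's tag.  The chain therefore ends at the first
 * word without a LINK bit, i.e. the target's original tag, so the
 * compaction phase can walk it and update all references once the
 * target's new address is known.)
 */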
209
210
211/* Environment descriptors and corresponding access macros.
212 * Environment descriptors occur in call and retry/trust_inline
213 * instructions. They indicate which parts of an environment are active,
214 * and consist of an environment size or an activity bitmap (EAM).  */
215
216/* access environment descriptor, given code pointer */
217#define EnvDescPP(pp)	(*((word*)(pp)))
218/* access environment descriptor, given stack pointer to return address */
219#define EnvDesc(sp)	EnvDescPP(*(vmcode**)(sp) - 1)
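
/*
 * Illustrative sketch only (disabled): how such a descriptor is decoded.
 * It either holds a plain environment size (all slots active) or refers
 * to an environment activity bitmap (EAM) that is scanned chunk by chunk.
 * The real traversal is the Walk_Env_Chain macro further down; the
 * visit_slot() callback here is hypothetical.
 */
#if 0
static void
_edesc_sketch(word edesc, pword *env)
{
    pword *pw;
    if (EdescIsSize(edesc)) {
	word sz = EdescSize(edesc,env);		/* all sz slots are active */
	for (pw = env - sz; pw < env; pw++)
	    visit_slot(pw);
    } else {
	uword *eam_ptr = EdescEamPtr(edesc);	/* activity bitmap */
	pw = env;
	do {
	    int i = EAM_CHUNK_SZ;
	    uword eam = EamPtrEam(eam_ptr);
	    for (; eam; --i) {
		--pw;
		if (eam & 1)			/* slot is active */
		    visit_slot(pw);
		eam >>= 1;
	    }
	    pw -= i;				/* skip the inactive rest */
	} while (EamPtrNext(eam_ptr));
    }
}
#endif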
220
221/*------------------------------------------------------------------
222 * Debugging the GC
223 *------------------------------------------------------------------*/
224
225#ifdef DEBUG_GC
226
227#define NO	0
228#define YES	1
229
230#define Check_Pointer(ptr) \
231    if ((ptr) > TG && (ptr) < g_emu_.tg_limit)\
232	_gc_error("invalid pointer encountered\n");
233
234#define Check_Tag_Range(target_tag) \
235    if (TagTypeC(target_tag) < TMIN || TagTypeC(target_tag) > TMAX)\
236	_gc_error1("invalid tag (0x%x)\n", target_tag);
237
238#define Check_Tag(target_tag) \
239    if (IsLink(target_tag))\
240	_gc_error1("unexpected unmarked link (0x%x)\n", target_tag);
241
242#define Check_Functor(target_tag) \
243    if (TagTypeC(target_tag) != TDICT)\
244	_gc_error("invalid structure reference\n");
245
246#define Check_Susp(target_tag) \
247    if (TagTypeC(target_tag) != TDE)\
248	_gc_error("invalid suspension pointer\n");
249
250#define Check_Size(esize) \
251    if ((uword)esize > 1000000) {\
252	p_fprintf(current_err_,\
253		"GC warning: unlikely environment size (%" W_MOD "x %" W_MOD "x)\n",\
254			edesc,esize);\
255	ec_flush(current_err_);\
256    }
257
258#else /* DEBUG_GC */
259
260#define Check_Pointer(ptr)
261#define Check_Tag_Range(target_tag)
262#define Check_Tag(target_tag)
263#define Check_Functor(target_tag)
264#define Check_Susp(target_tag)
265#define Check_Size(esize)
266
267#endif /* DEBUG_GC */
268
269#define Print_Err(msg)		_gc_error(msg);
270#define Print_Err1(msg, arg)	_gc_error1(msg, arg);
271
272static void
273_gc_error(char *msg)
274{
275    (void) ec_outfs(current_err_,"GC internal error: ");
276    (void) ec_outfs(current_err_,msg);
277    ec_flush(current_err_);
278}
279
280static void
281_gc_error1(char *msg, word arg)
282{
283    (void) ec_outfs(current_err_,"GC internal error: ");
284    p_fprintf(current_err_, msg, arg);
285    ec_flush(current_err_);
286}
287
288
289/*------------------------------------------------------------------
290 * GC builtins
291 *------------------------------------------------------------------*/
292
293/*
294 * set or query the GC interval (in bytes!)
295 */
296
297static int
298p_gc_interval(value val, type tag)
299{
300    if (IsRef(tag))
301    {
302	Return_Unify_Integer(val, tag, TG_SEG * sizeof(pword));
303    }
304    else
305    {
	Check_Integer(tag);
308	    /*
309	     * update TG_SL: if the new value is below TG,
310	     * the next overflow check invokes the GC
311	     */
312	if (val.nint < sizeof(pword))
313		{ Bip_Error(RANGE_ERROR); }
314	TG_SEG = val.nint / sizeof(pword);
315	if (TG_SEG > (pword *) g_emu_.global_trail[1].start - (pword *) g_emu_.global_trail[0].start)
316	    TG_SEG = (pword *) g_emu_.global_trail[1].start - (pword *) g_emu_.global_trail[0].start;
317	Succeed_;
318    }
319}
320
321
322/*ARGSUSED*/
323static int
324p_gc_stat(value vwhat, type twhat, value vval, type tval)
325{
326    pword result;
327
328    result.tag.kernel = TINT;
329    switch(vwhat.nint)
330    {
331    case 0:	/* gc_number */
332	result.val.nint = collections_;
333	break;
334    case 1:	/* gc_collected */
335	Make_Float(&result, total_garbage_ * sizeof(pword));
336	break;
337    case 2:	/* gc_area */
338	result.val.nint = average_area_ * sizeof(pword);
339	break;
340    case 3:	/* gc_ratio */
341	Make_Float(&result, average_ratio_ * 100.0);
342	break;
343    case 4:	/* gc_time */
344	Make_Float(&result, (double) collection_time_ / clock_hz);
345	break;
346
347
348    case 8:	/* global stack used */
349	result.val.nint = (char *) TG -
350		(char *) g_emu_.global_trail[0].start;
351	break;
352    case 9:	/* global stack allocated */
353	result.val.nint = (char *) g_emu_.global_trail[0].end -
354		 (char *) g_emu_.global_trail[0].start;
355	break;
356    case 10:	/* global stack peak */
357	result.val.nint = (char *) g_emu_.global_trail[0].peak -
358		 (char *) g_emu_.global_trail[0].start;
359	break;
360    case 11:	/* trail/global stack size */
361	result.val.nint = (char *) g_emu_.global_trail[1].start -
362		 (char *) g_emu_.global_trail[0].start;
363	break;
364    case 12:	/* trail stack used */
365	result.val.nint = (char *) g_emu_.global_trail[1].start -
366		(char *) TT;
367	break;
368    case 13:	/* trail stack allocated */
369	result.val.nint = (char *) g_emu_.global_trail[1].start -
370		 (char *) g_emu_.global_trail[1].end;
371	break;
372    case 14:	/* trail stack peak */
373	result.val.nint = (char *) g_emu_.global_trail[1].start -
374		 (char *) g_emu_.global_trail[1].peak;
375	break;
376    case 15:	/* trail/global stack size */
377	result.val.nint = (char *) g_emu_.global_trail[1].start -
378		 (char *) g_emu_.global_trail[0].start;
379	break;
380
381    case 16:	/* control stack used */
382	result.val.nint = (char *) B.args -
383		(char *) g_emu_.control_local[0].start;
384	break;
385    case 17:	/* control stack allocated */
386	result.val.nint = (char *) g_emu_.control_local[0].end -
387		 (char *) g_emu_.control_local[0].start;
388	break;
389    case 18:	/* control stack peak */
390	result.val.nint = (char *) g_emu_.control_local[0].peak -
391		 (char *) g_emu_.control_local[0].start;
392	break;
393    case 19:	/* local/control stack size */
394	result.val.nint = (char *) g_emu_.control_local[1].start -
395		 (char *) g_emu_.control_local[0].start;
396	break;
397    case 20:	/* local stack used */
398	result.val.nint = (char *) g_emu_.control_local[1].start -
399		(char *) SP;
400	break;
401    case 21:	/* local stack allocated */
402	result.val.nint = (char *) g_emu_.control_local[1].start -
403		 (char *) g_emu_.control_local[1].end;
404	break;
405    case 22:	/* local stack peak */
406	result.val.nint = (char *) g_emu_.control_local[1].start -
407		 (char *) g_emu_.control_local[1].peak;
408	break;
409    case 23:	/* local/control stack size */
410	result.val.nint = (char *) g_emu_.control_local[1].start -
411		 (char *) g_emu_.control_local[0].start;
412	break;
413
414    default:
415	result.val.nint = 0;
416	break;
417    }
418    Return_Unify_Pw(vval, tval, result.val, result.tag);
419}
420
421static int
422p_stat_reset(void)
423{
424    collections_ = 0;
425    total_garbage_ = 0.0;
426    average_area_ = 0;
427    collection_time_ = 0;
428    average_ratio_ = 1.0;
429    g_emu_.global_trail[0].peak = g_emu_.global_trail[0].end;
430    g_emu_.global_trail[1].peak = g_emu_.global_trail[1].end;
431    g_emu_.control_local[0].peak = g_emu_.control_local[0].end;
432    g_emu_.control_local[1].peak = g_emu_.control_local[1].end;
433    Succeed_
434}
435
436
437
438/*------------------------------------------------------------------
439 * The toplevel function for collecting the global stack:
440 *
441 * collect_stacks(arity)
442 *	arity gives the number of active argument registers.
443 *	All VM registers have to be exported.
444 *	TG, TT and GB must be imported after the collection.
445 *	We assume that on top of the local stack there is a return
446 *	address pointing behind the environment size of the current
447 *	environment.
448 *------------------------------------------------------------------*/
449
450
int
collect_stacks(word arity, word gc_forced)
452{
453    word total, garbage, trail_garb_count, gc_time;
454    pword **trail_garb_list;
455    pword *ideal_gc_trigger, *min_gc_trigger, *max_gc_trigger;
456    control_ptr GCB;
457    int leave_choicepoint = 0;
458
459    /*
460     * Find GCB from GCTG
461     * GCB is a conceptual register, pointing to the newest choice point
462     * that already existed at the time of the last garbage collection.
463     */
464    Compute_Gcb(GCB.args);
465
466
467    /*
468     * Now decide whether to garbage collect or to just expand the stack
469     *
470     * min_gc_trigger makes sure we collect at least gc_interval bytes
471     *    (except when we can't grow the stack to achieve that).
472     * ideal_gc_trigger is the point we should ideally collect beyond
473     *    in order to avoid quadratic collection time behaviour.
474     * max_gc_trigger has been introduced to reduce intervals again when
475     *    we approach the final stack limit (i.e. TT). Otherwise big atomic
476     *    allocations can cause overflow when we haven't collected for
477     *    a long time.
478     */
479    Safe_Add_To_Pointer(GCTG, GCTG - BChp(GCB.args)->tg, (pword *) TT, ideal_gc_trigger);
480    Safe_Add_To_Pointer(GCTG, TG_SEG, (pword *) TT, min_gc_trigger);
481    max_gc_trigger = GCTG + ((pword *) TT - GCTG) / 2;
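
    /*
     * Worked example with hypothetical numbers: if the previous segment
     * (GCTG - GCB->tg) was 2 MWords, TG_SEG is 1 MWord, and TT leaves
     * 16 MWords of room above GCTG, then ideal_gc_trigger = GCTG + 2M,
     * min_gc_trigger = GCTG + 1M and max_gc_trigger = GCTG + 8M.
     * The test below then prefers expanding the stack over collecting as
     * long as TG is still below GCTG + 2M (with GC_ADAPTIVE; the 8M limit
     * only matters when the previous segment was very large), or below
     * GCTG + 1M without GC_ADAPTIVE.
     */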
482
483#if 0
484    p_fprintf(log_output_, "Remaining space              %12d\n", (char*)TT - (char*)TG);
485    p_fprintf(log_output_, "Distance to min_gc_trigger   %12d\n", (char*)min_gc_trigger - (char*)TG);
486    p_fprintf(log_output_, "Distance to ideal_gc_trigger %12d\n", (char*)ideal_gc_trigger - (char*)TG);
487    p_fprintf(log_output_, "Distance to max_gc_trigger   %12d\n", (char*)max_gc_trigger - (char*)TG);
488    if (!(TG < max_gc_trigger))
489	p_fprintf(log_output_, "gc because beyond max_gc_trigger\n");
490#endif
491
492    if (!gc_forced &&           /* not triggered by garbage_collect/0 */
493        (NbStreamsFree > 0) &&  /* not triggered by running out of streams */
494        ( ( GlobalFlags & GC_ADAPTIVE
495            && TG < ideal_gc_trigger  &&  TG < max_gc_trigger )
496        || TG < min_gc_trigger
497        ))
498    {
499	/*
500	 * Try to expand the stack rather than doing gc
501	 */
502	trim_global_trail(TG_SEG);
503
504	/*
505	 * trim_global_trail() may expand the stack less than desired,
506	 * because of lack of memory, but this doesn't matter much.
507	 * As long as the new TG_LIM is larger than the current trigger
508	 * point, we delay the collection until TG_LIM is reached.
509	 */
510	if (TG_LIM > TG_SLS)
511	{
512	    Set_Tg_Soft_Lim(TG_LIM);
513	    return 0;
514	}
515	if (GlobalFlags & GC_VERBOSE)
516	{
517	    (void) ec_outfs(log_output_,"GC: couldn't grow global stack as requested, forcing gc\n");
518	    ec_flush(log_output_);
519	}
520    }
521
522
523    /*
524     * Do the garbage collection, if enabled
525     */
526    if (GlobalFlags & GC_ENABLED)
527    {
528	gc_time = user_time();
529
530	if (GlobalFlags & GC_VERBOSE) {
531	    (void) ec_outfs(log_output_,"GC ."); ec_flush(log_output_);
532	}
533#ifdef DEBUG_GC
534	if (collections_ == stop_at_)
535	    total = 0;
536	if (SV)
537	    Print_Err("SV (suspending variables list) not empty\n");
538#endif
539		/*
540		 * If an incremental choicepoint has been buried under a
541		 * regular one, invalidate it to avoid loss of garbage.
542		 * This is done by copying the fields from the chp below.
543		 */
544	if (GCB.top < B.top && IsGcFrame(GCB.top - 1))
545	{
546	    control_ptr chp;
547	    GCB.chp = (GCB.top - 1)->frame.chp;	/* set GCB one deeper */
548	    incremental_ = 0;
549	    chp.top = GCB.top - 1;
550	    chp.chp = chp.top->frame.chp;
551	    GCB.chp->tg = chp.chp->tg;
552	    GCB.chp->tt = chp.chp->tt;
553	    GCB.chp->ld = chp.chp->ld;
554	}
555
556		/*
557		 * For the duration of the GC, we use GCTG to cache Chp_Tg(GCB)
558		 */
559	GCTG = Chp_Tg(GCB);
560	total = TG - Chp_Tg(GCB);
561
562	make_choicepoint(arity);
563		/*
		 * disallow exit_block while GC is running
565		 */
566	Disable_Exit();
567		/*
568		 * Mark GCB's witness pword first (This should normally be
569		 * Mark_from_pointer(TREF, (pword *) &Chp_Tg(GCB), NO);
570		 * but eg. InCurrentSegment() keeps using Chp_Tg(GCB)).
571		 */
572	Set_Bit(MARK, Chp_Tg(GCB));
573		/*
574		 * mark what is reachable from variables older than GCB
575		 */
576	mark_from_trail(GCB);
577		/*
578		 * Take care of the coroutining registers.
579		 * The LD list is handled separately.
580		 */
581	Mark_from_pointer(TSUSP, &DE, NO);
582	Mark_from_pointer(TLIST, (pword *) &MU, NO);
583	Mark_from(TAGGED_WL.tag.kernel, &TAGGED_WL, NO);
584	Mark_from(POSTED.tag.kernel, &POSTED, NO);
585	Mark_from(POSTED_LAST.tag.kernel, &POSTED_LAST, NO);
586	Mark_from_pointer(WP_STAMP.tag.kernel, &WP_STAMP, NO);
587	Mark_from_pointer(PostponedList.tag.kernel, &PostponedList, NO);
588		/*
589		 * Mark the list of cut actions
590		 */
591	Mark_from_pointer(TCOMP, (pword *) &LCA, NO);
592#ifdef NEW_ORACLE
593		/*
594		 * Mark the oracle registers
595		 */
596	if (TO) Mark_from_pointer(TCOMP, (pword *) &TO, NO);
597#endif
598		/*
599		 * Mark the explicit global variables
600		 */
601	Mark_from_pointer(TCOMP, (pword *) &g_emu_.global_variable, NO);
602	_mark_from_global_variables();
603		/*
604		 * process control frames and the related environments,
605		 * do virtual backtracking and trail garbage detection
606		 */
607	trail_garb_list = mark_from_control_frames(GCB, &trail_garb_count);
608	reset_env_marks(GCB);
609		/*
610		 * end of the marking phase
611		 */
612	if (GlobalFlags & GC_VERBOSE) {
613	    (void) ec_outfs(log_output_,"."); ec_flush(log_output_);
614	}
615		/*
616		 * compact global stack and trail
617		 */
618	compact_and_update();
619	if (trail_garb_count) compact_trail(trail_garb_list);
620		/*
621		 * scan the choicepoints and update the tt entries
622		 */
623	update_trail_ptrs(GCB);
624		/*
625		 * restore the (updated) machine state
626		 */
627	pop_choicepoint();
628		/*
629		 * statistics
630		 */
631	garbage = total - (TG - Chp_Tg(GCB));
632	average_area_ =
633	    ((average_area_ * collections_) + total) / (collections_ + 1);
634	if (garbage || total_garbage_ > 0.0)
635	    average_ratio_ *=
636		(total_garbage_ + garbage)
637		/ (total_garbage_ + average_ratio_ * total);
638	total_garbage_ += garbage;
639	collections_++;
640	gc_time = user_time() - gc_time;
641	collection_time_ += gc_time;
642
643	if (GlobalFlags & GC_VERBOSE)
644	{
645	    word trail_total = Chp_Tt(GCB) - TT + trail_garb_count;
646
647	    p_fprintf(log_output_,
648		". global: %d - %d (%.1f %%), trail: %d - %d (%.1f %%), time: %.3f\n",
649		sizeof(pword) * total,
650		sizeof(pword) * garbage,
651		(100.0*garbage)/total,
652		4 * trail_total,
653		4 * trail_garb_count,
654		trail_total ? (100.0*trail_garb_count)/trail_total : 0.0,
655		(double)gc_time/clock_hz
656	    );
657	    ec_flush(log_output_);
658	}
659
660	    /*
661	     * Remember the stack pointer's value after the collection
662	     */
663	GCTG = TG;
664
665	    /* We may trim the local stack only when we are sure that there are
666	     * no garbage trail entries pointing above the top of SP !
667	     * This is the case after a gc.
668	     */
669	(void) trim_control_local();
670
671	/* Shrink the dynamic event queue to at least
672	 *  MIN_DYNAMIC_EVENT_SLOTS free
673	 */
674	trim_dynamic_event_queue();
675    }
676
677
678    /*
679     * re-adjust the stacks
680     */
681    trim_global_trail(TG_SEG);
682    if (TG_LIM - TG < TG_MIN_SEG)
683    {
684	VM_FLAGS &= ~(NO_EXIT|WAS_EXIT);
685	ov_reset();		/* overflow even after collection */
686    }
687    Set_Tg_Soft_Lim(TG_LIM);
688
689
690    /*
691     * release the exit_block protection and execute a
692     * delayed exit, if necessary
693     */
694    Enable_Exit()
695    return leave_choicepoint;
696}
697
698
699/*
700 * save the VM registers in a new choicepoint
701 * This is to simplify the algorithm
702 */
703
704static void
705make_choicepoint(word ar)
706{
707    chp_ptr chp;
708    top_ptr top;
709    pword *pw;
710
711    if (GB != Chp_Tg(B))
712    {
713	Print_Err("GB != B->tg");
714    }
715
716    Disable_Int()
717    chp = (B.chp)++;
718    chp->sp = SP;
719    chp->tg = TG;
720    chp->tt = TT;
721    chp->e = E;
722    chp->ld = LD;
723    pw = &g_emu_.emu_args[1];
724    for(; ar > 0; ar--) {
725	*((B.args)++) = *(pw++);
726    }
727    top = (B.top)++;
728    top->frame.chp = chp;
729    top->backtrack = gc_fail_code_;
730    Enable_Int()
731
732    pw = TG++;				/* push a dummy word (needed	*/
733    pw->tag.kernel = TNIL;		/* for updating chp->tg)	*/
734}
735
736
737/*
738 * restore from the choicepoint the VM registers that may have changed
739 * during garbage collection
740 */
741
742static void
743pop_choicepoint(void)
744{
745    control_ptr chp;
746    top_ptr top;
747    pword *pw;
748
749    top = B.top - 1;
750    chp.chp = top->frame.chp;
751    TT = chp.chp->tt;
752    TG = chp.chp->tg;
753    LD = chp.chp->ld;
754    chp.chp++;
755    pw = &g_emu_.emu_args[1];			/* reload arguments	*/
756    while(chp.top < top)
757	*pw++ = *(chp.args)++;
758    B.any_frame = top->frame;	/* pop the choicepoint	*/
759
760    GB = Chp_Tg(B);
761
762    /* Now mark the other arguments invalid (for recursive emulators).
763     * Caution: There may be a module argument which must be skipped first.
764     */
765    while(++pw < &g_emu_.emu_args[NARGREGS] && pw->tag.kernel != TEND)
766    {
767	pw->tag.kernel = TEND;
768	pw->val.nint = 0x11111111;
769    }
770}
771
772
773/*-------------------------------------------------------------------
774 * marking phase
775 *-------------------------------------------------------------------*/
776
777
778/*
779 * process the trail entries younger than the control frame fp:
780 * - remove unnecessary trails of locations newer than fp
781 * - early untrail and remove trails of (so far) unreachable locations
782 * - link other entries into relocation chains
783 */
784static pword **
785early_untrail(control_ptr GCB, register pword **tr, control_ptr fp, pword **garbage_list, word *trail_garbage)
786{
787    register pword *trailed_item;
788    register word i, what, trailed_tag;
789    register pword **prev_tt = fp.chp->tt;
790    register pword *prev_tg = fp.chp->tg;
791    pword *prev_sp = fp.chp->sp;
792    pword *gcb_tg = Chp_Tg(GCB);
793    pword *gcb_sp = Chp_Sp(GCB);
794
795    while (tr < prev_tt)		/* partial untrailing */
796    {
797	switch ((word) *tr & 3)
798	{
799	case TRAIL_ADDRESS:
800	    trailed_item = *tr;
801	    if (trailed_item < prev_tg)
802	    {
803		if (trailed_item >= gcb_tg)
804		{
805		    if (!Marked(trailed_item->tag.kernel))
806		    {
807			/* early reset, since this variable is
808			 * only reachable after backtracking
809			 */
810#ifdef DEBUG_GC
811			if (IsLink(trailed_item->tag.kernel))
812			    Print_Err("unmarked link in early_reset\n");
813#endif
814			trailed_item->val.ptr = trailed_item;
815			trailed_item->tag.kernel = TREF;
816			(*trail_garbage)++;
817			*tr = (pword *)garbage_list;
818			garbage_list = tr;
819		    }
820		    else
821		    {
822			Into_Reloc_Chain(trailed_item,(pword*)tr)
823		    }
824		}
825		else
826		{
827		    /* reset ALREADY_MARKED_FROM, it was set in mark_from_trail */
828		    Clr_Bit(ALREADY_MARKED_FROM, trailed_item);
829		}
830	    }
831	    else if (trailed_item < prev_sp
832#ifdef AS_EMU
833		&& (trailed_item < TG_LIM || trailed_item >= spmax_)
834#endif
835		)
836	    {
837		/* such trail entries can only occur after
838		 * a cut and before a fail through this cut.
839		 */
840		(*trail_garbage)++;
841		*tr = (pword *)garbage_list;
842		garbage_list = tr;
843	    }
844	    else if (trailed_item >= gcb_sp)
845	    {
846		/* reset ALREADY_MARKED_FROM, it was set in mark_from_trail */
847		Clr_Bit(ALREADY_MARKED_FROM, trailed_item);
848	    }
849	    tr++;
850	    break;
851
852	case TRAIL_TAG:
853	    trailed_item = *(tr+1);
854	    if (trailed_item < prev_tg)
855	    {
856		if (trailed_item >= gcb_tg)
857		{
858		    if (!Marked(trailed_item->tag.kernel))
859		    {
860			/* early reset, since this variable is
861			 * only reachable after backtracking
862			 */
863#ifdef DEBUG_GC
864			 if (IsLink(trailed_item->tag.kernel))
865			    Print_Err( "unmarked link in early_reset\n");
866#endif
867			trailed_item->val.ptr = trailed_item;
868			trailed_item->tag.kernel = TrailedTag(*tr);
869			*trail_garbage += 2;
870			*(tr+1) = (pword *)garbage_list;
871			garbage_list = tr;
872		    }
873		    else
874		    {
875			trailed_tag = TrailedTag(*tr);
876			/*
877			 * CAUTION: we mark here with a non-standard tag which
878			 * has the TREFBIT removed. The reason is that this
879			 * should be treated as a self-reference although it
880			 * doesn't look like one.
881			 */
882			Mark_from_pointer(trailed_tag & ~TREFBIT, (pword *) (tr + 1), NO);
883		    }
884		}
885		else
886		{
887		    /* reset ALREADY_MARKED_FROM, it was set in mark_from_trail */
888		    Clr_Bit(ALREADY_MARKED_FROM, trailed_item);
889		}
890	    }
891	    else if (trailed_item < prev_sp
892#ifdef AS_EMU
893		&& (trailed_item < TG_LIM || trailed_item >= spmax_)
894#endif
895		)
896	    {
897		/* cut garbage, remove the trail entry */
898		*trail_garbage += 2;
899		*(tr+1) = (pword *)garbage_list;
900		garbage_list = tr;
901	    }
902	    else if (trailed_item >= gcb_sp)
903	    {
904		/* reset ALREADY_MARKED_FROM, it was set in mark_from_trail */
905		Clr_Bit(ALREADY_MARKED_FROM, trailed_item);
906	    }
907	    tr += 2;
908	    break;
909
910	case TRAIL_MULT:
911	    i = (word) *tr;
912	    what = TrailedType(i);
913	    trailed_item = *(tr+1);
914	    if (trailed_item >= prev_tg && trailed_item < prev_sp
915#ifdef AS_EMU
916		&& (trailed_item < TG_LIM || trailed_item >= spmax_)
917#endif
918		)
919	    {
920		/* cut garbage, remove the trail entry */
921		i = TrailedNumber(i) + 3;
922		*trail_garbage += i;
923		*(tr+1) = (pword *)garbage_list;
924		garbage_list = tr;
925		tr += i;
926		break;
927	    }
928	    if (trailed_item >= gcb_tg && trailed_item < gcb_sp
929#ifdef AS_EMU
930		&& (trailed_item < TG_LIM || trailed_item >= spmax_)
931#endif
932		)
933	    {
934		/*
935		 * Special case of the trailed WAKE bit in a TDE tag:
936		 * We have to disable early untrail, otherwise some
937		 * woken goals would show up as unwoken in the LD list.
938		 */
939		if (what == TRAILED_WORD32 &&
940		    TrailedOffset(i) == 1 &&
941		    TagTypeC((word) *(tr+2)) == TDE)
942		{
943		    /* The flag MARK_FULL_DE is used to tell the
944		     * marking routine to ignore the WAKE bit and to
945		     * mark the full suspension as if it were unwoken.
946		     */
947		    Mark_from_pointer(MARK_FULL_DE|TSUSP, tr+1, NO);
948		    tr += TrailedNumber(i) + 3;
949		}
950		else if (!Marked(trailed_item->tag.kernel))
951		{
952		    /* early untrail, since this item is
953		     * only reachable after backtracking
954		     */
955#ifdef DEBUG_GC
956		    if (IsLink(trailed_item->tag.kernel))
957			Print_Err( "unmarked link in early_reset\n");
958#endif
959		    trailed_item = (pword *) ((uword *) trailed_item
960			+ TrailedOffset(i));
961		    i = TrailedNumber(i);
962		    *trail_garbage += i + 3;
963		    *(tr+1) = (pword *)garbage_list;
964		    garbage_list = tr;
965		    tr += 2;
966		    do {
967#if 0
968			/*
969			 * This actually occurs, but shouldn't - needs investigation
970			 */
971			if (IsLink(trailed_item->tag.kernel) || Marked(trailed_item->tag.kernel))
972			{
973			    Print_Err("unexpected mark/link during early_reset\n");
974			}
975#endif
976			trailed_item->val.ptr = *tr++;
977			trailed_item = (pword *)
978			    ((uword *) trailed_item + 1);
979		    } while (i--);
980		}
981		else /* the whole item is already marked */
982		{
983		    Into_Reloc_Chain(trailed_item,(pword*)(tr+1))
984		    trailed_item = (pword *) ((uword *) trailed_item
985			+ TrailedOffset(i));
986
987		    i = TrailedNumber(i);
988		    tr += 2;
989		    if (what == TRAILED_PWORD)
990		    {
991			i /= 2;
992			do
993			{
994			    /*
995			     * CAUTION: for trailed self-references, we mark
996			     * here with a non-standard tag which has the
997			     * TREFBIT removed. The reason is that for marking
998			     * purposes this should be treated as a self-
999			     * reference although it doesn't look like one.
1000			     */
1001			    trailed_tag = ((pword*)tr)->tag.kernel;
1002			    if (((pword*)tr)->val.ptr == trailed_item)
1003				trailed_tag &= ~TREFBIT;
1004			    Mark_from(trailed_tag, ((pword*)tr), NO);
1005			    tr = (pword **)((pword*)tr + 1);
1006			    ++trailed_item;
1007			} while (i--);
1008		    }
1009		    else if (what == TRAILED_REF)
1010			do
1011			{
1012			    trailed_tag = TREF;
1013			    if (*tr == trailed_item)	/* CAUTION: see above */
1014				trailed_tag &= ~TREFBIT;
1015			    Mark_from_pointer(trailed_tag, ((pword*)tr), NO);
1016			    tr++;
1017			    trailed_item = (pword*) ((uword*)trailed_item + 1);
1018			} while (i--);
1019		    else if (what == TRAILED_COMP)
1020			do
1021			{
1022			    Mark_from_pointer(TCOMP, ((pword*)tr), NO);
1023			    tr++;
1024			} while (i--);
1025		    else if (what == TRAILED_WORD32)
1026			tr += i + 1 ;
1027		    else
1028		    {
1029			Print_Err(
1030			"bad extension trail entry in early_reset\n");
1031			tr += 2;
1032		    }
1033		}
1034		break;
1035	    }
1036	    /*
1037	     * The following code is to detect unnecessary pointer trails.
1038	     * Applies to trailed locations (trailed_item) in the heap
1039	     * and old parts of local and global stack, e.g. suspending
1040	     * list pointers and setarg'd structure arguments.
1041	     * We assume: (trailed_item < gcb_tg || trailed_item >= gcb_sp)
1042	     *
1043	     * ??? shouldn't that (the cut garbage bit) more generally apply to
1044	     * (trailed_item < prev_tg || trailed_item >= prev_sp) ???
1045	     */
1046	    if ((what == TRAILED_REF || what == TRAILED_COMP) &&
1047		TrailedNumber(i) == 0)
1048	    {
1049		pword *trailed_ptr = *(tr+2);
1050		if (trailed_ptr >= prev_tg && trailed_ptr < prev_sp
1051#ifdef AS_EMU
1052		    && (trailed_ptr < TG_LIM || trailed_ptr >= spmax_)
1053#endif
1054		    )
1055		{
1056		    /* cut garbage, remove the trail entry */
1057		    *trail_garbage += 3;
1058		    *(tr+1) = (pword *)garbage_list;
1059		    garbage_list = tr;
1060		}
1061		else	/* mark from the old value */
1062		{
1063		    /* CAUTION: see above */
1064		    trailed_tag = (what == TRAILED_COMP) ? (word) TCOMP
1065		    	: (*(tr+2) == trailed_item) ? (word) (TREF & ~TREFBIT)
1066			: (word) TREF;
1067		    Mark_from(trailed_tag, (pword *)(tr+2), NO);
1068		}
1069		/* reset ALREADY_MARKED_FROM, it was set in mark_from_trail */
1070		Clr_Bit(ALREADY_MARKED_FROM, trailed_item);
1071	    }
1072	    else if (what == TRAILED_PWORD && TrailedNumber(i) == 1)
1073	    {
1074		pword *trailed_ptr = ((pword *)(tr+2))->val.ptr;
1075		trailed_tag = ((pword *)(tr+2))->tag.kernel;
1076		if (ISPointer(trailed_tag) &&
1077		    trailed_ptr >= prev_tg && trailed_ptr < prev_sp
1078#ifdef AS_EMU
1079		    && (trailed_ptr < TG_LIM || trailed_ptr >= spmax_)
1080#endif
1081		    )
1082		{
1083		    /* cut garbage, remove the trail entry */
1084		    *trail_garbage += 4;
1085		    *(tr+1) = (pword *)garbage_list;
1086		    garbage_list = tr;
1087		}
1088		else	/* mark from the old value */
1089		{
1090		    /* CAUTION: see above */
1091		    if (((pword *)(tr+2))->val.ptr == trailed_item)
1092		    	trailed_tag &= ~TREFBIT;
1093		    Mark_from(trailed_tag, (pword *)(tr+2), NO);
1094		}
1095		/* reset ALREADY_MARKED_FROM, it was set in mark_from_trail */
1096		Clr_Bit(ALREADY_MARKED_FROM, trailed_item);
1097	    }
1098	    tr += TrailedNumber(i) + 3;
1099	    break;
1100
1101	case TRAIL_EXT:
1102	    i = (word) tr[TRAIL_UNDO_FLAGS];
1103	    trailed_item = tr[TRAIL_UNDO_ADDRESS];
1104	    switch(TrailedEtype(i))
1105	    {
1106
1107	    case TRAIL_UNDO:
1108		if (InCurrentSegment(trailed_item))
1109		{
1110		    if (!Marked(trailed_item->tag.kernel))
1111		    {
1112			untrail_ext(tr, UNDO_GC);	/* early untrail */
1113			*trail_garbage += TrailedEsize(i);
1114			*(tr+1) = (pword *)garbage_list;
1115			garbage_list = tr;
1116			break;
1117		    }
1118		    else	/* enter in relocation chains	*/
1119		    {
1120			Into_Reloc_Chain(trailed_item,(pword*)(tr+TRAIL_UNDO_ADDRESS))
1121		    }
1122		}
1123		/* Mark the data if it contains pwords.  This is
1124		 * simpler than marking the untrail data in a value
		 * trail, because it will just be used, not restored.
1126		 */
1127		if (TrailedType(i) == TRAILED_PWORD)
1128		{
1129		    word n_pwords = (TrailedEsize(i) - TRAIL_UNDO_SIMPLE_HEADER_SIZE)/2;
1130		    pword *pdata = (pword *) (tr + TRAIL_UNDO_SIMPLE_HEADER_SIZE);
1131		    for(; n_pwords > 0; --n_pwords, ++pdata)
1132		    {
1133			Mark_from(pdata->tag.kernel, pdata, NO);
1134		    }
1135		}
1136		break;
1137
1138	    case TRAIL_UNDO_STAMPED:
1139	    {
1140		pword *stamp = tr[TRAIL_UNDO_STAMP_ADDRESS];
1141		/* first reset ALREADY_MARKED_FROM, if it was set in mark_from_trail */
1142		if (!InCurrentSegment(stamp))
1143		{
1144		    Clr_Bit(ALREADY_MARKED_FROM, stamp);
1145		}
1146		/*
1147		 * Three cases now:
1148		 * - timestamp too new: frame is cut garbage, just delete it
1149		 * - item unreachable: early untrail and delete frame
1150		 * - otherwise: keep the frame
1151		 */
1152		if (tr[TRAIL_UNDO_OLDSTAMP] >= prev_tg)
1153		{
1154		    /* Timestamp's old value indicates the frame is cut garbage.
1155		     * Caution: The timestamp could be reset here, but if
1156		     * !InCurrentSegment(stamp), the timestamp has been
1157		     * marked_from and its value may be overwritten.
1158		     * If Marked(), tag and possibly value are overwritten.
1159		     * The remaining case (InCurrentSegment(stamp) && !Marked(stamp))
1160		     * is unlikely. We therefore never reset the stamp.
1161		     * The only consequence of this is that the stamp may keep
1162		     * an extra witness pword alive.
1163		     */
1164		    *trail_garbage += TrailedEsize(i);
1165		    *(tr+1) = (pword *)garbage_list;
1166		    garbage_list = tr;
1167		    break;
1168		}
1169		else if (InCurrentSegment(trailed_item) && !Marked(trailed_item->tag.kernel))
1170		{
1171		    /* early untrail: item not reachable until after failure */
1172		    /* Above comment on timestamp applies here as well */
1173		    untrail_ext(tr, UNDO_GC);
1174		    *trail_garbage += TrailedEsize(i);
1175		    *(tr+1) = (pword *)garbage_list;
1176		    garbage_list = tr;
1177		    break;
1178		}
1179		else			/* useful trail, mark */
1180		{
1181		    /* Enter (weak) item pointer into relocation chain */
1182		    if (InCurrentSegment(trailed_item)) /* && Marked(trailed_item->tag.kernel) */
1183		    {
1184			Into_Reloc_Chain(trailed_item,(pword*)(tr+TRAIL_UNDO_ADDRESS))
1185		    }
1186		    /* current stamp: mark or just enter into relocation chain.
1187		     * Note that the stamp pointer is a strong pointer.  */
1188		    if (InCurrentSegment(stamp))
1189		    {
1190			Mark_from_pointer(TREF, ((pword*)(tr+TRAIL_UNDO_STAMP_ADDRESS)), NO);
1191		    }
1192		    /* mark the old stamp */
1193		    Mark_from_pointer(TREF, ((pword*)(tr+TRAIL_UNDO_OLDSTAMP)), NO);
1194		    /* Mark the data if it contains pwords.  This is
1195		     * simpler than marking the untrail data in a value
		     * trail, because it will just be used, not restored.
1197		     */
1198		    if (TrailedType(i) == TRAILED_PWORD)
1199		    {
1200			word n_pwords = (TrailedEsize(i) - TRAIL_UNDO_STAMPED_HEADER_SIZE)/2;
1201			pword *pdata = (pword *) (tr + TRAIL_UNDO_STAMPED_HEADER_SIZE);
1202			for(; n_pwords > 0; --n_pwords, ++pdata)
1203			{
1204			    Mark_from(pdata->tag.kernel, pdata, NO);
1205			}
1206		    }
1207		}
1208	    }
1209		break;
1210
1211/**** BEGIN EXTENSION SLOT ****
1212
1213Name:	GC_EARLY_UNTRAIL
1214
1215Parameters:
1216pword **tr      points to extension trail frame, which is already in a
1217		relocation chain, so the address field is overwritten
1218
1219Code Template:
1220	    case TRAIL_EXTENSION:
1221		if the trail frame contains pointers or pwords,
1222		use them for marking
1223
1224****** END EXTENSION SLOT *****/
1225	    }
1226	    tr += TrailedEsize(*tr);
1227	    break;
1228
1229	}
1230    }
1231    return garbage_list;
1232}
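

/*
 * Summary sketch (disabled, for orientation only): the low two bits of
 * the first word of every trail entry select its layout, as dispatched
 * in early_untrail() above and in mark_from_trail() below.  The entry
 * sizes are those implied by the pointer arithmetic in those functions.
 */
#if 0
    switch ((word) *tr & 3)
    {
    case TRAIL_ADDRESS:	/* 1 word:  address of the trailed variable	*/
    case TRAIL_TAG:	/* 2 words: trailed tag + address		*/
    case TRAIL_MULT:	/* header + address + one or more value words	*/
    case TRAIL_EXT:	/* TrailedEsize() words: undo/stamped undo frame */
	break;
    }
#endif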
1233
1234
1235
/*
 * Go through the environment chain of this frame, marking from
 * the permanent variables. Stop if the chain merges with a
 * previously processed chain (mergepoint).
 * Compute the mergepoint for the chain that will be processed next.
 * In the waking routines we have environments of statically unknown
 * size. They are marked in the code with a size of -1.
 * The real size is computed from the tag of Y1.
 */
1245
1246/* Walk_Env_Chain(fp,mergepoint,next_chain,next_mergepoint,edesc) */
1247#define Walk_Env_Chain(SlotAction) { \
1248	pword *env = fp.chp->e; \
1249 \
1250	/* start of next environment chain */ \
1251	next_chain = (fp.top-1)->frame.chp->e; \
1252	next_mergepoint = (env >= next_chain) ? env : (pword *)0; \
1253 \
1254	/* process environments up to and including the shared one */ \
1255	/* while (env <= mergepoint) */ \
1256	for(;;) \
1257	{ \
1258	    if (EdescIsSize(edesc)) { \
1259		/* we have only an environment size, all slots active */ \
1260		word sz = EdescSize(edesc,env); \
1261		Check_Size(sz) \
1262		for (pw = env - sz; pw < env; pw++) \
1263		{ \
1264		    SlotAction /*(pw)*/ \
1265		} \
1266	    } else { \
1267		/* we have an environment activity bitmap */ \
1268		uword *eam_ptr = EdescEamPtr(edesc); \
1269		pw = env; \
1270		do { \
1271		    int i=EAM_CHUNK_SZ; \
1272		    uword eam = EamPtrEam(eam_ptr); \
1273		    for(;eam;--i) { \
1274			--(pw); \
1275			if (eam & 1) { \
1276			    SlotAction /*(pw)*/ \
1277			} \
1278			eam >>= 1; \
1279		    } \
1280		    pw -= i; \
1281		} while (EamPtrNext(eam_ptr)); \
1282	    } \
1283	    if (env >= mergepoint) \
1284		break; \
1285 \
1286	    edesc = EnvDesc((pword**)env + 1); \
1287	    env = PrevEnv(env); \
1288 \
1289	    if (!next_mergepoint && env >= next_chain) \
1290		next_mergepoint = env; \
1291	} \
1292 \
1293	/* compute the next mergepoint	*/ \
1294	if (next_mergepoint) \
1295	    mergepoint = next_mergepoint; \
1296	else \
1297	{ \
1298	    do \
1299	    { \
1300		env = PrevEnv(env); \
1301	    } \
1302	    while(env < next_chain); \
1303	    mergepoint = env; \
1304	} \
1305}
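

/*
 * Illustrative usage sketch (disabled): SlotAction is expanded once for
 * every active permanent variable, with pw pointing at the slot.  This
 * is how mark_from_control_frames() below instantiates the macro; the
 * variables fp, mergepoint, next_chain, next_mergepoint, edesc and pw
 * must be in scope at the point of use.
 */
#if 0
	Walk_Env_Chain(
	    if (!AlreadyMarkedFrom(pw->tag.kernel))
	    {
		Mark_from(pw->tag.kernel, pw, NO)
		Set_Bit(ALREADY_MARKED_FROM, pw)
	    }
	)
#endif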
1306
1307
1308/*
1309 * Go down control frames and environments, marking their contents,
1310 * and interleaving an early-reset step between control frames.
1311 * The collection choicepoint must be on top of control stack.
1312 * GCB must point to a frame that has tg,sp,tt and e fields!
1313 */
1314
1315static pword **
1316mark_from_control_frames(control_ptr GCB, word *trail_garb_count)
1317{
1318    control_ptr		fp, top, pfp;
1319    register pword	*env, *pw, *prev_de;
1320    pword		*next_de,
1321			*next_chain, *mergepoint, *next_mergepoint;
1322    pword		**tr, **trail_garb_list;
1323    word		edesc;
1324
1325    tr = TT;
1326    mergepoint = Chp_E(GCB);
1327    trail_garb_list = (pword **) 0;
1328    *trail_garb_count = 0;
1329    prev_de = (pword *) 0;
1330    next_de = LD;
1331
1332    pfp.args = B.args;
1333    top.top = pfp.top - 1;
1334    fp.any_frame = top.top->frame;
1335
1336    do	/* loop through control frames until we reach GCB */
1337    {
1338#ifdef DEBUG_GC
1339	if (IsInterruptFrame(top.top)
1340	    || IsRecursionFrame(top.top)
1341	    || IsExceptionFrame(top.top))
1342	{
	    Print_Err("bad frame in mark_from_control_frames\n");
1344	}
1345#endif
1346
1347/**** BEGIN EXTENSION SLOT ****
1348
1349Name:	GC_MARK_CONTROL_FRAME
1350
1351Parameters:
1352	control_ptr top		points to the top frame of a control frame
1353	control_ptr fp		points to the bottom of this frame
1354
1355Code Template:
1356	else if ( this_is_an_extension_frame(top) )
1357	{
1358	    Go through the frame and call Mark_from(pw->tag.kernel, pw, NO)
1359	    for every pword pw stored in the frame.
1360	    The 4 standard frame entries Sp,Tg,Tt and E are handled by
1361	    the subsequent code.
1362	}
1363
1364****** END EXTENSION SLOT *****/
1365
1366	if (IsRetryMeInlineFrame(top.top))
1367	{
1368	    edesc = EnvDescPP(top.top->backtrack + RETRY_ME_INLINE_SIZE - 1);
1369	    pw = (pword *)(fp.chp + 1);
1370	}
1371	else if (IsTrustMeInlineFrame(top.top))
1372	{
1373	    edesc = EnvDescPP(top.top->backtrack + TRUST_ME_INLINE_SIZE - 1);
1374	    pw = (pword *)(fp.chp + 1);
1375	}
1376	else if (IsRetryInlineFrame(top.top))
1377	{
1378	    edesc = EnvDescPP(top.top->backtrack + RETRY_INLINE_SIZE - 1);
1379	    pw = (pword *)(fp.chp + 1);
1380	}
1381	else if (IsTrustInlineFrame(top.top))
1382	{
1383	    edesc = EnvDescPP(top.top->backtrack + TRUST_INLINE_SIZE - 1);
1384	    pw = (pword *)(fp.chp + 1);
1385	}
1386	else if (IsParallelFrame(top.top))
1387	{
1388	    edesc = EnvDesc(fp.chp->sp);
1389	    pw = (pword *)(fp.chp_par + 1);
1390	}
1391	else /* if (IsChoicePoint(top.top)) */
1392	{
1393	    edesc = EnvDesc(fp.chp->sp);
1394	    pw = (pword *)(fp.chp + 1);
1395	}
1396
1397	for (; pw < top.args; pw++)	/* mark from arguments	*/
1398	{
1399	    Mark_from(pw->tag.kernel, pw, NO)
1400	}
1401
1402	top.top = fp.top - 1;		/* find next full frame	*/
1403
1404	Walk_Env_Chain( /* (fp,mergepoint,next_chain,next_mergepoint,edesc) */
1405	    if (!AlreadyMarkedFrom(pw->tag.kernel))
1406	    {
1407		Mark_from(pw->tag.kernel, pw, NO)
1408		Set_Bit(ALREADY_MARKED_FROM, pw)
1409	    }
1410	)
1411
1412	/*
1413	 * Process the LD list in this stack segment. Deterministically
1414	 * woken goals are removed from the list. Nondeterministically
1415	 * woken ones are already marked from the trail at this time
1416	 * (recognisable e.g. from the marked module field).
1417	 * Some unmarked woken goals may be marked later from a second
1418	 * suspending variable, but since they are already woken it's
1419	 * no problem that they are missing from the LD list.
1420	 */
1421	fp.chp->ld = prev_de;		/* add ld field to backpatch chain */
1422	prev_de = (pword *) &fp.chp->ld;
1423	while (next_de >= top.top->frame.chp->tg)
1424	{
1425	    if (Marked(next_de->tag.kernel) &&
1426		Marked(next_de[SUSP_MODULE].tag.kernel)
1427	    || !Marked(next_de->tag.kernel) && !SuspDead(next_de))
1428	    {
1429		/*
1430		 * Found a non-garbage suspension next_de.
1431		 * Update all fields in the prev_de chain to point to it.
1432		 */
1433		do {
1434		    pw = prev_de->val.ptr;
1435		    prev_de->val.ptr = next_de;
1436		    Mark_from_pointer(TSUSP, prev_de, NO); /* the NO is ok! */
1437		    prev_de = pw;
1438		} while(prev_de);
1439		prev_de = &next_de[SUSP_LD];	/* start a new chain */
1440		next_de = next_de[SUSP_LD].val.ptr;
1441		prev_de->val.ptr = (pword *) 0;
1442	    }
1443	    else /* deterministically woken, skip it */
1444	    {
1445		pw = next_de[SUSP_LD].val.ptr;
1446		next_de[SUSP_LD].val.ptr = (pword *) 0;	/* not necessary */
1447		next_de = pw;
1448	    }
1449	}
1450
1451	/*
1452	 * Enter the frame's (and the previous small frame's) TG fields
1453	 * into relocation chains so that they are updated in the
1454	 * compaction phase.
1455	 * These used to be non-marking references. Now we have a
1456	 * "witness" TNIL pword pushed with every choicepoint which must
1457	 * be preserved, so we mark it here.
1458	 */
1459	do {
1460	    pfp.any_frame = (pfp.top - 1)->frame;
1461	    Mark_from_pointer(TREF, ((pword*)&pfp.chp->tg), NO);
1462	} while (pfp.args > fp.args);
1463
1464	/*
1465	 * replace the TT field by the (future) offset from TT
1466	 */
1467	tr = fp.chp->tt;		/* remember its original value */
1468	fp.chp->tt = (pword **)(fp.chp->tt - TT - *trail_garb_count);
1469
1470	fp.any_frame = top.top->frame;
1471
1472	/*
1473	 * Do virtual backtracking and trail garbage detection
1474	 * for the trail segment newer than fp->tt.
1475	 * Note that the last invocation of early_untrail does
1476	 * not do any further untrails.
1477	 * It is only necessary to collect trail cut garbage!
1478	 */
1479	trail_garb_list =
1480	    early_untrail(GCB, tr, fp, trail_garb_list, trail_garb_count);
1481
1482    } while (fp.top >= GCB.top);
1483
1484#ifdef DEBUG_GC
1485    if (InCurrentSegment(next_de))
	_gc_error("next_de in current segment\n");
1487#endif
1488    do {
1489	pw = prev_de->val.ptr;
1490	prev_de->val.ptr = next_de;
1491	prev_de = pw;
1492    } while(prev_de);
1493
1494    return trail_garb_list;
1495}
1496
1497
1498static void
1499reset_env_marks(control_ptr GCB)
1500{
1501    control_ptr		fp, top;
1502    register pword	*env, *pw;
1503    pword		*next_chain, *mergepoint, *next_mergepoint;
1504    word		edesc;
1505
1506    mergepoint = Chp_E(GCB);
1507
1508    top.top = B.top - 1;
1509    fp.any_frame = top.top->frame;
1510
1511    do	/* loop through control frames until we reach GCB */
1512    {
1513#ifdef DEBUG_GC
1514	if (IsInterruptFrame(top.top)
1515	    || IsRecursionFrame(top.top)
1516	    || IsExceptionFrame(top.top))
1517	{
	    Print_Err("bad frame in reset_env_marks\n");
1519	    edesc = EnvDesc(fp.chp->sp);
1520	}
1521#endif
1522
1523/**** BEGIN EXTENSION SLOT ****
1524
1525Name:	GC_MARK_CONTROL_FRAME
1526
1527Parameters:
1528	control_ptr top		points to the top frame of a control frame
1529	control_ptr fp		points to the bottom of this frame
1530
1531Code Template:
1532	else if ( this_is_an_extension_frame(top) )
1533	{
1534	    Find environment descriptor from execution context
1535	}
1536
1537****** END EXTENSION SLOT *****/
1538
1539	else if (IsRetryMeInlineFrame(top.top))
1540	{
1541	    edesc = EnvDescPP(top.top->backtrack + RETRY_ME_INLINE_SIZE - 1);
1542	}
1543	else if (IsTrustMeInlineFrame(top.top))
1544	{
1545	    edesc = EnvDescPP(top.top->backtrack + TRUST_ME_INLINE_SIZE - 1);
1546	}
1547	else if (IsRetryInlineFrame(top.top))
1548	{
1549	    edesc = EnvDescPP(top.top->backtrack + RETRY_INLINE_SIZE - 1);
1550	}
1551	else if (IsTrustInlineFrame(top.top))
1552	{
1553	    edesc = EnvDescPP(top.top->backtrack + TRUST_INLINE_SIZE - 1);
1554	}
1555	else /* if (IsChoicePoint(top.top)) */
1556	{
1557	    edesc = EnvDesc(fp.chp->sp);
1558	}
1559
1560	top.top = fp.top - 1;		/* find next full frame	*/
1561
1562	Walk_Env_Chain( /* (fp,mergepoint,next_chain,next_mergepoint,edesc) */
1563	    if (AlreadyMarkedFrom(pw->tag.kernel))
1564	    {
1565		Clr_Bit(ALREADY_MARKED_FROM, pw)
1566	    }
1567	)
1568
1569	fp.any_frame = top.top->frame;
1570
1571    } while (fp.top >= GCB.top);
1572}
1573
1574
1575static void
1576non_marking_reference(pword **ref)
1577{
1578    pword *pw = *ref;
1579
1580    if (InCurrentSegment(pw))
1581    {
1582	Into_Reloc_Chain_Nonmarking(pw, (pword *)ref);
1583    }
1584}
1585
1586
1587/*
1588 * Scan the trail for locations that have been bound since the creation
1589 * of the GCB choicepoint, and use these locations as marking roots.
1590 *
 * Because of value trailing, it is possible to encounter multiple
1592 * trail entries for the same location.  These may be several
1593 * value-trails, or one address-trail plus one or more value-trails.
1594 * Since our marking process is destructive, we cannot mark twice from
1595 * the same location.  To avoid this, we set the ALREADY_MARKED_FROM
1596 * bit in the tag of the trailed (and marked-from) location on the
1597 * first encounter, and suppress all subsequent marking attempts (the
1598 * corresponding check is in mark_from()).  These subsequent marking
1599 * attempts may occur either in mark_from_trail() itself or during
 * explicit marking of certain global locations in collect_stacks().
1601 * The bits are reset during the second trail traversal, in
1602 * early_untrail().  Great care must be taken to ensure that for every
1603 * bit-setting in mark_from_trail() there is corresponding code in
1604 * early_untrail() to reset it.
 * Caution: ALREADY_MARKED_FROM is physically the same bit as the
1606 * MARK bit, but there is no conflict because MARK bits are only set
1607 * within the current collection segment, while ALREADY_MARKED_FROM
1608 * bits are set only outside of it.
1609 */
1610
1611static void
1612mark_from_trail(control_ptr GCB)
1613{
1614    register pword *gc_tg = Chp_Tg(GCB);
1615    register pword **limit_tt = Chp_Tt(GCB);
1616    pword *gc_sp = Chp_Sp(GCB);
1617    register pword **tr = TT;
1618    register pword *trailed_item;
1619    word i, what;
1620
1621    while (tr < limit_tt)
1622	switch ((word) *tr & 3)
1623	{
1624	case TRAIL_ADDRESS:
1625	    trailed_item = *tr++;
1626	    if (trailed_item < gc_tg || trailed_item > gc_sp
1627#ifdef AS_EMU
1628		|| (trailed_item > TG_LIM && trailed_item < spmax_)
1629#endif
1630	    )
1631	    {
1632		Mark_from(trailed_item->tag.kernel, trailed_item, NO)
1633		Set_Bit(ALREADY_MARKED_FROM, trailed_item);
1634	    }
1635	    break;
1636	case TRAIL_TAG:
1637	    trailed_item = *(tr+1);
1638	    tr += 2;
1639	    if (trailed_item < gc_tg || trailed_item > TG_LIM)
1640	    {
1641		Mark_from(trailed_item->tag.kernel, trailed_item, NO)
1642		Set_Bit(ALREADY_MARKED_FROM, trailed_item);
1643	    }
1644	    break;
1645	case TRAIL_MULT:
1646	    i = (word) *tr++;
1647	    trailed_item = (pword *)((uword *)(*tr++) + TrailedOffset(i));
1648	    what = TrailedType(i);
1649	    i = TrailedNumber(i);
1650	    if (trailed_item < gc_tg || trailed_item > TG_LIM)
1651	    {
1652		if (what == TRAILED_PWORD)
1653		{
1654		    i /= 2;
1655		    if (i > 0)
1656		    {
1657			do
1658			{
1659			    Mark_from(((pword*)tr)->tag.kernel,
1660							((pword*)tr), NO);
1661			    if (trailed_item < gc_tg || trailed_item > gc_sp)
1662				Mark_from(trailed_item->tag.kernel,
1663							trailed_item, NO);
1664			    trailed_item++;
1665			    tr = (pword **)((pword*)tr + 1);
1666			} while (i--);
1667		    }
1668		    else
1669		    {
1670			/* Mark only from the current value, the old
1671			 * value is handled later in early_untrail()
1672			 */
1673			if (trailed_item < gc_tg || trailed_item > gc_sp)
1674			{
1675			    Mark_from(trailed_item->tag.kernel,
1676						    trailed_item, NO);
1677			    Set_Bit(ALREADY_MARKED_FROM, trailed_item);
1678			}
1679			tr = (pword **)((pword*)tr + 1);
1680		    }
1681		}
1682		else if (what == TRAILED_REF || what == TRAILED_COMP)
1683		{
1684		    word trailed_tag = trailed_item->tag.kernel;
1685#ifdef DEBUG_GC
1686		    if ((what == TRAILED_REF && !IsTag(trailed_tag,TVAR_TAG))
1687			|| (what == TRAILED_COMP && !IsTag(trailed_tag,TCOMP)))
1688		    {
1689			_gc_error("Illegal TRAILED_REF or TRAILED_COMP");
1690		    }
1691#endif
1692		    if (i > 0)
1693			do
1694			{
1695			    Mark_from_pointer(trailed_tag, ((pword*)tr), NO); /* old */
1696			    if (trailed_item < gc_tg || trailed_item > gc_sp)
1697				Mark_from_pointer(trailed_tag, trailed_item, NO);
1698			    trailed_item++;
1699			    tr++;
1700			} while (i--);
1701		    else
1702		    {
1703			/* Mark only from the current value, the old
1704			 * value is handled later in early_untrail()
1705			 */
1706			if (trailed_item < gc_tg || trailed_item > gc_sp)
1707			{
1708			    Mark_from_pointer(trailed_tag, trailed_item, NO);
1709			    Set_Bit(ALREADY_MARKED_FROM, trailed_item);
1710			}
1711			tr++;
1712		    }
1713		}
1714		else if (what == TRAILED_WORD32)
1715		    tr += i + 1;
1716		else
1717		    Print_Err1(
1718			"bad extension trail entry in mark_from_trail: %x\n",
1719			(word) *tr);
1720	    }
1721	    else	/* skip the trail entry */
1722		tr += i + 1;
1723	    break;
1724
1725	case TRAIL_EXT:
1726	    switch (TrailedEtype(*tr))
1727	    {
1728	    case TRAIL_UNDO:
1729		break;
1730
1731	    case TRAIL_UNDO_STAMPED:
1732		{
1733		    pword *stamp = tr[TRAIL_UNDO_STAMP_ADDRESS];
1734		    if (!InCurrentSegment(stamp))
1735		    {
1736			/* Mark only from the current value, the old
1737			 * value is handled later in early_untrail()
1738			 */
1739			Mark_from(stamp->tag.kernel, stamp, NO);
1740			Set_Bit(ALREADY_MARKED_FROM, stamp);
1741		    }
1742		}
1743		break;
1744
1745/**** BEGIN EXTENSION SLOT ****
1746
1747Name:	GC_MARK_TRAIL
1748
1749Parameters:
1750	pword **tr      points to extension trail frame
1751
1752Code Template:
1753	    case TRAIL_EXTENSION:
1754		if the trailed object is older than GCB then mark from the
1755		new value of the trailed object. For value trails the old
1756		value must be used for marking as well!
1757		break;
1758
1759****** END EXTENSION SLOT *****/
1760
1761	    default:
1762		Print_Err("unknown extension trail frame type in mark_from_trail\n");
1763		break;
1764	    }
1765	    tr += TrailedEsize(*tr);
1766	    break;
1767	}
1768}
1769
1770static void
1771_mark_from_global_variables(void)
1772{
1773    ec_ref ref = g_emu_.allrefs.next;
1774
1775    while(ref != &g_emu_.allrefs)
1776    {
1777	Mark_from(ref->var.tag.kernel, &ref->var, NO)
1778	ref = ref->next;
1779    }
1780}
1781
1782
1783/*
1784 * The basic marking procedure. It should not be called directly,
1785 * but the macro Mark_from() should always be used.
1786 *
1787 * ref	points to the word that has the reference.
1788 *	It is NOT always the value part of a pword !
1789 * tag	is the type of this reference (Ref or Compound tag)
1790 * ref_in_segment is YES, if the reference is within the
1791 *	collection segment, NO otherwise.
1792 *
1793 * NOTE: ref->tag may be already overwritten and hence different from tag
1794 *	or it may not even exist (eg. references from the trail)
1795 *
 *	Recursion has been removed using an explicit stack (the PDL below) on the local stack.
1797 */
1798
1799#define Pdl_Init()	pword *pdl_bottom = SP
1800#define Pdl_Empty()	(SP == pdl_bottom)
1801#define Pdl_Arity()	SP->tag.kernel
1802#define Pdl_Target()	SP->val.ptr
1803#define Pdl_Pop()	++SP
1804#define Pdl_Push(i,t) { \
1805	if (--SP <= g_emu_.sp_limit && local_ov()) \
1806	    ec_panic("Out of local stack space","garbage collection"); \
1807	SP->tag.kernel = (i); \
1808	SP->val.ptr = (t); \
1809    }
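

/*
 * Illustrative sketch only (disabled): the general pattern in which the
 * PDL macros above replace recursion -- outstanding (arity,target) pairs
 * are pushed onto the local stack and popped off again until the PDL is
 * empty.  mark_from() below inlines this pattern; process() and the
 * surrounding variables are hypothetical.
 */
#if 0
    Pdl_Init();
    for (;;)
    {
	process(target);	/* may Pdl_Push(n,t) further work */
	if (Pdl_Empty())
	    break;
	i = Pdl_Arity();	/* count pushed together with the target */
	target = Pdl_Target();
	Pdl_Pop();
    }
#endif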
1810
1811
1812static void
1813mark_from(
1814	word tag,		/* type of the reference */
1815	pword *ref,		/* location of the reference */
1816	int ref_in_segment)	/* true if ref is in the current segment */
1817{
1818    register pword *target;
1819    register word target_tag;
1820    register int i;
1821
1822    Pdl_Init();
1823
1824    /*
1825     * If the reference is from outside the collection segment, we may
1826     * already have used it for marking. In this case, ignore it now.
1827     */
1828    if (!ref_in_segment && AlreadyMarkedFrom(tag))
1829	return;
1830
1831    for(;;)		/* tail recursion loop */
1832    {
1833	target = ref->val.ptr;
1834	if (!InCurrentSegment(target))
1835	    goto _return_;
1836
1837	target_tag = target->tag.kernel;	/* save the original tag */
1838
1839	if (ref_in_segment && ref < target)
1840	{
1841	    Set_Bit(MARK, target)
1842	}
1843	else	/* a reference from outside into the current segment	*/
1844		/* or a down-pointer within the current segment		*/
1845	{
1846	    Into_Reloc_Chain(target, ref)
1847	}
1848
1849	/*
1850	 * CAUTION: the tag of the target is now destroyed!
1851	 * It is still available in target_tag.
1852	 */
1853
1854	if (ISRef(tag) && ref != target)	/* handling of untyped references	*/
1855	{
1856	    if (Marked(target_tag))
1857		goto _return_;
1858	    Check_Tag(target_tag)
1859	    Check_Tag_Range(target_tag)
1860	    /* Mark_from(target_tag, target, YES) */
1861	}
1862	else switch(TagTypeC(tag))	/* handling of typed pointers	*/
1863	{
1864
1865	case TLIST:
1866	case TRAT:
1867	case TMETA:			/* self reference or from trail */
1868	case THANDLE:
1869	    if (!Marked(target_tag))
1870	    {
1871		Check_Tag(target_tag)
1872		/* Mark_from(target_tag, target, YES) */
1873		if (ISPointer(target_tag))
1874		{
1875		    Pdl_Push(1,target+1);
1876		    goto _mark_from_pointer_;
1877		}
1878	    }
1879	    target_tag = (++target)->tag.kernel;
1880	    if (Marked(target_tag))
1881		goto _return_;
1882	    Check_Tag(target_tag)
1883	    Set_Bit(MARK, target)
1884	    /* Mark_from(target_tag, target, YES) */
1885	    break;
1886
1887	case TCOMP:
1888	    if (Marked(target_tag))
1889		goto _return_;		/* the structure is already marked as a whole */
1890	    Check_Tag(target_tag)
1891	    Check_Functor(target_tag)
1892	    i = DidArity(target->val.did);
1893	    ++target;
1894	    goto _mark_pwords_;		/* (i,target) */
1895
1896	case TVAR_TAG:
1897	case TNAME:
1898	case TUNIV:
1899	    if (Marked(target_tag))
1900		goto _return_;
1901	    Check_Tag(target_tag)
1902	    /* Mark_from(target_tag, target, YES) */
1903	    break;
1904
1905	case TSUSP:
1906	    if (!(tag & MARK_FULL_DE))
1907	    {
1908		if (Marked(target_tag))
1909		    goto _return_;
1910		Check_Tag(target_tag)
1911		Check_Susp(target_tag)
1912		/*
1913		 * mark suspensions according to their woken bit,
1914		 * either completely or only the header
1915		 */
1916		if (SuspTagDead(target_tag))
1917		    goto _return_;
1918	    }
1919	    else if (!Marked(target_tag))
1920	    {
1921		Check_Susp(target_tag)
1922	    }
1923	    /* mark the subsequent pwords: state, goal, module */
1924	    i = SUSP_SIZE - SUSP_HEADER_SIZE;
1925	    target += SUSP_HEADER_SIZE;
1926	    goto _mark_pwords_;		/* (i,target) */
1927
1928	case TDBL:
1929	case TBIG:
1930	case TIVL:
1931	case TSTRG:
1932	case TEXTERN:
1933	case TPTR:
1934	    goto _return_;		/* nothing to mark recursively	*/
1935
1936/**** BEGIN EXTENSION SLOT ****
1937
1938Name:	GC_MARK_TYPED_POINTER
1939
1940Desc:	The target item is referenced by a TEXTENSION_POINTER pointer.
1941	The target tag is already overwritten, but still available in
1942	target_tag. The code here should recursively mark what is
1943	referenced by the pointed-to item.
1944
1945Parameters:
1946	word target_tag		Tag and address of the first pword
1947	pword *target			referenced by the typed pointer
1948
1949Code Template:
1950	case TEXTENSION_POINTER:
1951	    Set the MARK bit and call Mark_from() for all pwords
1952	    contained in the referenced item and Mark_from_pointer()
1953	    for all potential references into the global stack.
1954	    The tail recursive call should be replaced by break;
1955	    If there is nothing to mark recursively: goto _return_;
1956
1957****** END EXTENSION SLOT *****/
1958
1959	default:
1960	    Print_Err1("bad pointer tag (%x) in mark_from\n", tag);
1961	    ec_flush(current_err_);
1962	    break;
1963	}
1964
1965/* _mark_from_: */		/* Mark_from(target_tag, target, YES) */
1966	if (!ISPointer(target_tag))
1967	    goto _return_;
1968
1969_mark_from_pointer_:		/* mark_from(target_tag, target, YES) */
1970	tag = target_tag;	/* setup parameters for tail recursion	*/
1971	ref = target;
1972	ref_in_segment = YES;
1973	continue;
1974
1975_return_:
1976	if (Pdl_Empty())
1977	    return;
1978	i = Pdl_Arity();
1979	target = Pdl_Target();
1980	Pdl_Pop();
1981
1982_mark_pwords_:			/* (i, target) */
1983	while(i-- > 0)
1984	{
1985	    target_tag = target->tag.kernel;
1986	    if (!Marked(target_tag))
1987	    {
1988		Check_Tag(target_tag)
1989		Set_Bit(MARK, target)
1990		/* Mark_from(target_tag, target, YES) */
1991		if (ISPointer(target_tag))
1992		{
1993		    if (i>0) { Pdl_Push(i,target+1); }
1994		    goto _mark_from_pointer_;
1995		}
1996	    }
1997	    ++target;
1998	}
1999	goto _return_;
2000
2001    } /* end for */
2002}
2003
2004
2005/*-------------------------------------------------------------------
2006 * compaction phase
2007 *-------------------------------------------------------------------*/
2008
2009/*
2010 * Compact the global stack in one bottom-up pass, updating the relocation
2011 * chains on-the-fly.
2012 * Note that, if there was no garbage, the items are copied onto themselves.
2013 * Otherwise, the destination is at least 1 pword below.
2014 */
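/*
 * Relocation chains (built with Into_Reloc_Chain() during marking): the tag
 * word of a cell that is referenced from below, or from outside the
 * collection segment, holds a link to a referring word, whose value word
 * holds the next link, and so on, until the last link yields the cell's
 * original tag.  As soon as the cell's new address 'compact' is known, the
 * chain is unlinked and every referrer is patched with that address (see
 * the loop at the top of the main while below).
 */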
2015
2016static void
2017compact_and_update(void)
2018{
2019    register pword *current, *compact, *ref;
2020    register word link_or_tag, current_tag;
2021
2022    current = compact = GCTG;
2023    while (current < TG)
2024    {
2025	link_or_tag = current_tag = current->tag.kernel;
2026	/* first update the relocation chain, if any	*/
2027	while (IsLink(link_or_tag))
2028	{
2029	    ref = LinkToPointer(link_or_tag);
2030	    link_or_tag = ref->val.all;
2031	    ref->val.ptr = compact;
2032	}
2033
2034	if (ISPointer(link_or_tag))
2035	{
2036	    if (Marked(current_tag))
2037	    {
2038		compact->tag.kernel = link_or_tag & ~MARK;
2039		if ((ref = current->val.ptr) > current && ref < TG)
2040		{
2041		    Into_Reloc_Chain(ref,compact)
2042		}
2043		else
2044		    compact->val.all = current->val.all;
2045		compact++;
2046	    }
2047	    current++;
2048	}
2049	else if (!ISSpecial(link_or_tag))	/* simple types */
2050	{
2051	    if (Marked(current_tag))
2052	    {
2053		compact->tag.kernel = link_or_tag & ~MARK;
2054		(compact++)->val.all = current->val.all;
2055	    }
2056	    current++;
2057	}
2058	else
2059	    switch (TagTypeC(link_or_tag))
2060	    {
2061	    case TDE:	/* treat suspension, except goal and module field */
2062		if (Marked(current_tag)) {
2063		    compact->tag.kernel = link_or_tag & ~MARK;
2064		    if ((ref = current->val.ptr) > current)	/* LD link */
2065		    {
2066#ifdef DEBUG_GC
2067			/* this case should never occur: LD goes down */
2068			_gc_error("LD list corrupted (5)\n");
2069#endif
2070			Into_Reloc_Chain(ref,compact)
2071		    }
2072		    else
2073			compact->val.all = current->val.all;
2074		    compact[SUSP_PRI] = current[SUSP_PRI];
2075		    compact[SUSP_INVOC] = current[SUSP_INVOC];
2076		    compact += SUSP_HEADER_SIZE;
2077		}
2078		current += SUSP_HEADER_SIZE;
2079		break;
2080
2081	    case TEXTERN:
2082		if (Marked(current_tag))
2083		{
2084		    compact->tag.kernel = link_or_tag & ~MARK;
2085		    (compact++)->val.all = current->val.all;
2086		    *compact++ = current[1];
2087		}
2088		current += 2;
2089		break;
2090
2091	    case TBUFFER:
2092		if (Marked(current_tag))
2093		{
2094		    int i = BufferPwords(current);
2095		    compact->tag.kernel = link_or_tag & ~MARK;
2096		    (compact++)->val.all = (current++)->val.all;
2097		    do
2098			*compact++ = *current++;
2099		    while (--i > 1);
2100		}
2101		else
2102		    current += BufferPwords(current);
2103		break;
2104
2105/**** BEGIN EXTENSION SLOT ****
2106
2107Name:	GC_COMPACT
2108
2109Parameters:
2110	current	 	old address of the object
2111	compact	 	new address of the object
2112
2113Code Template:
2114	    case TEXTENSION:
2115		if (Marked(current_tag))
2116		{
2117		    copy the object down from current to compact;
2118		    if it contains pointers UP the global stack,
2119		    these must be entered into a relocation chain
2120		    rather than copied
2121		}
2122		else
2123		{
2124		    skip the object by incrementing current
2125		}
2126		break;
2127
2128****** END EXTENSION SLOT *****/
2129
2130	    default:
2131		Print_Err1("illegal tag (%d) in compact_and_update\n",
2132		    (word) TagTypeC(link_or_tag));
2133		ec_flush(current_err_);
2134		current++;
2135		break;
2136	    }
2137    }
2138#ifdef WIPE_FREE_GLOBAL
2139    while (compact < current)
2140    {
2141    	compact->val.ptr = 0;
2142	(compact++)->tag.kernel = TEND;
2143    }
2144#endif
2145}
2146
2147
2148/*
2149 * Compact the trail by copying down all the space between
2150 * the elements of the garbage list.
2151 */
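/*
 * The garbage frames are chained through their trailed-location word;
 * the live trail entries between consecutive garbage frames are shifted
 * to close the gaps, and TT is set to the new trail top.
 */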
2152static void
2153compact_trail(register pword **garbage_list)
2154{
2155    register pword **compact, **from, **to;
2156
2157    End_Of_Frame(garbage_list, compact);
2158    from = garbage_list;
2159    garbage_list = (pword **)TrailedLocation(garbage_list);
2160    while (garbage_list) {
2161	End_Of_Frame(garbage_list, to);
2162	while (from > to)
2163	    *--compact = *--from;
2164	from = garbage_list;
2165	garbage_list = (pword **)TrailedLocation(garbage_list);
2166    }
2167    to = TT;
2168    while (from > to)
2169	*--compact = *--from;
2170    TT = compact;
2171}
2172
2173
2174/*
2175 * Set the tt fields of the control frames to their new values
2176 */
2177static void
2178update_trail_ptrs(control_ptr GCB)
2179{
2180    register control_ptr fp, top;
2181
2182    fp.top = B.top;
2183    do {
2184	top.top = (fp.top - 1);
2185	fp.any_frame.chp = top.top->frame.chp;
2186	fp.chp->tt = TT + (word)(fp.chp->tt);
2187    } while (fp.top > GCB.top);
2188}
2189
2190
2191/*-------------------------------------------------------------------
2192 * overflow in spite of GC or in a position where no GC can be done
2193 *-------------------------------------------------------------------*/
2194
2195/*
2196 * TT has grown below TT_LIM
2197 *
2198 * We first reduce the gap from TRAIL_GAP to GLOBAL_TRAIL_GAP and trigger a gc,
2199 * which will hopefully reduce the trail. If not, we get a second overflow,
2200 * and then we grow the trail by calling adjust_stacks().
2201 */
2202
2203#define	TRAIL_GAP	(GLOBAL_TRAIL_GAP + 128)
2204
2205void
2206trail_ov(void)
2207{
2208    TT_LIM = (pword **)
2209	    ((pword *) g_emu_.global_trail[1].end + GLOBAL_TRAIL_GAP);
2210    if (TT > TT_LIM)
2211    {
2212	/* There is still some space; schedule a global stack collection only
2213	 */
2214	if (TG_SLS > TG)
2215	{
2216	    Restore_Tg_Soft_Lim(TG)
2217	}
2218	return;
2219    }
2220
2221    /* grow the trail */
2222    if (!adjust_stacks(g_emu_.global_trail,
2223	    g_emu_.global_trail[0].end,
2224	    (uword *) ((pword *) TT - TRAIL_GAP), 0))
2225    {
2226	/* stacks collide, make a last try with shrinking the global */
2227	if (!adjust_stacks(g_emu_.global_trail,
2228		(uword *) (TG + GLOBAL_TRAIL_GAP),
2229		(uword *) ((pword *) TT - TRAIL_GAP), 0))
2230	{
2231	    ov_reset();		/* give up */
2232	}
2233	Set_Tg_Lim((pword *) g_emu_.global_trail[0].end - GLOBAL_TRAIL_GAP)
2234    }
2235    TT_LIM = (pword **)
2236	    ((pword *) g_emu_.global_trail[1].end + TRAIL_GAP);
2237    return;
2238}
2239
2240/*
2241 * TG has grown above TG_LIM (and above TG_SL)
2242 * Should happen only outside the emulator (when no GC can be done)
2243 * or due to some erroneous big allocation inside the emulator.
2244 * We increase TG_LIM as much as necessary. This is first tried without
2245 * shrinking the trail and, if that fails, with shrinking it.
2246 */
2247void
2248global_ov(void)
2249{
2250    if (final_overflow())
2251	ov_reset();
2252}
2253
2254
2255/*
2256 * The same as global_ov(), but returns true or false
2257 */
2258
2259int
2260final_overflow(void)
2261{
2262    if (!adjust_stacks(g_emu_.global_trail,
2263	    (uword *) (TG + GLOBAL_TRAIL_GAP + 1), /* +1 to avoid looping */
2264	    g_emu_.global_trail[1].end, 0))
2265    {
2266	/* stacks collide, make a last try with shrinking the trail */
2267	if (!adjust_stacks(g_emu_.global_trail,
2268		(uword *) (TG + GLOBAL_TRAIL_GAP + 1),
2269		(uword *) ((pword *) TT - TRAIL_GAP), 0))
2270	{
2271	    return 1;
2272	}
2273	TT_LIM = (pword **)
2274	    ((pword *) g_emu_.global_trail[1].end + TRAIL_GAP);
2275    }
2276    Set_Tg_Lim((pword *) g_emu_.global_trail[0].end - GLOBAL_TRAIL_GAP)
2277    return 0;
2278}
2279
2280
2281/*
2282 * SP has grown below sp_limit
2283 */
2284
2285int
2286local_ov(void)
2287{
2288    if (!adjust_stacks(g_emu_.control_local,
2289	    g_emu_.control_local[0].end,
2290	    (uword *) (SP - LOCAL_CONTROL_GAP), 0))
2291    {
2292	if (!adjust_stacks(g_emu_.control_local,
2293		(uword *) (B.args + LOCAL_CONTROL_GAP),
2294		(uword *) (SP - LOCAL_CONTROL_GAP), 0))
2295	{
2296	    return 1;
2297	}
2298	g_emu_.b_limit =
2299	    (pword *) g_emu_.control_local[0].end - LOCAL_CONTROL_GAP;
2300    }
2301    g_emu_.sp_limit = (pword *) g_emu_.control_local[1].end + LOCAL_CONTROL_GAP;
2302    return 0;
2303}
2304
2305int
2306control_ov(void)
2307{
2308    if (!adjust_stacks(g_emu_.control_local,
2309	    (uword *) (B.args + LOCAL_CONTROL_GAP),
2310	    g_emu_.control_local[1].end, 0))
2311    {
2312	if (!adjust_stacks(g_emu_.control_local,
2313		(uword *) (B.args + LOCAL_CONTROL_GAP),
2314		(uword *) (SP - LOCAL_CONTROL_GAP), 0))
2315	{
2316	    return 1;
2317	}
2318	g_emu_.sp_limit =
2319	    (pword *) g_emu_.control_local[1].end + LOCAL_CONTROL_GAP;
2320    }
2321    g_emu_.b_limit = (pword *) g_emu_.control_local[0].end - LOCAL_CONTROL_GAP;
2322    return 0;
2323}
2324
2325
2326/*
2327 * Adjust the stacks such that the global stack has space for margin pwords.
2328 * Return 0 if that was not possible.
2329 * Set TG_LIM and TT_LIM according to new stack sizes, leaving proper gaps.
2330 */
2331
2332int
2333trim_global_trail(uword margin)
2334{
2335    pword *tg_new, *tt_new, *split_at;
2336    uword ratio;
2337    int res = 1;
2338
2339    /* compute the current global/trail ratio (careful with boundary conditions) */
2340    /* for small stacks this approaches ratio 32 = 32000/1000 */
2341    ratio = ((uword*)TG - (uword*)TG_ORIG + 32000) / ((uword*)TT_ORIG - (uword*)TT + 1000);
2342    if (ratio == 0) ratio = 1;
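    /* e.g. 64M uwords of used global and 2M uwords of used trail give a
     * ratio of ~32, so below roughly one pword of trail is reserved per
     * 32 pwords of global margin */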
2343
2344    Safe_Add_To_Pointer(TG, margin + GLOBAL_TRAIL_GAP, (pword *) TT, tg_new);
2345    Safe_Sub_From_Pointer((pword *) TT, margin/ratio + TRAIL_GAP, (pword *) TG, tt_new);
2346    /* first try to grow global and trail proportionally */
2347    if (!adjust_stacks(g_emu_.global_trail, (uword*) tg_new, (uword *) tt_new, 0))
2348    {
2349	/* try without accommodating margin, just partition the remaining
2350	 * space, roughly preserving the current trail/global ratio
2351	 */
2352	res = 0;
2353	split_at = (pword *) TT - ((pword *) TT - TG)/(ratio + 1);
2354	tg_new = TG + GLOBAL_TRAIL_GAP;
2355	tt_new = (pword *) TT - TRAIL_GAP;
2356
2357	if (!adjust_stacks(g_emu_.global_trail, (uword*) tg_new, (uword*) tt_new, (uword *) split_at))
2358	{
2359	    return res;
2360	}
2361    }
2362    /* the following will also adjust TG_SL if necessary */
2363    Set_Tg_Lim((pword *) g_emu_.global_trail[0].end - GLOBAL_TRAIL_GAP)
2364    TT_LIM = (pword **) ((pword *) g_emu_.global_trail[1].end + TRAIL_GAP);
2365    return res;
2366}
2367
2368
2369/*
2370 * Adjust local control to have some default space above the stack tops
2371 */
2372#define LOCAL_CONTROL_DEFAULT	LOCAL_CONTROL_GAP
2373int
2374trim_control_local(void)
2375{
2376    if (!adjust_stacks(g_emu_.control_local,
2377	    (uword *) (B.args + LOCAL_CONTROL_DEFAULT),
2378	    (uword *) (SP - LOCAL_CONTROL_DEFAULT), 0))
2379    {
2380	return 0;
2381    }
2382    g_emu_.b_limit = (pword *) g_emu_.control_local[0].end - LOCAL_CONTROL_GAP;
2383    g_emu_.sp_limit = (pword *) g_emu_.control_local[1].end + LOCAL_CONTROL_GAP;
2384    return 1;
2385}
2386
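/*
 * Give up on overflow handling: exit the current block, throwing the
 * atom global_trail_overflow.
 */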
2387static void
2388ov_reset(void)
2389{
2390    pword exit_tag;
2391    Make_Atom(&exit_tag, d_.global_trail_overflow);
2392    Exit_Block(exit_tag.val, exit_tag.tag);
2393}
2394
2395
2396/*-------------------------------------------------------------------
2397 * Marking routines for dictionary GC
2398 *-------------------------------------------------------------------*/
2399
2400/*
2401 * Mark the DIDs in a consecutive block of pwords. This block may be in
2402 * the Prolog stacks or on the heap. Note that we do not follow references
2403 * and the like; we just scan the block once, looking for atoms and functors
2404 * (TDICT tags), variable names, persistent strings and external objects.
2405 */
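/*
 * Typical use: a whole stack section or a block of saved arguments, e.g.
 *	mark_dids_from_pwords(TG_ORIG, TG);
 * as done for the global stack in mark_dids_from_stacks() below.
 */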
2406
2407void
2408mark_dids_from_pwords(pword *from, register pword *to)
2409{
2410    register pword *pw = from;
2411    dident a;
2412
2413    while (pw < to)
2414    {
2415	switch (TagType(pw->tag))
2416	{
2417	case TDICT:			/* mark atoms and functors */
2418	    if ((a = pw->val.did) != D_UNKNOWN)
2419	    {
2420		Mark_Did(a);
2421	    }
2422	    else
2423	    {
2424		Print_Err("Undefined atom or functor");
2425	    }
2426	    pw++;
2427	    break;
2428
2429	case TSTRG:
2430	    /* handle persistent strings by marking the corresponding atom */
2431	    if (StringInDictionary(pw->val))
2432	    {
2433		a = check_did_n(StringStart(pw->val), StringLength(pw->val), 0);
2434		if (a != D_UNKNOWN)
2435		{
2436		    Mark_Did(a);
2437		}
2438		else
2439		{
2440		    Print_Err("No atom corresponding to persistent string");
2441		}
2442	    }
2443	    pw++;
2444	    break;
2445
2446	case TNAME:			/* mark variable names */
2447	case TMETA:
2448	case TUNIV:
2449	    if (IsNamed(pw->tag.kernel))
2450	    {
2451		Mark_VarName(pw->tag.kernel);
2452	    }
2453	    pw++;
2454	    break;
2455
2456	case TDE:
2457	    pw += SUSP_HEADER_SIZE;
2458	    break;
2459
2460	case TBUFFER:
2461	    pw += BufferPwords(pw);
2462	    break;
2463
2464	case TEXTERN:
2465	    if (IsTag(pw[1].tag.kernel, TPTR))
2466	    {
2467		if (ExternalClass(pw)->mark_dids && ExternalData(pw))
2468		{
2469		    ExternalClass(pw)->mark_dids(ExternalData(pw));
2470		}
2471		pw += 2;
2472	    }
2473	    else
2474	    {
2475		Print_Err("TEXTERN not followed by TPTR");
2476		pw += 1;
2477	    }
2478	    break;
2479
2480/**** BEGIN EXTENSION SLOT ****
2481
2482Name:	GC_MARK_DIDS_FROM_PWORDS
2483
2484Parameters:
2485	pw	 	pword to mark from
2486
2487Code Template:
2488	case TEXTENSION:
2489	    If object contains dictionary references, call Mark_Did()
2490	    or Mark_VarName() and increment pw as needed.
2491	    If no dictionary references, only increment pw.
2492
2493****** END EXTENSION SLOT *****/
2494
2495	default:			/* skip other pword-sized stuff */
2496	    pw++;
2497	    break;
2498	}
2499    }
2500}
2501
2502
2503void
2504mark_dids_from_stacks(word arity)
2505{
2506    make_choicepoint(arity);
2507
2508    /* global */
2509
2510    mark_dids_from_pwords(TG_ORIG, TG);
2511
2512
2513    /* trail */
2514
2515    {
2516	register pword **tt = TT;
2517	word	i;
2518
2519	while(tt < TT_ORIG)
2520	{
2521	    switch((((word) *tt) & 3))
2522	    {
2523	    case TRAIL_ADDRESS:
2524		break;
2525	    case TRAIL_TAG:
2526		if (IsNamed(TrailedTag(*tt)))
2527		{
2528		    Mark_VarName(TrailedTag(*tt));
2529		}
2530		break;
2531	    case TRAIL_MULT:
2532		i = (word) *tt;
2533		switch (TrailedType(i))
2534		{
2535		case TRAILED_PWORD:
2536		    mark_dids_from_pwords((pword *) (tt+2),
2537					(pword *) (tt+3+TrailedNumber(i)));
2538		    break;
2539		}
2540		break;
2541	    case TRAIL_EXT:
2542		i = (word) *tt;
2543		switch (TrailedEtype(i))
2544		{
2545		case TRAIL_UNDO:
2546		    switch (TrailedType(i))
2547		    {
2548		    case TRAILED_PWORD:
2549			mark_dids_from_pwords(
2550			    (pword *) (tt+TRAIL_UNDO_SIMPLE_HEADER_SIZE),
2551			    (pword *) (tt+TrailedEsize(i)));
2552			break;
2553		    }
2554		    break;
2555		case TRAIL_UNDO_STAMPED:
2556		    /* TRAIL_UNDO_STAMP_ADDRESS and TRAIL_UNDO_OLDSTAMP
2557		     * don't contain dids and don't need to be marked */
2558		    switch (TrailedType(i))
2559		    {
2560		    case TRAILED_PWORD:
2561			mark_dids_from_pwords(
2562			    (pword *) (tt+TRAIL_UNDO_STAMPED_HEADER_SIZE),
2563			    (pword *) (tt+TrailedEsize(i)));
2564			break;
2565		    }
2566		    break;
2567		default:
2568		    break;
2569		}
2570		break;
2571	    }
2572	    End_Of_Frame(tt, tt);
2573	}
2574    }
2575
2576
2577    /* control & local */
2578
2579    {
2580	control_ptr		fp, top;
2581	register pword	*env, *pw;
2582	pword		*next_chain, *mergepoint, *next_mergepoint;
2583	word		edesc;
2584
2585	mergepoint = ((invoc_ptr) (B_ORIG + SAFE_B_AREA))->e;
2586	top.top = B.top - 1;		/* find first full frame	*/
2587	fp.any_frame = top.top->frame;
2588
2589	for (;;)	/* loop through all control frames, except the bottom one */
2590	{
2591	    if (IsRetryMeInlineFrame(top.top))
2592	    {
2593		edesc = EnvDescPP(top.top->backtrack + RETRY_ME_INLINE_SIZE - 1);
2594	    }
2595	    else if (IsTrustMeInlineFrame(top.top))
2596	    {
2597		edesc = EnvDescPP(top.top->backtrack + TRUST_ME_INLINE_SIZE - 1);
2598	    }
2599	    else if (IsRetryInlineFrame(top.top))
2600	    {
2601		edesc = EnvDescPP(top.top->backtrack + RETRY_INLINE_SIZE - 1);
2602	    }
2603	    else if (IsTrustInlineFrame(top.top))
2604	    {
2605		edesc = EnvDescPP(top.top->backtrack + TRUST_INLINE_SIZE - 1);
2606	    }
2607	    else if (IsInterruptFrame(top.top) || IsRecursionFrame(top.top))
2608	    {
2609		break;
2610	    }
2611	    else if (IsExceptionFrame(top.top))
2612	    {
2613		break;	/* must not occur. problem: size cannot be determined! */
2614		/* mark the saved waking stack and the saved arguments
2615		 * mark_dids_from_pwords((pword *)(fp.exception + 1), top.args);
2616		 */
2617	    }
2618	    else if (IsParallelFrame(top.top))
2619	    {
2620		mark_dids_from_pwords((pword *)(fp.chp_par + 1), top.args);
2621		edesc = EnvDesc(fp.chp_par->sp);
2622	    }
2623	    else /* if (IsChoicePoint(top.top)) */
2624	    {
2625		mark_dids_from_pwords((pword *)(fp.chp + 1), top.args);
2626		edesc = EnvDesc(fp.chp->sp);
2627	    }
2628
2629	    top.top = fp.top - 1;		/* find next full frame	*/
2630
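	    /* walk the environment chain, marking the dictionary
	     * references in each environment pword pw */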
2631	    Walk_Env_Chain( /* (fp,mergepoint,next_chain,next_mergepoint,edesc) */
2632		mark_dids_from_pwords(pw, pw+1);
2633	    )
2634
2635	    fp.any_frame = top.top->frame;
2636	}
2637
2638	if (fp.args == B_ORIG + SAFE_B_AREA)
2639	{
2640	    mark_dids_from_pwords(&fp.invoc->arg_0, top.args);
2641	}
2642	else
2643	{
2644	    Print_Err("bad bottom frame in mark_dids_from_stacks()\n");
2645	}
2646    }
2647
2648    pop_choicepoint();
2649}
2650
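/*
 * Return 1 if an exception frame is found when scanning the control
 * frames down from B, before reaching an interrupt or recursion frame;
 * return 0 otherwise.
 */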
2651int in_exception(void)
2652{
2653    control_ptr		top;
2654
2655    for(top.top = B.top - 1; ; top.top = top.top->frame.top - 1)
2656    {
2657	if (IsInterruptFrame(top.top) || IsRecursionFrame(top.top))
2658	{
2659	    break;
2660	}
2661	else if (IsExceptionFrame(top.top))
2662	{
2663	    return 1;
2664	}
2665    }
2666    return 0;
2667}
2668
2669/*-------------------------------------------------------------------
2670 * Initialisation
2671 *-------------------------------------------------------------------*/
2672
2673void
2674bip_gc_init(int flags)
2675{
2676    if (flags & INIT_SHARED)
2677    {
2678	(void) exported_built_in(in_dict("statistics_reset",0),
2679				p_stat_reset,	B_SAFE);
2680	(void) local_built_in(in_dict("gc_stat", 2),
2681				p_gc_stat,	B_UNSAFE|U_SIMPLE);
2682	(void) local_built_in(in_dict("gc_interval", 1),
2683				p_gc_interval,	B_UNSAFE|U_SIMPLE);
2684    }
2685
2686    if (flags & INIT_PRIVATE)
2687    {
2688	Make_Ref(&g_emu_.allrefs.var,NULL);
2689	g_emu_.allrefs.next = & g_emu_.allrefs ;
2690	g_emu_.allrefs.prev = & g_emu_.allrefs ;
2691    }
2692}
2693