1/***********************************************************************
2*                                                                      *
3*               This software is part of the ast package               *
4*          Copyright (c) 1985-2010 AT&T Intellectual Property          *
5*                      and is licensed under the                       *
6*                  Common Public License, Version 1.0                  *
7*                    by AT&T Intellectual Property                     *
8*                                                                      *
9*                A copy of the License is available at                 *
10*            http://www.opensource.org/licenses/cpl1.0.txt             *
11*         (with md5 checksum 059e8cd6165cb4c31e351f2b69388fd9)         *
12*                                                                      *
13*              Information and Software Systems Research               *
14*                            AT&T Research                             *
15*                           Florham Park NJ                            *
16*                                                                      *
17*                 Glenn Fowler <gsf@research.att.com>                  *
18*                  David Korn <dgk@research.att.com>                   *
19*                   Phong Vo <kpv@research.att.com>                    *
20*                                                                      *
21***********************************************************************/
#if defined(_UWIN) && defined(_BLD_ast)

/* On UWIN ast builds the whole implementation below is compiled out;
** this empty stub keeps the translation unit from being empty.
*/
void _STUB_vmlast(){}

#else
27
28#include	"vmhdr.h"
29
30/*	Allocation with freeing and reallocing of last allocated block only.
31**
32**	Written by Kiem-Phong Vo, kpv@research.att.com, 01/16/94.
33*/
34
/*	Allocate a block of at least `size' bytes from the head of a
**	segment's free area.  The new block becomes the region's "last"
**	block (vd->free), the only one that lastfree/lastresize accept.
**	Returns NIL on lock contention or when no space can be obtained.
*/
#if __STD_C
static Void_t* lastalloc(Vmalloc_t* vm, size_t size)
#else
static Void_t* lastalloc(vm, size)
Vmalloc_t*	vm;
size_t		size;
#endif
{
	reg Block_t	*tp, *next;
	reg Seg_t	*seg, *last;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;
	reg int		local, inuse;
	size_t		orgsize = 0;

	SETINUSE(vd, inuse);
	if(!(local = vd->mode&VM_TRUST))
	{	GETLOCAL(vd,local);
		if(ISLOCK(vd,local))	/* region busy: fail rather than block */
		{	CLRINUSE(vd, inuse);
			return NIL(Void_t*);
		}
		SETLOCK(vd,local);
		orgsize = size;	/* remember caller's size for tracing */
	}

	size = size < ALIGN ? ALIGN : ROUND(size,ALIGN);
	for(;;)
	{	/* search for a segment with enough free space; on a hit,
		** rotate that segment to the front so vd->seg is current
		*/
		for(last = NIL(Seg_t*), seg = vd->seg; seg; last = seg, seg = seg->next)
		{	if(!(tp = seg->free) || (SIZE(tp)+sizeof(Head_t)) < size)
				continue;
			if(last)
			{	last->next = seg->next;
				seg->next = vd->seg;
				vd->seg = seg;
			}
			goto got_block;
		}

		/* there is no usable free space in region, try extending */
		if((tp = (*_Vmextend)(vm,size,NIL(Vmsearch_f))) )
		{	seg = SEG(tp);
			goto got_block;
		}
		else if(vd->mode&VM_AGAIN)
			vd->mode &= ~VM_AGAIN;	/* retry the search once */
		else	goto done;	/* tp is NIL here: allocation failed */
	}

got_block:
	/* carve `size' bytes off the front of the free block; any
	** remainder stays behind as the segment's free block
	*/
	if((s = SIZE(tp)) >= size)
	{	next = (Block_t*)((Vmuchar_t*)tp+size);
		SIZE(next) = s - size;
		SEG(next) = seg;
		seg->free = next;
	}
	else	seg->free = NIL(Block_t*);

	/* record the one block that may subsequently be freed/resized */
	vd->free = seg->last = tp;

	if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
		(*_Vmtrace)(vm, NIL(Vmuchar_t*), (Vmuchar_t*)tp, orgsize, 0);

done:
	CLRLOCK(vd,local);
	ANNOUNCE(local, vm, VM_ALLOC, (Void_t*)tp, vm->disc);
	CLRINUSE(vd, inuse);
	return (Void_t*)tp;
}
104
/*	Free a block.  Only the most recently allocated block (vd->free)
**	may be freed; any other address raises VM_BADADDR and fails.
**	Returns 0 on success, -1 on error.
*/
#if __STD_C
static int lastfree(Vmalloc_t* vm, reg Void_t* data )
#else
static int lastfree(vm, data)
Vmalloc_t*	vm;
reg Void_t*	data;
#endif
{
	reg Seg_t*	seg;
	reg Block_t*	fp;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;
	reg int		local, inuse;

	if(!data)	/* freeing NIL is a no-op, as with free(NULL) */
		return 0;

	SETINUSE(vd, inuse);
	if(!(local = vd->mode&VM_TRUST) )
	{	GETLOCAL(vd, local);
		if(ISLOCK(vd, local))
		{	CLRINUSE(vd, inuse);
			return -1;
		}
		SETLOCK(vd, local);
	}
	if(data != (Void_t*)vd->free)	/* not the last allocated block */
	{	if(!local && vm->disc->exceptf)
			(void)(*vm->disc->exceptf)(vm,VM_BADADDR,data,vm->disc);
		CLRLOCK(vd, local);
		CLRINUSE(vd, inuse);
		return -1;
	}

	seg = vd->seg;
	if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
	{	/* report the block's usable size: up to the segment's free
		** block if there is one, else to the end of the segment
		*/
		if(seg->free )
			s = (Vmuchar_t*)(seg->free) - (Vmuchar_t*)data;
		else	s = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
		(*_Vmtrace)(vm, (Vmuchar_t*)data, NIL(Vmuchar_t*), s, 0);
	}

	/* reclaim everything from data to the segment end as free space */
	vd->free = NIL(Block_t*);
	fp = (Block_t*)data;
	SEG(fp)  = seg;
	SIZE(fp) = ((Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data) - sizeof(Head_t);
	seg->free = fp;
	seg->last = NIL(Block_t*);

	CLRLOCK(vd, local);
	ANNOUNCE(local, vm, VM_FREE, data, vm->disc);

	CLRINUSE(vd, inuse);
	return 0;
}
160
/*	Resize a block.  NIL data degenerates to lastalloc; size 0
**	degenerates to lastfree (realloc-style semantics).  Only the
**	last allocated block can be resized in place; other addresses
**	may succeed only via VM_RSMOVE/VM_RSCOPY reallocation.
*/
#if __STD_C
static Void_t* lastresize(Vmalloc_t* vm, reg Void_t* data, size_t size, int type )
#else
static Void_t* lastresize(vm, data, size, type )
Vmalloc_t*	vm;
reg Void_t*	data;
size_t		size;
int		type;
#endif
{
	reg Block_t*	tp;
	reg Seg_t	*seg;
	reg size_t	oldsize;
	reg ssize_t	s, ds;
	reg Vmdata_t*	vd = vm->data;
	reg int		local, inuse;
	reg Void_t*	addr;
	Void_t*		orgdata = NIL(Void_t*);
	size_t		orgsize = 0;

	SETINUSE(vd, inuse);
	if(!data)	/* resize of NIL is a plain allocation */
	{	oldsize = 0;
		data = lastalloc(vm,size);
		goto done;
	}
	if(size <= 0)	/* size_t, so this means size == 0: free the block */
	{	(void)lastfree(vm,data);
		CLRINUSE(vd, inuse);
		return NIL(Void_t*);
	}

	if(!(local = vd->mode&VM_TRUST))
	{	GETLOCAL(vd, local);
		if(ISLOCK(vd, local))
		{	CLRINUSE(vd, inuse);
			return NIL(Void_t*);
		}
		SETLOCK(vd, local);
		orgdata = data;	/* for tracing */
		orgsize = size;
	}

	if(data == (Void_t*)vd->free)
		seg = vd->seg;
	else
	{	/* see if it was one of ours */
		for(seg = vd->seg; seg; seg = seg->next)
			if(data >= seg->addr && data < (Void_t*)seg->baddr)
				break;
		if(!seg || (VLONG(data)%ALIGN) != 0 ||
		   (seg->last && (Vmuchar_t*)data > (Vmuchar_t*)seg->last) )
		{	/* NOTE(review): CLRLOCK uses 0 here, not `local' as
			** elsewhere in this file — verify against GETLOCAL/
			** CLRLOCK semantics in vmhdr.h
			*/
			CLRLOCK(vd,0);
			CLRINUSE(vd, inuse);
			return NIL(Void_t*);
		}
	}

	/* set 's' to be the current available space */
	if(data != seg->last)
	{	if(seg->last && (Vmuchar_t*)data < (Vmuchar_t*)seg->last)
			oldsize = (Vmuchar_t*)seg->last - (Vmuchar_t*)data;
		else	oldsize = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
		s = -1;	/* not the last block: no in-place growth */
	}
	else
	{	s = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
		if(!(tp = seg->free) )
			oldsize = s;
		else
		{	oldsize = (Vmuchar_t*)tp - (Vmuchar_t*)data;
			seg->free = NIL(Block_t*);	/* absorb the free block */
		}
	}

	size = size < ALIGN ? ALIGN : ROUND(size,ALIGN);
	if(s < 0 || (ssize_t)size > s)
	{	if(s >= 0) /* amount to extend */
		{	ds = size-s; ds = ROUND(ds,vd->incr);
			addr = (*vm->disc->memoryf)(vm, seg->addr, seg->extent,
						    seg->extent+ds, vm->disc);
			if(addr == seg->addr)	/* segment grew in place */
			{	s += ds;
				seg->size += ds;
				seg->extent += ds;
				seg->baddr += ds;
				SIZE(BLOCK(seg->baddr)) = BUSY;
			}
			else	goto do_alloc;
		}
		else
		{ do_alloc:	/* must move the data to a fresh block */
			if(!(type&(VM_RSMOVE|VM_RSCOPY)) )
				data = NIL(Void_t*);
			else
			{	tp = vd->free;	/* save in case alloc fails */
				if(!(addr = KPVALLOC(vm,size,lastalloc)) )
				{	vd->free = tp;
					data = NIL(Void_t*);
				}
				else
				{	if(type&VM_RSCOPY)
					{	ds = oldsize < size ? oldsize : size;
						memcpy(addr, data, ds);
					}

					/* return the old space to its segment
					** if that is not the current one
					*/
					if(s >= 0 && seg != vd->seg)
					{	tp = (Block_t*)data;
						SEG(tp) = seg;
						SIZE(tp) = s - sizeof(Head_t);
						seg->free = tp;
					}

					/* new block and size */
					data = addr;
					seg = vd->seg;
					s = (Vmuchar_t*)BLOCK(seg->baddr) -
					    (Vmuchar_t*)data;
					seg->free = NIL(Block_t*);
				}
			}
		}
	}

	if(data)
	{	/* give back whatever of `s' exceeds the requested size */
		if(s >= (ssize_t)(size+sizeof(Head_t)) )
		{	tp = (Block_t*)((Vmuchar_t*)data + size);
			SEG(tp) = seg;
			SIZE(tp) = (s - size) - sizeof(Head_t);
			seg->free = tp;
		}

		vd->free = seg->last = (Block_t*)data;

		if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
			(*_Vmtrace)(vm,(Vmuchar_t*)orgdata,(Vmuchar_t*)data,orgsize,0);
	}

	CLRLOCK(vd, local);
	ANNOUNCE(local, vm, VM_RESIZE, data, vm->disc);

done:	if(data && (type&VM_RSZERO) && size > oldsize)
		memset((Void_t*)((Vmuchar_t*)data + oldsize), 0, size-oldsize);

	CLRINUSE(vd, inuse);
	return data;
}
308
309
310#if __STD_C
311static long lastaddr(Vmalloc_t* vm, Void_t* addr)
312#else
313static long lastaddr(vm, addr)
314Vmalloc_t*	vm;
315Void_t*		addr;
316#endif
317{
318	reg Vmdata_t*	vd = vm->data;
319
320	if(!(vd->mode&VM_TRUST) && ISLOCK(vd,0))
321		return -1L;
322	if(!vd->free || addr < (Void_t*)vd->free || addr >= (Void_t*)vd->seg->baddr)
323		return -1L;
324	else	return (Vmuchar_t*)addr - (Vmuchar_t*)vd->free;
325}
326
327#if __STD_C
328static long lastsize(Vmalloc_t* vm, Void_t* addr)
329#else
330static long lastsize(vm, addr)
331Vmalloc_t*	vm;
332Void_t*		addr;
333#endif
334{
335	reg Vmdata_t*	vd = vm->data;
336
337	if(!(vd->mode&VM_TRUST) && ISLOCK(vd,0))
338		return -1L;
339	if(!vd->free || addr != (Void_t*)vd->free )
340		return -1L;
341	else if(vd->seg->free)
342		return (Vmuchar_t*)vd->seg->free - (Vmuchar_t*)addr;
343	else	return (Vmuchar_t*)vd->seg->baddr - (Vmuchar_t*)addr - sizeof(Head_t);
344}
345
/*	Compact the region: attempt to return each segment's free space
**	to the system via _Vmtruncate.  Returns 0 on success, -1 if the
**	region is locked.
*/
#if __STD_C
static int lastcompact(Vmalloc_t* vm)
#else
static int lastcompact(vm)
Vmalloc_t*	vm;
#endif
{
	reg Block_t*	fp;
	reg Seg_t	*seg, *next;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;
	reg int		inuse;

	SETINUSE(vd, inuse);
	if(!(vd->mode&VM_TRUST))
	{	if(ISLOCK(vd,0))
		{	CLRINUSE(vd, inuse);
			return -1;
		}
		SETLOCK(vd,0);
	}

	/* grab `next' up front: _Vmtruncate may release the segment */
	for(seg = vd->seg; seg; seg = next)
	{	next = seg->next;

		if(!(fp = seg->free))
			continue;

		seg->free = NIL(Block_t*);
		/* if the whole segment is free, truncate it entirely */
		if(seg->size == (s = SIZE(fp)&~BITS))
			s = seg->extent;
		else	s += sizeof(Head_t);

		/* restore the free block only if nothing was given back */
		if((*_Vmtruncate)(vm,seg,s,1) == s)
			seg->free = fp;
	}

	if((vd->mode&VM_TRACE) && _Vmtrace)
		(*_Vmtrace)(vm,(Vmuchar_t*)0,(Vmuchar_t*)0,0,0);

	CLRLOCK(vd,0);
	CLRINUSE(vd, inuse);
	return 0;
}
390
/*	Allocate a block of `size' bytes aligned to `align'.  Implemented
**	by over-allocating size+align bytes, advancing to the first
**	aligned address, and returning the unused tail to the segment.
*/
#if __STD_C
static Void_t* lastalign(Vmalloc_t* vm, size_t size, size_t align)
#else
static Void_t* lastalign(vm, size, align)
Vmalloc_t*	vm;
size_t		size;
size_t		align;
#endif
{
	reg Vmuchar_t*	data;
	reg Seg_t*	seg;
	reg Block_t*	next;
	reg int		local, inuse;
	reg size_t	s, orgsize = 0, orgalign = 0;
	reg Vmdata_t*	vd = vm->data;

	if(size <= 0 || align <= 0)	/* size_t args: rejects only 0 */
		return NIL(Void_t*);

	SETINUSE(vd, inuse);
	if(!(local = vd->mode&VM_TRUST) )
	{	GETLOCAL(vd,local);
		if(ISLOCK(vd,local) )
		{	CLRINUSE(vd, inuse);
			return NIL(Void_t*);
		}
		SETLOCK(vd,local);
		orgsize = size;	/* for tracing */
		orgalign = align;
	}

	size = size <= TINYSIZE ? TINYSIZE : ROUND(size,ALIGN);
	align = MULTIPLE(align,ALIGN);

	/* over-allocate so an aligned address must fall inside the block */
	s = size + align;
	if(!(data = (Vmuchar_t*)KPVALLOC(vm,s,lastalloc)) )
		goto done;

	/* find the segment containing this block */
	for(seg = vd->seg; seg; seg = seg->next)
		if(seg->last == (Block_t*)data)
			break;
	/**/ASSERT(seg);

	/* get a suitably aligned address */
	if((s = (size_t)(VLONG(data)%align)) != 0)
		data += align-s; /**/ASSERT((VLONG(data)%align) == 0);

	/* free the unused tail */
	next = (Block_t*)(data+size);
	if((s = (seg->baddr - (Vmuchar_t*)next)) >= sizeof(Block_t))
	{	SEG(next) = seg;
		SIZE(next) = s - sizeof(Head_t);
		seg->free = next;
	}

	/* the aligned address becomes the region's last block */
	vd->free = seg->last = (Block_t*)data;

	if(!local && !(vd->mode&VM_TRUST) && _Vmtrace && (vd->mode&VM_TRACE) )
		(*_Vmtrace)(vm,NIL(Vmuchar_t*),data,orgsize,orgalign);

done:
	CLRLOCK(vd,local);
	ANNOUNCE(local, vm, VM_ALLOC, (Void_t*)data, vm->disc);

	CLRINUSE(vd, inuse);
	return (Void_t*)data;
}
459
/* Public method for free-1 allocation */
static Vmethod_t _Vmlast =
{
	lastalloc,	/* allocate a new (last) block			*/
	lastresize,	/* resize; realloc-style NIL/0 handling		*/
	lastfree,	/* free; only the last block is accepted	*/
	lastaddr,	/* offset of an address in the last block	*/
	lastsize,	/* usable size of the last block		*/
	lastcompact,	/* return free segment space to the system	*/
	lastalign,	/* aligned allocation				*/
	VM_MTLAST	/* method identifier				*/
};

/* exported handle for this allocation method */
__DEFINE__(Vmethod_t*,Vmlast,&_Vmlast);

#ifdef NoF
NoF(vmlast)
#endif
478
479#endif
480