/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (c) 2006 Semihalf, Rafal Jaworowski <raj@semihalf.com>
 * Copyright (c) 1996, 1997, 1998 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
 * NASA Ames Research Center.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <sys/cdefs.h>
#define	KTR_BE_IO	0
#define	KTR_LE_IO	0

#include <sys/param.h>
#include <sys/systm.h>
#include <sys/bus.h>
#include <sys/ktr.h>
#include <vm/vm.h>
#include <vm/pmap.h>
#include <sys/endian.h>

#include <machine/bus.h>
#include <machine/pio.h>
#include <machine/md_var.h>

#define TODO panic("%s: not implemented", __func__)

#define	MAX_EARLYBOOT_MAPPINGS	6

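/*
 * Device mappings requested before the pmap is bootstrapped are recorded
 * in this table so that bs_remap_earlyboot() can re-establish them with
 * the requested memory attributes once the MMU is up.
 */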
static struct {
	vm_offset_t virt;
	bus_addr_t addr;
	bus_size_t size;
	int flags;
} earlyboot_mappings[MAX_EARLYBOOT_MAPPINGS];
static int earlyboot_map_idx = 0;

void bs_remap_earlyboot(void);

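/*
 * Convert a bus space handle plus offset into a CPU pointer.  Handles in
 * this implementation are simply kernel virtual addresses of the mapped
 * region, so the arithmetic is a plain addition.
 */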
static __inline void *
__ppc_ba(bus_space_handle_t bsh, bus_size_t ofs)
{
	return ((void *)(bsh + ofs));
}

static int
bs_gen_map(bus_addr_t addr, bus_size_t size, int flags,
    bus_space_handle_t *bshp)
{
	vm_memattr_t ma;

	/*
	 * Record what we did if we haven't enabled the MMU yet. We
	 * will need to remap it as soon as the MMU comes up.
	 */
	if (!pmap_bootstrapped) {
		KASSERT(earlyboot_map_idx < MAX_EARLYBOOT_MAPPINGS,
		    ("%s: too many early boot mapping requests", __func__));
		earlyboot_mappings[earlyboot_map_idx].addr = addr;
		earlyboot_mappings[earlyboot_map_idx].virt =
		    pmap_early_io_map(addr, size);
		earlyboot_mappings[earlyboot_map_idx].size = size;
		earlyboot_mappings[earlyboot_map_idx].flags = flags;
		*bshp = earlyboot_mappings[earlyboot_map_idx].virt;
		earlyboot_map_idx++;
	} else {
		ma = VM_MEMATTR_DEFAULT;
		switch (flags) {
			case BUS_SPACE_MAP_CACHEABLE:
				ma = VM_MEMATTR_CACHEABLE;
				break;
			case BUS_SPACE_MAP_PREFETCHABLE:
				ma = VM_MEMATTR_PREFETCHABLE;
				break;
		}
		*bshp = (bus_space_handle_t)pmap_mapdev_attr(addr, size, ma);
	}

	return (0);
}

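/*
 * Replay the early boot mappings now that the pmap is fully bootstrapped,
 * wiring each one into the kernel pmap with the memory attributes that were
 * requested at map time.  Mappings already covered by the direct map are
 * left untouched.
 */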
void
bs_remap_earlyboot(void)
{
	vm_paddr_t pa, spa;
	vm_offset_t va;
	int i;
	vm_memattr_t ma;

	for (i = 0; i < earlyboot_map_idx; i++) {
		spa = earlyboot_mappings[i].addr;

		if (hw_direct_map &&
		   PHYS_TO_DMAP(spa) == earlyboot_mappings[i].virt &&
		   pmap_dev_direct_mapped(spa, earlyboot_mappings[i].size) == 0)
			continue;

		ma = VM_MEMATTR_DEFAULT;
		switch (earlyboot_mappings[i].flags) {
			case BUS_SPACE_MAP_CACHEABLE:
				ma = VM_MEMATTR_CACHEABLE;
				break;
			case BUS_SPACE_MAP_PREFETCHABLE:
				ma = VM_MEMATTR_PREFETCHABLE;
				break;
		}

		pa = trunc_page(spa);
		va = trunc_page(earlyboot_mappings[i].virt);
		while (pa < spa + earlyboot_mappings[i].size) {
			pmap_kenter_attr(va, pa, ma);
			va += PAGE_SIZE;
			pa += PAGE_SIZE;
		}
	}
}

static void
bs_gen_unmap(bus_space_handle_t bsh, bus_size_t size)
{

	if (!pmap_bootstrapped)
		return;

	pmap_unmapdev((void *)bsh, size);
}

static int
bs_gen_subregion(bus_space_handle_t bsh, bus_size_t ofs,
    bus_size_t size __unused, bus_space_handle_t *nbshp)
{
	*nbshp = bsh + ofs;
	return (0);
}

static int
bs_gen_alloc(bus_addr_t rstart __unused, bus_addr_t rend __unused,
    bus_size_t size __unused, bus_size_t alignment __unused,
    bus_size_t boundary __unused, int flags __unused,
    bus_addr_t *bpap __unused, bus_space_handle_t *bshp __unused)
{
	TODO;
}

static void
bs_gen_free(bus_space_handle_t bsh __unused, bus_size_t size __unused)
{
	TODO;
}

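/*
 * Barrier requests map to a single I/O memory barrier (powerpc_iomb())
 * regardless of the requested offset, range, or direction.
 */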
static void
bs_gen_barrier(bus_space_handle_t bsh __unused, bus_size_t ofs __unused,
    bus_size_t size __unused, int flags __unused)
{

	powerpc_iomb();
}

/*
 * Native-endian access functions
 */
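/*
 * Method naming follows the usual bus_space(9) scheme: rs/ws are single
 * reads and writes, rm/wm access the same bus address 'cnt' times
 * (FIFO-style), rr/wr walk a region of consecutive addresses, and sm/sr
 * store one value either repeatedly to one location or across a region.
 * The "native" set performs no byte swapping.
 */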
static uint8_t
native_bs_rs_1(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint8_t *addr;
	uint8_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static uint16_t
native_bs_rs_2(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint16_t *addr;
	uint16_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static uint32_t
native_bs_rs_4(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint32_t *addr;
	uint32_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static uint64_t
native_bs_rs_8(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint64_t *addr;
	uint64_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	return (res);
}

static void
native_bs_rm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	ins8(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_rm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	ins16(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_rm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	ins32(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_rm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
{
	ins64(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_rr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	volatile uint8_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = *s++;
	powerpc_iomb();
}

static void
native_bs_rr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	volatile uint16_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = *s++;
	powerpc_iomb();
}

static void
native_bs_rr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	volatile uint32_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = *s++;
	powerpc_iomb();
}

static void
native_bs_rr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
{
	volatile uint64_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = *s++;
	powerpc_iomb();
}

static void
native_bs_ws_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val)
{
	volatile uint8_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
native_bs_ws_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val)
{
	volatile uint16_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
native_bs_ws_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val)
{
	volatile uint32_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
native_bs_ws_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val)
{
	volatile uint64_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_BE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
native_bs_wm_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    bus_size_t cnt)
{
	outsb(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_wm_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    bus_size_t cnt)
{
	outsw(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_wm_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    bus_size_t cnt)
{
	outsl(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_wm_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    bus_size_t cnt)
{
	outsll(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
native_bs_wr_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = *addr++;
	powerpc_iomb();
}

static void
native_bs_wr_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = *addr++;
	powerpc_iomb();
}

static void
native_bs_wr_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = *addr++;
	powerpc_iomb();
}

static void
native_bs_wr_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    size_t cnt)
{
	volatile uint64_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = *addr++;
	powerpc_iomb();
}

static void
native_bs_sm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

static void
native_bs_sm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

static void
native_bs_sm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

static void
native_bs_sm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	volatile uint64_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

static void
native_bs_sr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = val;
	powerpc_iomb();
}

static void
native_bs_sr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = val;
	powerpc_iomb();
}

static void
native_bs_sr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = val;
	powerpc_iomb();
}

static void
native_bs_sr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	volatile uint64_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = val;
	powerpc_iomb();
}

/*
 * Byteswapped access functions
 */
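/*
 * The 2- and 4-byte variants use the byte-reversed load/store indexed
 * instructions (lhbrx/lwbrx, sthbrx/stwbrx); the 8-byte single accessors
 * fall back to le64toh()/htole64().  The 64-bit multiple/region/set
 * variants remain unimplemented (TODO).
 */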
static uint8_t
swapped_bs_rs_1(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint8_t *addr;
	uint8_t res;

	addr = __ppc_ba(bsh, ofs);
	res = *addr;
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static uint16_t
swapped_bs_rs_2(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint16_t *addr;
	uint16_t res;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("lhbrx %0, 0, %1" : "=r"(res) : "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static uint32_t
swapped_bs_rs_4(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint32_t *addr;
	uint32_t res;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("lwbrx %0, 0, %1" : "=r"(res) : "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static uint64_t
swapped_bs_rs_8(bus_space_handle_t bsh, bus_size_t ofs)
{
	volatile uint64_t *addr;
	uint64_t res;

	addr = __ppc_ba(bsh, ofs);
	res = le64toh(*addr);
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x) = %#x", __func__, bsh, ofs, res);
	return (res);
}

static void
swapped_bs_rm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	ins8(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
swapped_bs_rm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	ins16rb(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
swapped_bs_rm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	ins32rb(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
swapped_bs_rm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
{
	TODO;
}

static void
swapped_bs_rr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t *addr, size_t cnt)
{
	volatile uint8_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = *s++;
	powerpc_iomb();
}

static void
swapped_bs_rr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t *addr, size_t cnt)
{
	volatile uint16_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = in16rb(s++);
	powerpc_iomb();
}

static void
swapped_bs_rr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t *addr, size_t cnt)
{
	volatile uint32_t *s = __ppc_ba(bsh, ofs);

	while (cnt--)
		*addr++ = in32rb(s++);
	powerpc_iomb();
}

static void
swapped_bs_rr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t *addr, size_t cnt)
{
	TODO;
}

static void
swapped_bs_ws_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val)
{
	volatile uint8_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = val;
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
swapped_bs_ws_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val)
{
	volatile uint16_t *addr;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("sthbrx %0, 0, %1" :: "r"(val), "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
swapped_bs_ws_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val)
{
	volatile uint32_t *addr;

	addr = __ppc_ba(bsh, ofs);
	__asm __volatile("stwbrx %0, 0, %1" :: "r"(val), "r"(addr));
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
swapped_bs_ws_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val)
{
	volatile uint64_t *addr;

	addr = __ppc_ba(bsh, ofs);
	*addr = htole64(val);
	powerpc_iomb();
	CTR4(KTR_LE_IO, "%s(bsh=%#x, ofs=%#x, val=%#x)", __func__, bsh, ofs, val);
}

static void
swapped_bs_wm_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    bus_size_t cnt)
{
	outs8(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
swapped_bs_wm_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    bus_size_t cnt)
{
	outs16rb(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
swapped_bs_wm_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    bus_size_t cnt)
{
	outs32rb(__ppc_ba(bsh, ofs), addr, cnt);
}

static void
swapped_bs_wm_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    bus_size_t cnt)
{
	TODO;
}

static void
swapped_bs_wr_1(bus_space_handle_t bsh, bus_size_t ofs, const uint8_t *addr,
    size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = *addr++;
	powerpc_iomb();
}

static void
swapped_bs_wr_2(bus_space_handle_t bsh, bus_size_t ofs, const uint16_t *addr,
    size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out16rb(d++, *addr++);
	powerpc_iomb();
}

static void
swapped_bs_wr_4(bus_space_handle_t bsh, bus_size_t ofs, const uint32_t *addr,
    size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out32rb(d++, *addr++);
	powerpc_iomb();
}

static void
swapped_bs_wr_8(bus_space_handle_t bsh, bus_size_t ofs, const uint64_t *addr,
    size_t cnt)
{
	TODO;
}

static void
swapped_bs_sm_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d = val;
	powerpc_iomb();
}

static void
swapped_bs_sm_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out16rb(d, val);
	powerpc_iomb();
}

static void
swapped_bs_sm_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out32rb(d, val);
	powerpc_iomb();
}

static void
swapped_bs_sm_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	TODO;
}

static void
swapped_bs_sr_1(bus_space_handle_t bsh, bus_size_t ofs, uint8_t val, size_t cnt)
{
	volatile uint8_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		*d++ = val;
	powerpc_iomb();
}

static void
swapped_bs_sr_2(bus_space_handle_t bsh, bus_size_t ofs, uint16_t val, size_t cnt)
{
	volatile uint16_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out16rb(d++, val);
	powerpc_iomb();
}

static void
swapped_bs_sr_4(bus_space_handle_t bsh, bus_size_t ofs, uint32_t val, size_t cnt)
{
	volatile uint32_t *d = __ppc_ba(bsh, ofs);

	while (cnt--)
		out32rb(d++, val);
	powerpc_iomb();
}

static void
swapped_bs_sr_8(bus_space_handle_t bsh, bus_size_t ofs, uint64_t val, size_t cnt)
{
	TODO;
}

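/*
 * The tag whose accessors match the host byte order is exported under the
 * matching name (bs_be_tag on big-endian kernels, bs_le_tag on little-endian
 * ones), and the byteswapped set is exported under the other name.  The
 * stream (_s_) methods in both tags point at the native, non-swapping
 * accessors, since streamed data is not byte-order converted.
 *
 * A minimal consumer sketch (hypothetical device address and register
 * offsets, shown only for illustration):
 *
 *	bus_space_handle_t bsh;
 *	uint32_t id;
 *
 *	if (bus_space_map(&bs_be_tag, 0xf1000000, 0x1000, 0, &bsh) == 0) {
 *		id = bus_space_read_4(&bs_be_tag, bsh, 0x0);
 *		bus_space_write_4(&bs_be_tag, bsh, 0x4, 0x1);
 *		bus_space_unmap(&bs_be_tag, bsh, 0x1000);
 *	}
 */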
#if BYTE_ORDER == LITTLE_ENDIAN
struct bus_space bs_le_tag = {
#else
struct bus_space bs_be_tag = {
#endif
	/* mapping/unmapping */
	.bs_map =	bs_gen_map,
	.bs_unmap =	bs_gen_unmap,
	.bs_subregion =	bs_gen_subregion,

	/* allocation/deallocation */
	.bs_alloc =	bs_gen_alloc,
	.bs_free =	bs_gen_free,

	/* barrier */
	.bs_barrier =	bs_gen_barrier,

	/* read (single) */
	.bs_r_1 =	native_bs_rs_1,
	.bs_r_2 =	native_bs_rs_2,
	.bs_r_4 =	native_bs_rs_4,
	.bs_r_8 =	native_bs_rs_8,

	/* read (single) stream */
	.bs_r_s_2 =	native_bs_rs_2,
	.bs_r_s_4 =	native_bs_rs_4,
	.bs_r_s_8 =	native_bs_rs_8,

	/* read multiple */
	.bs_rm_1 =	native_bs_rm_1,
	.bs_rm_2 =	native_bs_rm_2,
	.bs_rm_4 =	native_bs_rm_4,
	.bs_rm_8 =	native_bs_rm_8,

	/* read multiple stream */
	.bs_rm_s_2 =	native_bs_rm_2,
	.bs_rm_s_4 =	native_bs_rm_4,
	.bs_rm_s_8 =	native_bs_rm_8,

	/* read region */
	.bs_rr_1 =	native_bs_rr_1,
	.bs_rr_2 =	native_bs_rr_2,
	.bs_rr_4 =	native_bs_rr_4,
	.bs_rr_8 =	native_bs_rr_8,

	/* read region stream */
	.bs_rr_s_2 =	native_bs_rr_2,
	.bs_rr_s_4 =	native_bs_rr_4,
	.bs_rr_s_8 =	native_bs_rr_8,

	/* write (single) */
	.bs_w_1 =	native_bs_ws_1,
	.bs_w_2 =	native_bs_ws_2,
	.bs_w_4 =	native_bs_ws_4,
	.bs_w_8 =	native_bs_ws_8,

	/* write (single) stream */
	.bs_w_s_2 =	native_bs_ws_2,
	.bs_w_s_4 =	native_bs_ws_4,
	.bs_w_s_8 =	native_bs_ws_8,

	/* write multiple */
	.bs_wm_1 =	native_bs_wm_1,
	.bs_wm_2 =	native_bs_wm_2,
	.bs_wm_4 =	native_bs_wm_4,
	.bs_wm_8 =	native_bs_wm_8,

	/* write multiple stream */
	.bs_wm_s_2 =	native_bs_wm_2,
	.bs_wm_s_4 =	native_bs_wm_4,
	.bs_wm_s_8 =	native_bs_wm_8,

	/* write region */
	.bs_wr_1 =	native_bs_wr_1,
	.bs_wr_2 =	native_bs_wr_2,
	.bs_wr_4 =	native_bs_wr_4,
	.bs_wr_8 =	native_bs_wr_8,

	/* write region stream */
	.bs_wr_s_2 =	native_bs_wr_2,
	.bs_wr_s_4 =	native_bs_wr_4,
	.bs_wr_s_8 =	native_bs_wr_8,

	/* set multiple */
	.bs_sm_1 =	native_bs_sm_1,
	.bs_sm_2 =	native_bs_sm_2,
	.bs_sm_4 =	native_bs_sm_4,
	.bs_sm_8 =	native_bs_sm_8,

	/* set multiple stream */
	.bs_sm_s_2 =	native_bs_sm_2,
	.bs_sm_s_4 =	native_bs_sm_4,
	.bs_sm_s_8 =	native_bs_sm_8,

	/* set region */
	.bs_sr_1 =	native_bs_sr_1,
	.bs_sr_2 =	native_bs_sr_2,
	.bs_sr_4 =	native_bs_sr_4,
	.bs_sr_8 =	native_bs_sr_8,

	/* set region stream */
	.bs_sr_s_2 =	native_bs_sr_2,
	.bs_sr_s_4 =	native_bs_sr_4,
	.bs_sr_s_8 =	native_bs_sr_8,

	/* copy region */
	.bs_cr_1 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_2 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_4 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_8 =	NULL, /* UNIMPLEMENTED */

	/* copy region stream */
	.bs_cr_s_2 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_s_4 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_s_8 =	NULL, /* UNIMPLEMENTED */
};

#if BYTE_ORDER == LITTLE_ENDIAN
struct bus_space bs_be_tag = {
#else
struct bus_space bs_le_tag = {
#endif
	/* mapping/unmapping */
	.bs_map =	bs_gen_map,
	.bs_unmap =	bs_gen_unmap,
	.bs_subregion =	bs_gen_subregion,

	/* allocation/deallocation */
	.bs_alloc =	bs_gen_alloc,
	.bs_free =	bs_gen_free,

	/* barrier */
	.bs_barrier =	bs_gen_barrier,

	/* read (single) */
	.bs_r_1 =	swapped_bs_rs_1,
	.bs_r_2 =	swapped_bs_rs_2,
	.bs_r_4 =	swapped_bs_rs_4,
	.bs_r_8 =	swapped_bs_rs_8,

	/* read (single) stream */
	.bs_r_s_2 =	native_bs_rs_2,
	.bs_r_s_4 =	native_bs_rs_4,
	.bs_r_s_8 =	native_bs_rs_8,

	/* read multiple */
	.bs_rm_1 =	swapped_bs_rm_1,
	.bs_rm_2 =	swapped_bs_rm_2,
	.bs_rm_4 =	swapped_bs_rm_4,
	.bs_rm_8 =	swapped_bs_rm_8,

	/* read multiple stream */
	.bs_rm_s_2 =	native_bs_rm_2,
	.bs_rm_s_4 =	native_bs_rm_4,
	.bs_rm_s_8 =	native_bs_rm_8,

	/* read region */
	.bs_rr_1 =	swapped_bs_rr_1,
	.bs_rr_2 =	swapped_bs_rr_2,
	.bs_rr_4 =	swapped_bs_rr_4,
	.bs_rr_8 =	swapped_bs_rr_8,

	/* read region stream */
	.bs_rr_s_2 =	native_bs_rr_2,
	.bs_rr_s_4 =	native_bs_rr_4,
	.bs_rr_s_8 =	native_bs_rr_8,

	/* write (single) */
	.bs_w_1 =	swapped_bs_ws_1,
	.bs_w_2 =	swapped_bs_ws_2,
	.bs_w_4 =	swapped_bs_ws_4,
	.bs_w_8 =	swapped_bs_ws_8,

	/* write (single) stream */
	.bs_w_s_2 =	native_bs_ws_2,
	.bs_w_s_4 =	native_bs_ws_4,
	.bs_w_s_8 =	native_bs_ws_8,

	/* write multiple */
	.bs_wm_1 =	swapped_bs_wm_1,
	.bs_wm_2 =	swapped_bs_wm_2,
	.bs_wm_4 =	swapped_bs_wm_4,
	.bs_wm_8 =	swapped_bs_wm_8,

	/* write multiple stream */
	.bs_wm_s_2 =	native_bs_wm_2,
	.bs_wm_s_4 =	native_bs_wm_4,
	.bs_wm_s_8 =	native_bs_wm_8,

	/* write region */
	.bs_wr_1 =	swapped_bs_wr_1,
	.bs_wr_2 =	swapped_bs_wr_2,
	.bs_wr_4 =	swapped_bs_wr_4,
	.bs_wr_8 =	swapped_bs_wr_8,

	/* write region stream */
	.bs_wr_s_2 =	native_bs_wr_2,
	.bs_wr_s_4 =	native_bs_wr_4,
	.bs_wr_s_8 =	native_bs_wr_8,

	/* set multiple */
	.bs_sm_1 =	swapped_bs_sm_1,
	.bs_sm_2 =	swapped_bs_sm_2,
	.bs_sm_4 =	swapped_bs_sm_4,
	.bs_sm_8 =	swapped_bs_sm_8,

	/* set multiple stream */
	.bs_sm_s_2 =	native_bs_sm_2,
	.bs_sm_s_4 =	native_bs_sm_4,
	.bs_sm_s_8 =	native_bs_sm_8,

	/* set region */
	.bs_sr_1 =	swapped_bs_sr_1,
	.bs_sr_2 =	swapped_bs_sr_2,
	.bs_sr_4 =	swapped_bs_sr_4,
	.bs_sr_8 =	swapped_bs_sr_8,

	/* set region stream */
	.bs_sr_s_2 =	native_bs_sr_2,
	.bs_sr_s_4 =	native_bs_sr_4,
	.bs_sr_s_8 =	native_bs_sr_8,

	/* copy region */
	.bs_cr_1 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_2 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_4 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_8 =	NULL, /* UNIMPLEMENTED */

	/* copy region stream */
	.bs_cr_s_2 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_s_4 =	NULL, /* UNIMPLEMENTED */
	.bs_cr_s_8 =	NULL, /* UNIMPLEMENTED */
};