/* SPDX-License-Identifier: GPL-2.0 OR Linux-OpenIB */
/* Copyright (c) 2015 - 2019 Intel Corporation */
#ifndef IRDMA_PBLE_H
#define IRDMA_PBLE_H

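/*
 * PBLE (physical buffer list entry) sizing constants. Each PBLE is a
 * 64-bit physical address, so a 4K backing page (HMC_PAGED_BP_SHIFT) holds
 * PBLE_PER_PAGE (512 = 2^PBLE_512_SHIFT) entries. PBLE_SHIFT is the
 * granularity shift used by the resource-manager bitmap (see pble_shift
 * below), and PBLE_INVALID_IDX marks an unassigned PBLE index.
 */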
#define PBLE_SHIFT		6
#define PBLE_PER_PAGE		512
#define HMC_PAGED_BP_SHIFT	12
#define PBLE_512_SHIFT		9
#define PBLE_INVALID_IDX	0xffffffff

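/*
 * Depth of a PBLE table for an allocation: level 0 means no PBLE
 * indirection is needed (the buffer is physically contiguous), level 1 is
 * a single flat list of PBLEs, and level 2 adds a root page whose entries
 * point to leaf PBLE pages.
 */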
enum irdma_pble_level {
	PBLE_LEVEL_0 = 0,
	PBLE_LEVEL_1 = 1,
	PBLE_LEVEL_2 = 2,
};

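/*
 * How a chunk's backing memory is obtained: not allocated, carved from a
 * physically contiguous HMC segment descriptor (SD), or assembled from
 * individually allocated pages behind a paged SD.
 */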
enum irdma_alloc_type {
	PBLE_NO_ALLOC	  = 0,
	PBLE_SD_CONTIGOUS = 1,
	PBLE_SD_PAGED	  = 2,
};

struct irdma_chunk;

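/*
 * Ties an allocation back to the chunk it came from: the owning chunk and
 * the bitmap range (first bit and number of bits) it occupies, so the
 * space can be returned on free.
 */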
struct irdma_pble_chunkinfo {
	struct irdma_chunk *pchunk;
	u64 bit_idx;
	u64 bits_used;
};

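/*
 * One run of PBLEs handed out by the resource manager: the CPU virtual
 * address of the entries, the starting PBLE index, the entry count, and
 * the chunk bookkeeping needed to free the run.
 */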
struct irdma_pble_info {
	u64 *addr;
	u32 idx;
	u32 cnt;
	struct irdma_pble_chunkinfo chunkinfo;
};

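/*
 * A level-2 allocation: a root PBLE list whose entries point at leaf PBLE
 * lists. leafmem backs the array of leaf descriptors; leaf_cnt is the
 * number of leaves.
 */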
struct irdma_pble_level2 {
	struct irdma_pble_info root;
	struct irdma_pble_info *leaf;
	struct irdma_virt_mem leafmem;
	u32 leaf_cnt;
};

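/*
 * Result of a PBLE allocation: the total entry count, the table depth,
 * and either a single level-1 run or a level-2 root/leaf tree.
 */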
struct irdma_pble_alloc {
	u32 total_cnt;
	enum irdma_pble_level level;
	union {
		struct irdma_pble_info level1;
		struct irdma_pble_level2 level2;
	};
};

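/*
 * HMC addressing for a backing page: segment descriptor (SD) index, page
 * descriptor (PD) index, and the PD index relative to its SD.
 */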
struct sd_pd_idx {
	u32 sd_idx;
	u32 pd_idx;
	u32 rel_pd_idx;
};

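/*
 * Scratch state used while adding a chunk of paged backing memory: the
 * chunk being grown, the HMC SD entry and HMC info it maps into, the
 * SD/PD indexes, and the number of pages to add.
 */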
struct irdma_add_page_info {
	struct irdma_chunk *chunk;
	struct irdma_hmc_sd_entry *sd_entry;
	struct irdma_hmc_info *hmc_info;
	struct sd_pd_idx idx;
	u32 pages;
};

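/*
 * One chunk of PBLE backing memory tracked on the resource manager's
 * chunk list. The bitmap (bitmapbuf/sizeofbitmap) records which portions
 * are in use; fpm_addr is the chunk's base address in the HMC
 * function-private memory (FPM) space.
 */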
struct irdma_chunk {
	struct list_head list;
	struct irdma_dma_info dmainfo;
	unsigned long *bitmapbuf;

	u32 sizeofbitmap;
	u64 size;
	void *vaddr;
	u64 fpm_addr;
	u32 pg_cnt;
	enum irdma_alloc_type type;
	struct irdma_sc_dev *dev;
	struct irdma_virt_mem chunkmem;
};

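/*
 * PBLE resource manager: the list of chunks, the spinlock protecting
 * their bitmaps, allocation/free counters, and the shift that defines how
 * many bytes one bitmap bit represents.
 */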
struct irdma_pble_prm {
	struct list_head clist;
	spinlock_t prm_lock; /* protect prm bitmap */
	u64 total_pble_alloc;
	u64 free_pble_cnt;
	u8 pble_shift;
};

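/*
 * Per-HMC-function PBLE accounting: the remaining PBLE budget, FPM base
 * and next-free addresses, the embedded resource manager, and allocation
 * statistics (direct vs. paged SDs, level-1 vs. level-2 tables).
 */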
struct irdma_hmc_pble_rsrc {
	u32 unallocated_pble;
	struct mutex pble_mutex_lock; /* protect PBLE resource */
	struct irdma_sc_dev *dev;
	u64 fpm_base_addr;
	u64 next_fpm_addr;
	struct irdma_pble_prm pinfo;
	u64 allocdpbles;
	u64 freedpbles;
	u32 stats_direct_sds;
	u32 stats_paged_sds;
	u64 stats_alloc_ok;
	u64 stats_alloc_fail;
	u64 stats_alloc_freed;
	u64 stats_lvl1;
	u64 stats_lvl2;
};

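/*
 * Illustrative flow (a sketch, not taken from the driver sources; "iwmr",
 * "pg_cnt" and "lvl" are hypothetical caller-side names, and the exact
 * meaning of the lvl argument is defined in pble.c): a registration path
 * obtains PBLEs, fills them with page addresses, and returns them on
 * deregistration.
 *
 *	struct irdma_pble_alloc *palloc = &iwmr->pble_alloc;
 *	int err;
 *
 *	err = irdma_get_pble(pble_rsrc, palloc, pg_cnt, lvl);
 *	if (err)
 *		return err;
 *	... fill palloc->level1.addr[] (or the level-2 leaves) with DMA addresses ...
 *	irdma_free_pble(pble_rsrc, palloc);
 */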
void irdma_destroy_pble_prm(struct irdma_hmc_pble_rsrc *pble_rsrc);
int irdma_hmc_init_pble(struct irdma_sc_dev *dev,
			struct irdma_hmc_pble_rsrc *pble_rsrc);
void irdma_free_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
		     struct irdma_pble_alloc *palloc);
int irdma_get_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
		   struct irdma_pble_alloc *palloc, u32 pble_cnt,
		   u8 lvl);
int irdma_prm_add_pble_mem(struct irdma_pble_prm *pprm,
			   struct irdma_chunk *pchunk);
int irdma_prm_get_pbles(struct irdma_pble_prm *pprm,
			struct irdma_pble_chunkinfo *chunkinfo, u64 mem_size,
			u64 **vaddr, u64 *fpm_addr);
void irdma_prm_return_pbles(struct irdma_pble_prm *pprm,
			    struct irdma_pble_chunkinfo *chunkinfo);
void irdma_pble_acquire_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
void irdma_pble_release_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
void irdma_pble_free_paged_mem(struct irdma_chunk *chunk);
int irdma_pble_get_paged_mem(struct irdma_chunk *chunk, u32 pg_cnt);
void irdma_prm_rem_bitmapmem(struct irdma_hw *hw, struct irdma_chunk *chunk);
#endif /* IRDMA_PBLE_H */