Lines Matching refs:qp

1084 int qla_create_buf_pool(struct scsi_qla_host *vha, struct qla_qpair *qp)
1088 qp->buf_pool.num_bufs = qp->req->length;
1090 sz = BITS_TO_LONGS(qp->req->length);
1091 qp->buf_pool.buf_map = kcalloc(sz, sizeof(long), GFP_KERNEL);
1092 if (!qp->buf_pool.buf_map) {
1097 sz = qp->req->length * sizeof(void *);
1098 qp->buf_pool.buf_array = kcalloc(qp->req->length, sizeof(void *), GFP_KERNEL);
1099 if (!qp->buf_pool.buf_array) {
1102 kfree(qp->buf_pool.buf_map);
1105 sz = qp->req->length * sizeof(dma_addr_t);
1106 qp->buf_pool.dma_array = kcalloc(qp->req->length, sizeof(dma_addr_t), GFP_KERNEL);
1107 if (!qp->buf_pool.dma_array) {
1110 kfree(qp->buf_pool.buf_map);
1111 kfree(qp->buf_pool.buf_array);
1114 set_bit(0, qp->buf_pool.buf_map);
1118 void qla_free_buf_pool(struct qla_qpair *qp)
1121 struct qla_hw_data *ha = qp->vha->hw;
1123 for (i = 0; i < qp->buf_pool.num_bufs; i++) {
1124 if (qp->buf_pool.buf_array[i] && qp->buf_pool.dma_array[i])
1125 dma_pool_free(ha->fcp_cmnd_dma_pool, qp->buf_pool.buf_array[i],
1126 qp->buf_pool.dma_array[i]);
1127 qp->buf_pool.buf_array[i] = NULL;
1128 qp->buf_pool.dma_array[i] = 0;
1131 kfree(qp->buf_pool.dma_array);
1132 kfree(qp->buf_pool.buf_array);
1133 kfree(qp->buf_pool.buf_map);
1136 /* it is assumed qp->qp_lock is held at this point */
1137 int qla_get_buf(struct scsi_qla_host *vha, struct qla_qpair *qp, struct qla_buf_dsc *dsc)
1146 tag = find_first_zero_bit(qp->buf_pool.buf_map, qp->buf_pool.num_bufs);
1147 if (tag >= qp->buf_pool.num_bufs) {
1149 "qp(%d) ran out of buf resource.\n", qp->id);
1153 set_bit(0, qp->buf_pool.buf_map);
1157 "qp(%d) unable to get tag.\n", qp->id);
1163 if (!qp->buf_pool.buf_array[tag]) {
1171 dsc->buf = qp->buf_pool.buf_array[tag] = buf;
1172 dsc->buf_dma = qp->buf_pool.dma_array[tag] = buf_dma;
1173 qp->buf_pool.num_alloc++;
1175 dsc->buf = qp->buf_pool.buf_array[tag];
1176 dsc->buf_dma = qp->buf_pool.dma_array[tag];
1180 qp->buf_pool.num_active++;
1181 if (qp->buf_pool.num_active > qp->buf_pool.max_used)
1182 qp->buf_pool.max_used = qp->buf_pool.num_active;
1185 set_bit(tag, qp->buf_pool.buf_map);
1189 static void qla_trim_buf(struct qla_qpair *qp, u16 trim)
1192 struct qla_hw_data *ha = qp->vha->hw;
1198 j = qp->buf_pool.num_alloc - 1;
1199 if (test_bit(j, qp->buf_pool.buf_map)) {
1200 ql_dbg(ql_dbg_io + ql_dbg_verbose, qp->vha, 0x300b,
1202 qp->id, j, qp->buf_pool.num_alloc);
1206 if (qp->buf_pool.buf_array[j]) {
1207 dma_pool_free(ha->fcp_cmnd_dma_pool, qp->buf_pool.buf_array[j],
1208 qp->buf_pool.dma_array[j]);
1209 qp->buf_pool.buf_array[j] = NULL;
1210 qp->buf_pool.dma_array[j] = 0;
1212 qp->buf_pool.num_alloc--;
1213 if (!qp->buf_pool.num_alloc)
1216 ql_dbg(ql_dbg_io + ql_dbg_verbose, qp->vha, 0x3010,
1218 qp->id, trim, qp->buf_pool.num_alloc);
1221 static void __qla_adjust_buf(struct qla_qpair *qp)
1225 qp->buf_pool.take_snapshot = 0;
1226 qp->buf_pool.prev_max = qp->buf_pool.max_used;
1227 qp->buf_pool.max_used = qp->buf_pool.num_active;
1229 if (qp->buf_pool.prev_max > qp->buf_pool.max_used &&
1230 qp->buf_pool.num_alloc > qp->buf_pool.max_used) {
1232 trim = qp->buf_pool.num_alloc - qp->buf_pool.max_used;
1235 qla_trim_buf(qp, trim);
1236 } else if (!qp->buf_pool.prev_max && !qp->buf_pool.max_used) {
1238 qla_trim_buf(qp, qp->buf_pool.num_alloc);
1242 /* it is assumed qp->qp_lock is held at this point */
1243 void qla_put_buf(struct qla_qpair *qp, struct qla_buf_dsc *dsc)
1247 lockdep_assert_held(qp->qp_lock_ptr);
1249 clear_bit(dsc->tag, qp->buf_pool.buf_map);
1250 qp->buf_pool.num_active--;
1253 if (qp->buf_pool.take_snapshot)
1254 __qla_adjust_buf(qp);
1262 struct qla_qpair *qp;
1277 qp = vha->hw->queue_pair_map[i];
1278 if (!qp)
1280 if (!qp->buf_pool.num_alloc)
1283 if (qp->buf_pool.take_snapshot) {
1285 spin_lock_irqsave(qp->qp_lock_ptr, flags);
1286 __qla_adjust_buf(qp);
1287 spin_unlock_irqrestore(qp->qp_lock_ptr, flags);
1289 qp->buf_pool.take_snapshot = 1;