Lines Matching defs:gsb

51 gs_msg_ops_kvmhv_nestedv2_config_fill_info(struct kvmppc_gs_buff *gsb,
60 rc = kvmppc_gse_put_u64(gsb, KVMPPC_GSID_RUN_OUTPUT_MIN_SIZE,
67 rc = kvmppc_gse_put_buff_info(gsb, KVMPPC_GSID_RUN_INPUT,
74 rc = kvmppc_gse_put_buff_info(gsb, KVMPPC_GSID_RUN_OUTPUT,
85 struct kvmppc_gs_buff *gsb)
94 rc = kvmppc_gse_parse(&gsp, gsb);
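
The hits above trace the nestedv2 config message: fill_info() serializes the run input/output buffer descriptors and the minimum run output size into the guest state buffer, and the refresh side parses what the L0 hypervisor returned. Below is a minimal sketch of that put/parse round trip, not a transcription of the kernel functions; it assumes the kvmppc_gsp_lookup() and kvmppc_gse_get_u64() accessors from asm/guest-state-buffer.h, and the min_size parameter is a stand-in for the real config state.

#include <asm/guest-state-buffer.h>

/* Sketch: serialize one config element. The real callback also adds the
 * RUN_INPUT/RUN_OUTPUT buffer descriptors with kvmppc_gse_put_buff_info(). */
static int config_fill_sketch(struct kvmppc_gs_buff *gsb, u64 min_size)
{
        return kvmppc_gse_put_u64(gsb, KVMPPC_GSID_RUN_OUTPUT_MIN_SIZE,
                                  min_size);
}

/* Sketch: parse the buffer the L0 filled and read one element back. */
static int config_refresh_sketch(struct kvmppc_gs_buff *gsb, u64 *min_size)
{
        struct kvmppc_gs_parser gsp = { 0 };
        struct kvmppc_gs_elem *gse;
        int rc;

        /* Index every element present in the buffer ... */
        rc = kvmppc_gse_parse(&gsp, gsb);
        if (rc < 0)
                return rc;

        /* ... then pull out the identity we care about. */
        gse = kvmppc_gsp_lookup(&gsp, KVMPPC_GSID_RUN_OUTPUT_MIN_SIZE);
        if (gse)
                *min_size = kvmppc_gse_get_u64(gse);
        return 0;
}
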
134 static int gs_msg_ops_vcpu_fill_info(struct kvmppc_gs_buff *gsb,
155 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.dscr);
158 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.mmcra);
161 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.hfscr);
164 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.purr);
167 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.spurr);
170 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.amr);
173 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.uamor);
176 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.siar);
179 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.sdar);
182 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.iamr);
185 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.dawr0);
188 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.dawr1);
191 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.dawrx0);
194 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.dawrx1);
197 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.ciabr);
200 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.wort);
203 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.ppr);
206 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.pspb);
209 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.tar);
212 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.fscr);
215 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.ebbhr);
218 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.ebbrr);
221 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.bescr);
224 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.ic);
227 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.ctrl);
230 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.pid);
235 rc = kvmppc_gse_put_u64(gsb, iden, amor);
239 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.vrsave);
243 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.mmcr[i]);
247 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.sier[i]);
251 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.pmc[i]);
255 rc = kvmppc_gse_put_u64(gsb, iden,
259 rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.regs.ccr);
262 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.regs.xer);
265 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.regs.ctr);
268 rc = kvmppc_gse_put_u64(gsb, iden,
272 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.regs.nip);
275 rc = kvmppc_gse_put_u64(gsb, iden,
279 rc = kvmppc_gse_put_u64(gsb, iden,
283 rc = kvmppc_gse_put_u64(gsb, iden,
287 rc = kvmppc_gse_put_u64(gsb, iden,
291 rc = kvmppc_gse_put_u64(gsb, iden,
295 rc = kvmppc_gse_put_u64(gsb, iden,
299 rc = kvmppc_gse_put_u64(gsb, iden,
303 rc = kvmppc_gse_put_u32(gsb, iden,
307 rc = kvmppc_gse_put_u64(gsb, iden,
311 rc = kvmppc_gse_put_u64(gsb, iden,
315 rc = kvmppc_gse_put_u64(gsb, iden,
319 rc = kvmppc_gse_put_u64(gsb, iden,
323 rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.fp.fpscr);
329 rc = kvmppc_gse_put_vector128(gsb, iden, &v);
333 rc = kvmppc_gse_put_u32(gsb, iden,
338 rc = kvmppc_gse_put_vector128(gsb, iden,
347 rc = kvmppc_gse_put_u64(gsb, iden, dw);
367 rc = kvmppc_gse_put_u32(gsb, iden, arch_compat);
379 struct kvmppc_gs_buff *gsb)
392 rc = kvmppc_gse_parse(&gsp, gsb);
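
The long run of put_u64/put_u32/put_vector128 hits above is the body of the vcpu message's fill_info callback: it dispatches on the requested guest state identity and serializes the matching vcpu->arch field (the refresh side at the end parses the buffer back, mirroring the config sketch earlier). A trimmed sketch of the dispatch shape follows; KVMPPC_GSID_DSCR and KVMPPC_GSID_CR are assumed identity names chosen to match the DSCR and CR hits, and the real callback covers every register listed above.

#include <linux/kvm_host.h>
#include <asm/guest-state-buffer.h>

/* Sketch: serialize one vcpu register selected by its guest state identity. */
static int vcpu_fill_sketch(struct kvmppc_gs_buff *gsb, u16 iden,
                            struct kvm_vcpu *vcpu)
{
        int rc = 0;

        switch (iden) {
        case KVMPPC_GSID_DSCR:          /* 64-bit SPR */
                rc = kvmppc_gse_put_u64(gsb, iden, vcpu->arch.dscr);
                break;
        case KVMPPC_GSID_CR:            /* 32-bit register */
                rc = kvmppc_gse_put_u32(gsb, iden, vcpu->arch.regs.ccr);
                break;
        default:
                break;
        }
        return rc;
}
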
612 struct kvmppc_gs_buff *gsb, *vcpu_run_output, *vcpu_run_input;
628 gsb = kvmppc_gsb_new(kvmppc_gsm_size(gsm), guest_id, vcpu_id,
630 if (!gsb) {
635 rc = kvmppc_gsb_receive_datum(gsb, gsm,
654 rc = kvmppc_gsb_send_datum(gsb, gsm, KVMPPC_GSID_RUN_OUTPUT);
679 rc = kvmppc_gsb_send_datum(gsb, gsm, KVMPPC_GSID_RUN_INPUT);
698 kvmppc_gsb_free(gsb);
708 kvmppc_gsb_free(gsb);
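
These hits are the host-side setup: a guest state buffer is sized from the config message, used to ask the L0 for the minimum run output size, and then to register the run input/output buffers, before being freed again. The sketch below shows that allocation and exchange pattern under stated assumptions: the message is already wired to the config ops, and the identity used for the initial receive is a reading of the hits rather than a transcription.

#include <linux/errno.h>
#include <linux/gfp.h>
#include <asm/guest-state-buffer.h>

/* Sketch: allocate a scratch buffer for the config message and exchange
 * individual elements with the L0 hypervisor. */
static int host_create_sketch(struct kvmppc_gs_msg *gsm,
                              unsigned long guest_id, unsigned long vcpu_id)
{
        struct kvmppc_gs_buff *gsb;
        int rc;

        /* Size the buffer for everything the message may carry. */
        gsb = kvmppc_gsb_new(kvmppc_gsm_size(gsm), guest_id, vcpu_id,
                             GFP_KERNEL);
        if (!gsb)
                return -ENOMEM;

        /* Ask the L0 how large the run-vcpu output buffer must be ... */
        rc = kvmppc_gsb_receive_datum(gsb, gsm,
                                      KVMPPC_GSID_RUN_OUTPUT_MIN_SIZE);
        if (rc < 0)
                goto out;

        /* ... then advertise our run output and input buffers to it. */
        rc = kvmppc_gsb_send_datum(gsb, gsm, KVMPPC_GSID_RUN_OUTPUT);
        if (rc < 0)
                goto out;
        rc = kvmppc_gsb_send_datum(gsb, gsm, KVMPPC_GSID_RUN_INPUT);
out:
        /* The config buffer is only scratch space here. */
        kvmppc_gsb_free(gsb);
        return rc;
}
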
755 struct kvmppc_gs_buff *gsb;
767 gsb = io->vcpu_run_input;
769 rc = kvmppc_gsb_receive_datum(gsb, &gsm, iden);
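
Here the per-vcpu run input buffer is borrowed as scratch space to pull a single guest state element back from the L0 on demand. A minimal sketch, assuming a message already wrapping the vcpu ops and the kvmhv_nestedv2_io layout implied by the io->vcpu_run_input hit.

#include <linux/kvm_host.h>
#include <asm/guest-state-buffer.h>

/* Sketch: fetch one identity from the L0, letting the message's
 * refresh_info callback copy it back into vcpu->arch. */
static int cached_reload_sketch(struct kvmhv_nestedv2_io *io,
                                struct kvmppc_gs_msg *gsm, u16 iden)
{
        struct kvmppc_gs_buff *gsb = io->vcpu_run_input;

        return kvmppc_gsb_receive_datum(gsb, gsm, iden);
}
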
792 struct kvmppc_gs_buff *gsb;
797 gsb = io->vcpu_run_input;
799 rc = kvmppc_gsb_send_data(gsb, gsm);
806 kvmppc_gsb_reset(gsb);
807 rc = kvmppc_gsm_fill_info(gsm, gsb);
813 rc = kvmppc_gse_put_u64(gsb, KVMPPC_GSID_HDEC_EXPIRY_TB, time_limit);
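
The hits above are the flush path before (re)entering the guest: pending state is pushed to the L0, then the run input buffer is rebuilt from the vcpu message and capped with a hypervisor decrementer expiry. A sketch of that sequence, assuming the hits all belong to one flush helper and that gsm wraps the vcpu message ops.

#include <asm/guest-state-buffer.h>

/* Sketch: push pending state, then rebuild the run-vcpu input buffer. */
static int flush_vcpu_sketch(struct kvmppc_gs_buff *gsb,
                             struct kvmppc_gs_msg *gsm, u64 time_limit)
{
        int rc;

        /* Serialize and send whatever the message currently includes. */
        rc = kvmppc_gsb_send_data(gsb, gsm);
        if (rc < 0)
                return rc;

        /* Start the run input buffer from scratch ... */
        kvmppc_gsb_reset(gsb);
        rc = kvmppc_gsm_fill_info(gsm, gsb);
        if (rc < 0)
                return rc;

        /* ... and bound the next guest run by the HDEC expiry, in
         * timebase ticks. */
        return kvmppc_gse_put_u64(gsb, KVMPPC_GSID_HDEC_EXPIRY_TB,
                                  time_limit);
}
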
831 struct kvmppc_gs_buff *gsb;
840 gsb = kvmppc_gsb_new(size, lpid, 0, GFP_KERNEL);
841 if (!gsb)
849 rc = kvmppc_gse_put_part_table(gsb, KVMPPC_GSID_PARTITION_TABLE, patbl);
855 rc = kvmppc_gse_put_proc_table(gsb, KVMPPC_GSID_PROCESS_TABLE, prtbl);
859 rc = kvmppc_gsb_send(gsb, KVMPPC_GS_FLAGS_WIDE);
865 kvmppc_gsb_free(gsb);
869 kvmppc_gsb_free(gsb);
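
These hits register the nested guest's partition and process tables with the L0: a temporary buffer is allocated against the lpid, the two table descriptors are added, and the buffer is sent with the wide (guest-scoped, no vcpu) flag. A sketch follows; the element size computation and the descriptor fields are taken as parameters to keep it short.

#include <linux/errno.h>
#include <linux/gfp.h>
#include <asm/guest-state-buffer.h>

/* Sketch: send the partition/process table descriptors for one lpid. */
static int set_ptbl_sketch(unsigned long lpid, size_t size,
                           struct kvmppc_gs_part_table patbl,
                           struct kvmppc_gs_proc_table prtbl)
{
        struct kvmppc_gs_buff *gsb;
        int rc;

        gsb = kvmppc_gsb_new(size, lpid, 0, GFP_KERNEL);
        if (!gsb)
                return -ENOMEM;

        rc = kvmppc_gse_put_part_table(gsb, KVMPPC_GSID_PARTITION_TABLE,
                                       patbl);
        if (rc < 0)
                goto out;
        rc = kvmppc_gse_put_proc_table(gsb, KVMPPC_GSID_PROCESS_TABLE,
                                       prtbl);
        if (rc < 0)
                goto out;

        /* Guest-wide state, hence the wide flag rather than a vcpu id. */
        rc = kvmppc_gsb_send(gsb, KVMPPC_GS_FLAGS_WIDE);
out:
        kvmppc_gsb_free(gsb);
        return rc;
}
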
882 struct kvmppc_gs_buff *gsb;
886 gsb = io->vcpu_run_input;
888 kvmppc_gsb_reset(gsb);
889 rc = kvmppc_gse_put_u64(gsb, KVMPPC_GSID_VPA, vpa);
893 rc = kvmppc_gsb_send(gsb, 0);
898 kvmppc_gsb_reset(gsb);
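
Registering the virtual processor area follows the same borrow-the-run-input-buffer pattern: reset, add a single KVMPPC_GSID_VPA element, send, and reset again so the next run starts clean. A short sketch of that shape, not the kernel function itself:

#include <linux/kvm_host.h>
#include <asm/guest-state-buffer.h>

/* Sketch: tell the L0 where this vcpu's VPA lives. */
static int set_vpa_sketch(struct kvmhv_nestedv2_io *io, u64 vpa)
{
        struct kvmppc_gs_buff *gsb = io->vcpu_run_input;
        int rc;

        kvmppc_gsb_reset(gsb);
        rc = kvmppc_gse_put_u64(gsb, KVMPPC_GSID_VPA, vpa);
        if (rc < 0)
                goto out;
        rc = kvmppc_gsb_send(gsb, 0);
out:
        kvmppc_gsb_reset(gsb);
        return rc;
}
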
912 struct kvmppc_gs_buff *gsb;
916 gsb = io->vcpu_run_output;
924 return kvmppc_gsm_refresh_info(&gsm, gsb);
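
After the L0 returns from running the vcpu, the run output buffer is parsed back into vcpu state through the message's refresh_info callback. A minimal sketch, assuming gsm already wraps the vcpu message ops (the kernel builds it on the stack, as the return statement above suggests).

#include <linux/kvm_host.h>
#include <asm/guest-state-buffer.h>

/* Sketch: fold the L0's run-vcpu output back into vcpu->arch. */
static int parse_output_sketch(struct kvmhv_nestedv2_io *io,
                               struct kvmppc_gs_msg *gsm)
{
        struct kvmppc_gs_buff *gsb = io->vcpu_run_output;

        return kvmppc_gsm_refresh_info(gsm, gsb);
}
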
941 struct kvmppc_gs_buff *gsb;
949 gsb = io->vcpu_run_input;
972 rc = kvmppc_gsb_receive_data(gsb, &gsm);
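
The last group of hits pulls a set of registers back from the L0 in one round trip: the wanted identities are marked in the message and kvmppc_gsb_receive_data() requests them and refreshes the vcpu from the reply. The sketch below assumes the kvmppc_gsm_include() helper and the KVMPPC_GSID_NIA/KVMPPC_GSID_CR identity names; the real helper covers a larger register set.

#include <linux/kvm_host.h>
#include <asm/guest-state-buffer.h>

/* Sketch: pull a subset of register state back from the L0. */
static int reload_regs_sketch(struct kvmhv_nestedv2_io *io,
                              struct kvmppc_gs_msg *gsm)
{
        struct kvmppc_gs_buff *gsb = io->vcpu_run_input;

        /* Mark which identities the request should carry. */
        kvmppc_gsm_include(gsm, KVMPPC_GSID_NIA);
        kvmppc_gsm_include(gsm, KVMPPC_GSID_CR);

        /* Sends the request and refreshes vcpu state from the reply. */
        return kvmppc_gsb_receive_data(gsb, gsm);
}
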