Lines matching refs:per_trace (each match is shown with its line number in the source file)

213 static void make_topa(insntrace_device_t* ipt_dev, ipt_per_trace_state_t* per_trace) {
214 const size_t run_len_log2 = per_trace->chunk_order;
226 for (uint32_t i = 0; i < per_trace->num_chunks; ++i) {
227 io_buffer_t* buffer = &per_trace->chunks[i];
228 io_buffer_t* topa = &per_trace->topas[curr_table];
246 assert(curr_table + 1 == per_trace->num_tables ||
248 (curr_table == per_trace->num_tables && curr_idx == 0));
254 io_buffer_t* this_table = &per_trace->topas[i];
256 if (i == per_trace->num_tables - 1) {
257 next_table = &per_trace->topas[0];
259 next_table = &per_trace->topas[i + 1];
269 if (curr_table < per_trace->num_tables) {
270 io_buffer_t* this_table = &per_trace->topas[curr_table];
271 io_buffer_t* first_table = &per_trace->topas[0];
280 if (!per_trace->is_circular)
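
Lines 213-280 are the heart of the buffer setup: make_topa fills in the Table of Physical Addresses (ToPA) that tells the processor where to write trace output. Each table entry points at one chunk, the last entry of each table is an END entry chaining to the next table, and for a circular trace the last table chains back to table 0 (lines 256-257). A minimal sketch of the entry encoding involved, per the Intel SDM (the TOPA_ENTRY_* names are illustrative, not the driver's):

    #include <stdint.h>
    #include <zircon/types.h>

    #define TOPA_ENTRY_END  (1ULL << 0)  // entry holds a next-table pointer
    #define TOPA_ENTRY_INT  (1ULL << 2)  // raise an interrupt when the region fills
    #define TOPA_ENTRY_STOP (1ULL << 4)  // stop tracing when the region fills

    // Encode one ToPA entry: |pa| is the physical address of an output region
    // (or of the next table), aligned to its size; |size_log2| >= 12.
    static uint64_t topa_entry(zx_paddr_t pa, unsigned size_log2, uint64_t flags) {
        return (uint64_t)pa | ((uint64_t)(size_log2 - 12) << 6) | flags;
    }

The is_circular test at line 280 picks the wrap behavior: a circular buffer simply chains back to table 0 so hardware overwrites old data, while a non-circular one presumably marks the final data entry (STOP or INT) so tracing halts when the buffer fills.
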
288 ipt_per_trace_state_t* per_trace) {
289 uint32_t num_entries = per_trace->num_chunks;
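
compute_topa_entry_count starts from one ToPA data entry per chunk (line 289). Given an entry count, the number of tables follows mechanically; a sketch assuming 4KB tables of 8-byte entries with one slot per table reserved for the END link (the macro names are mine, not the driver's):

    #include <stdint.h>

    #define TOPA_ENTRIES_PER_TABLE 512u  // 4KB table / 8-byte entries
    #define TOPA_DATA_ENTRIES_PER_TABLE (TOPA_ENTRIES_PER_TABLE - 1)

    // Round up: every table carries up to 511 data entries plus its END link.
    static uint32_t topa_table_count(uint32_t num_entries) {
        return (num_entries + TOPA_DATA_ENTRIES_PER_TABLE - 1) /
               TOPA_DATA_ENTRIES_PER_TABLE;
    }
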
301 // Walk the tables to discover how much data has been captured for |per_trace|.
304 const ipt_per_trace_state_t* per_trace) {
305 uint64_t curr_table_paddr = per_trace->output_base;
306 uint32_t curr_table_entry_idx = (uint32_t)per_trace->output_mask_ptrs >> 7;
307 uint32_t curr_entry_offset = (uint32_t)(per_trace->output_mask_ptrs >> 32);
309 zxlogf(DEBUG1, "IPT: compute_capture_size: trace %tu\n", per_trace - ipt_dev->per_trace_state);
314 for (uint32_t table = 0; table < per_trace->num_tables; ++table) {
317 zx_paddr_t table_paddr = io_buffer_phys(&per_trace->topas[table]);
324 uint64_t* table_ptr = io_buffer_virt(&per_trace->topas[table]);
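
Lines 305-307 unpack the IA32_RTIT_OUTPUT_MASK_PTRS value saved when tracing stopped. In ToPA mode that MSR encodes the hardware's write position: bits 31:7 index the current entry within the current table and bits 63:32 hold the byte offset into that entry's output region (Intel SDM). The same decoding as standalone helpers:

    #include <stdint.h>

    // IA32_RTIT_OUTPUT_MASK_PTRS layout in ToPA mode:
    //   bits  6:0  - LowerMask, reads as all ones
    //   bits 31:7  - index of the current entry in the current ToPA table
    //   bits 63:32 - byte offset into that entry's output region
    static uint32_t topa_entry_index(uint64_t mask_ptrs) {
        return (uint32_t)mask_ptrs >> 7;   // same cast-then-shift as line 306
    }
    static uint32_t topa_output_offset(uint64_t mask_ptrs) {
        return (uint32_t)(mask_ptrs >> 32);
    }

The capture size is then the sum of every full output region walked before that position (the table loop starting at line 314) plus the partial offset within the current region.
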
337 ipt_per_trace_state_t* per_trace,
343 memset(per_trace, 0, sizeof(*per_trace));
345 per_trace->chunks = calloc(num, sizeof(io_buffer_t));
346 if (per_trace->chunks == NULL)
352 status = io_buffer_init_aligned(&per_trace->chunks[i], ipt_dev->bti,
359 ++per_trace->num_chunks;
363 zx_paddr_t pa = io_buffer_phys(&per_trace->chunks[i]);
371 assert(per_trace->num_chunks == num);
373 per_trace->chunk_order = order;
374 per_trace->is_circular = is_circular;
377 uint32_t entry_count = compute_topa_entry_count(ipt_dev, per_trace);
393 per_trace->topas = calloc(table_count, sizeof(io_buffer_t));
394 if (per_trace->topas == NULL)
398 status = io_buffer_init(&per_trace->topas[i], ipt_dev->bti,
405 ++per_trace->num_tables;
407 assert(per_trace->num_tables == table_count);
409 make_topa(ipt_dev, per_trace);
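
x86_pt_alloc_buffer1 uses a count-as-you-go cleanup idiom: num_chunks (line 359) and num_tables (line 405) advance only after each io_buffer initializes successfully, so any error path can hand the partially built state to x86_pt_free_buffer1, which releases exactly what was created. A sketch of the chunk loop's shape around lines 352-359; the chunk size, alignment, and flag arguments are assumptions, not the driver's actual values:

    #include <ddk/io-buffer.h>

    static zx_status_t alloc_chunks(insntrace_device_t* ipt_dev,
                                    ipt_per_trace_state_t* per_trace,
                                    uint32_t num, size_t chunk_size) {
        for (uint32_t i = 0; i < num; ++i) {
            // Chunks must be physically contiguous: each one is covered by a
            // single ToPA entry (see the io_buffer_phys call at line 363).
            zx_status_t status = io_buffer_init_aligned(
                &per_trace->chunks[i], ipt_dev->bti, chunk_size,
                /*alignment_log2=*/12, IO_BUFFER_RW | IO_BUFFER_CONTIG);
            if (status != ZX_OK)
                return status;  // caller frees only the num_chunks counted so far
            ++per_trace->num_chunks;
        }
        return ZX_OK;
    }
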
414 static void x86_pt_free_buffer1(insntrace_device_t* ipt_dev, ipt_per_trace_state_t* per_trace) {
415 if (per_trace->chunks) {
416 for (uint32_t i = 0; i < per_trace->num_chunks; ++i) {
417 io_buffer_release(&per_trace->chunks[i]);
420 free(per_trace->chunks);
421 per_trace->chunks = NULL;
423 if (per_trace->topas) {
424 for (uint32_t i = 0; i < per_trace->num_tables; ++i) {
425 io_buffer_release(&per_trace->topas[i]);
428 free(per_trace->topas);
429 per_trace->topas = NULL;
431 per_trace->allocated = false;
513 ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[descriptor];
514 memset(per_trace, 0, sizeof(*per_trace));
515 zx_status_t status = x86_pt_alloc_buffer1(ipt_dev, per_trace,
518 x86_pt_free_buffer1(ipt_dev, per_trace);
522 per_trace->ctl = config->ctl;
523 per_trace->status = 0;
524 per_trace->output_base = io_buffer_phys(&per_trace->topas[0]);
525 per_trace->output_mask_ptrs = 0;
526 per_trace->cr3_match = config->cr3_match;
527 static_assert(sizeof(per_trace->addr_ranges) == sizeof(config->addr_ranges),
529 memcpy(per_trace->addr_ranges, config->addr_ranges, sizeof(config->addr_ranges));
530 per_trace->allocated = true;
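
Lines 522-529 stage the initial hardware state for the trace: output_base gets the physical address of ToPA table 0 and output_mask_ptrs starts at zero, meaning entry 0, offset 0. These per_trace fields map one-for-one onto the IA32_RTIT_* MSRs the kernel loads when tracing starts; an illustrative view of that mapping (this struct mirrors the fields for exposition, it is not a real API):

    #include <stdint.h>

    typedef struct {
        uint64_t ctl;              // IA32_RTIT_CTL: TraceEn, ToPA, filter bits
        uint64_t status;           // IA32_RTIT_STATUS: zeroed for a fresh trace
        uint64_t output_base;      // IA32_RTIT_OUTPUT_BASE: phys addr of table 0
        uint64_t output_mask_ptrs; // IA32_RTIT_OUTPUT_MASK_PTRS: 0 = start of table 0
        uint64_t cr3_match;        // IA32_RTIT_CR3_MATCH: address-space filter
    } pt_msr_image_t;              // illustrative name only
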
558 ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[descriptor];
559 if (!per_trace->allocated)
561 x86_pt_free_buffer1(ipt_dev, per_trace);
635 ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[i];
636 if (per_trace->allocated)
637 x86_pt_free_buffer1(ipt_dev, per_trace);
731 const ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[descriptor];
732 if (!per_trace->allocated)
735 config.num_chunks = per_trace->num_chunks;
736 config.chunk_order = per_trace->chunk_order;
737 config.is_circular = per_trace->is_circular;
738 config.ctl = per_trace->ctl;
739 config.cr3_match = per_trace->cr3_match;
740 static_assert(sizeof(config.addr_ranges) == sizeof(per_trace->addr_ranges),
742 memcpy(config.addr_ranges, per_trace->addr_ranges, sizeof(per_trace->addr_ranges));
766 const ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[descriptor];
767 if (!per_trace->allocated)
771 data.capture_end = compute_capture_size(ipt_dev, per_trace);
792 const ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[req.descriptor];
793 if (!per_trace->allocated)
795 if (req.chunk_num >= per_trace->num_chunks)
798 zx_status_t status = zx_handle_duplicate(per_trace->chunks[req.chunk_num].vmo_handle, ZX_RIGHT_SAME_RIGHTS, &h);
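
Line 798 duplicates the chunk's VMO handle so a client can read the captured bytes out of process; ZX_RIGHT_SAME_RIGHTS keeps the driver's rights on the duplicate (a hardened driver might mask the duplicate down to read-only instead). A hypothetical client-side read of one chunk, assuming the current zx_vmo_read signature:

    #include <zircon/syscalls.h>

    // Read |chunk_size| trace bytes out of the VMO returned by the ioctl.
    static zx_status_t read_chunk(zx_handle_t vmo, void* buf, size_t chunk_size) {
        return zx_vmo_read(vmo, buf, /*offset=*/0, chunk_size);
    }
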
830 const ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[cpu];
831 if (!per_trace->allocated)
836 const ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[cpu];
839 regs.ctl = per_trace->ctl;
841 regs.status = per_trace->status;
842 regs.output_base = per_trace->output_base;
843 regs.output_mask_ptrs = per_trace->output_mask_ptrs;
844 regs.cr3_match = per_trace->cr3_match;
845 static_assert(sizeof(regs.addr_ranges) == sizeof(per_trace->addr_ranges),
847 memcpy(regs.addr_ranges, per_trace->addr_ranges, sizeof(per_trace->addr_ranges));
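
Lines 839-847 assemble the per-CPU register image before tracing starts; lines 889-896 below do the reverse after it stops. The hand-off to the kernel is presumably a mtrace control call; a heavily hedged sketch, in which the SKETCH_* constants are placeholders for the real kernel-header values (zx_mtrace_control itself is a real, root-resource-gated syscall):

    #include <zircon/syscalls.h>

    // Placeholder values; the real MTRACE_* kind/action constants differ.
    #define SKETCH_MTRACE_KIND_IPT  2u
    #define SKETCH_MTRACE_IPT_STAGE 1u

    static zx_status_t stage_cpu_regs(zx_handle_t root_resource, uint32_t cpu,
                                      void* regs, size_t regs_size) {
        return zx_mtrace_control(root_resource, SKETCH_MTRACE_KIND_IPT,
                                 SKETCH_MTRACE_IPT_STAGE, cpu, regs, regs_size);
    }
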
881 ipt_per_trace_state_t* per_trace = &ipt_dev->per_trace_state[cpu];
889 per_trace->ctl = regs.ctl;
890 per_trace->status = regs.status;
891 per_trace->output_base = regs.output_base;
892 per_trace->output_mask_ptrs = regs.output_mask_ptrs;
893 per_trace->cr3_match = regs.cr3_match;
894 static_assert(sizeof(per_trace->addr_ranges) == sizeof(regs.addr_ranges),
896 memcpy(per_trace->addr_ranges, regs.addr_ranges, sizeof(regs.addr_ranges));
899 if (per_trace->status & IPT_STATUS_ERROR_MASK) {
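
After tracing stops, line 899 checks the saved IA32_RTIT_STATUS for trouble. Per the Intel SDM the bits of interest are Error (bit 4: an operational error cleared TraceEn) and Stopped (bit 5: tracing halted, e.g. by a ToPA STOP entry); whether IPT_STATUS_ERROR_MASK covers exactly these bits is an assumption:

    #include <stdbool.h>
    #include <stdint.h>

    #define PT_STATUS_ERROR   (1ULL << 4)  // IA32_RTIT_STATUS.Error
    #define PT_STATUS_STOPPED (1ULL << 5)  // IA32_RTIT_STATUS.Stopped

    static bool pt_trace_had_error(uint64_t status) {
        return (status & PT_STATUS_ERROR) != 0;
    }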