Searched refs:last_op (Results 1 - 9 of 9) sorted by relevance

/linux-master/tools/perf/util/
strfilter.c:84  struct strfilter_node root, *cur, *last_op;  [local]
91 last_op = cur = &root;
97 if (!cur->r || !last_op->r)
99 cur = strfilter_node__alloc(OP_and, last_op->r, NULL);
102 last_op->r = cur;
103 last_op = cur;
112 last_op = cur;
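
These hits are perf's filter-expression parser: last_op remembers the most recently created operator node, so when another term is parsed a new OP_and node can take over that node's right subtree and then become the attachment point itself. Below is a minimal standalone sketch of that splice under invented node and helper names; it illustrates the pattern, it is not the perf code.

#include <stdio.h>
#include <stdlib.h>

struct node {
	const char *label;	/* "AND" for operator nodes, symbol text for leaves */
	struct node *l, *r;
};

static struct node *node_new(const char *label, struct node *l, struct node *r)
{
	struct node *n = calloc(1, sizeof(*n));

	n->label = label;
	n->l = l;
	n->r = r;
	return n;
}

int main(void)
{
	struct node root = { .label = "ROOT" }, *cur, *last_op;
	const char *terms[] = { "a", "b", "c" };

	last_op = cur = &root;
	for (int i = 0; i < 3; i++) {
		struct node *leaf = node_new(terms[i], NULL, NULL);

		if (!last_op->r) {
			/* first term hangs directly off the last operator node */
			last_op->r = leaf;
			continue;
		}
		/* splice: the new AND inherits the old right child ... */
		cur = node_new("AND", last_op->r, leaf);
		/* ... and becomes the attachment point for the next term */
		last_op->r = cur;
		last_op = cur;
	}
	/* prints AND(a, AND(b, c)) */
	printf("%s(%s, %s(%s, %s))\n", root.r->label, root.r->l->label,
	       root.r->r->label, root.r->r->l->label, root.r->r->r->label);
	return 0;
}
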
/linux-master/drivers/net/dsa/sja1105/
sja1105_tas.h:45  enum sja1105_ptp_op last_op;  [member of struct sja1105_tas_data]
sja1105_tas.c:740  if (tas_data->last_op != SJA1105_PTP_ADJUSTFREQ)
785 if (tas_data->last_op != SJA1105_PTP_ADJUSTFREQ) {
820 if (tas_data->last_op != SJA1105_PTP_ADJUSTFREQ) {
853 tas_data->last_op = SJA1105_PTP_CLOCKSTEP;
869 tas_data->last_op = SJA1105_PTP_ADJUSTFREQ;
880 tas_data->last_op = SJA1105_PTP_NONE;
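
In the sja1105 driver, last_op is a one-slot memory in struct sja1105_tas_data: each PTP operation records whether it stepped the clock or merely adjusted its frequency, and the TAS state machine branches on that record the next time it runs. The sketch below mirrors only that bookkeeping, with shortened names and a made-up handler; the real decisions in sja1105_tas.c are more involved.

#include <stdio.h>

enum ptp_op { PTP_NONE, PTP_CLOCKSTEP, PTP_ADJUSTFREQ };

struct tas_data {
	enum ptp_op last_op;
};

/* mirrors the assignments in the hits at 853/869/880 */
static void on_clockstep(struct tas_data *d)  { d->last_op = PTP_CLOCKSTEP; }
static void on_adjustfreq(struct tas_data *d) { d->last_op = PTP_ADJUSTFREQ; }

/* mirrors the guards at 740/785/820: the two PTP operations are handled differently */
static void state_machine(const struct tas_data *d)
{
	if (d->last_op != PTP_ADJUSTFREQ)
		printf("last op was a clock step (or nothing): take the resync path\n");
	else
		printf("last op was a frequency adjustment: take the fast path\n");
}

int main(void)
{
	struct tas_data d = { .last_op = PTP_NONE };

	on_clockstep(&d);
	state_machine(&d);
	on_adjustfreq(&d);
	state_machine(&d);
	return 0;
}
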
/linux-master/drivers/gpu/drm/xe/
xe_vm.c:808  bool first_op, bool last_op);
1657 bool first_op, bool last_op)
1723 if (last_op) {
1743 bool first_op, bool last_op)
1797 if (last_op) {
1833 bool last_op)
1849 last_op);
1858 if (last_op) {
1864 if (last_op)
1874 bool last_op)
1655 xe_vm_unbind_vma(struct xe_vma *vma, struct xe_exec_queue *q, struct xe_sync_entry *syncs, u32 num_syncs, bool first_op, bool last_op)  [argument]
1741 xe_vm_bind_vma(struct xe_vma *vma, struct xe_exec_queue *q, struct xe_sync_entry *syncs, u32 num_syncs, bool first_op, bool last_op)  [argument]
1830 __xe_vm_bind(struct xe_vm *vm, struct xe_vma *vma, struct xe_exec_queue *q, struct xe_sync_entry *syncs, u32 num_syncs, bool immediate, bool first_op, bool last_op)  [argument]
1871 xe_vm_bind(struct xe_vm *vm, struct xe_vma *vma, struct xe_exec_queue *q, struct xe_bo *bo, struct xe_sync_entry *syncs, u32 num_syncs, bool immediate, bool first_op, bool last_op)  [argument]
1891 xe_vm_unbind(struct xe_vm *vm, struct xe_vma *vma, struct xe_exec_queue *q, struct xe_sync_entry *syncs, u32 num_syncs, bool first_op, bool last_op)  [argument]
2053 xe_vm_prefetch(struct xe_vm *vm, struct xe_vma *vma, struct xe_exec_queue *q, u32 region, struct xe_sync_entry *syncs, u32 num_syncs, bool first_op, bool last_op)  [argument]
2381 struct xe_vma_op *last_op = NULL;  [local]
[all...]
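
In xe_vm.c, last_op is a bool threaded through the bind/unbind/prefetch helpers: one user request can expand into several VMA operations, and only the operation flagged last_op attaches the caller-visible completion (the "if (last_op)" branches above). A standalone sketch of that batching convention follows; the types and the "attach fence" step are stand-ins, not the xe driver's API.

#include <stdbool.h>
#include <stdio.h>

struct vma_op {
	const char *what;
};

static void process_one(const struct vma_op *op, bool first_op, bool last_op)
{
	printf("%-8s first_op=%d last_op=%d\n", op->what, first_op, last_op);
	if (last_op)
		printf("  -> attach the batch's user syncs/fence here\n");
}

static void process_batch(const struct vma_op *ops, int n)
{
	for (int i = 0; i < n; i++)
		process_one(&ops[i], i == 0, i == n - 1);
}

int main(void)
{
	struct vma_op ops[] = { { "unbind" }, { "bind" }, { "prefetch" } };

	process_batch(ops, 3);
	return 0;
}
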
/linux-master/tools/include/uapi/linux/
io_uring.h:623  __u8 last_op; /* last opcode supported */  [member of struct io_uring_probe]
/linux-master/include/uapi/linux/
io_uring.h:640  __u8 last_op; /* last opcode supported */  [member of struct io_uring_probe]
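
Both copies of io_uring.h describe the same probe structure: last_op reports the highest opcode number the running kernel understands. A small userspace sketch of reading it, assuming liburing is installed; io_uring_get_probe(), io_uring_opcode_supported() and io_uring_free_probe() are liburing helpers, and the program links with -luring.

#include <stdio.h>
#include <liburing.h>

int main(void)
{
	struct io_uring_probe *p = io_uring_get_probe();

	if (!p) {
		fprintf(stderr, "io_uring probe not available on this kernel\n");
		return 1;
	}
	/* last_op is the highest opcode the running kernel knows about */
	printf("last supported opcode: %d (ops_len=%d)\n", p->last_op, p->ops_len);
	printf("IORING_OP_READV supported: %d\n",
	       io_uring_opcode_supported(p, IORING_OP_READV));
	io_uring_free_probe(p);
	return 0;
}
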
/linux-master/tools/testing/selftests/bpf/prog_tests/
reg_bounds.c:463  enum op { OP_LT, OP_LE, OP_GT, OP_GE, OP_EQ, OP_NE, first_op = OP_LT, last_op = OP_NE };  [enumerator in enum op]
1455 for (sub.op = first_op; sub.op <= last_op; sub.op++) {
1753 ctx.total_case_cnt = (last_op - first_op + 1) * (2 * ctx.range_cnt * ctx.val_cnt);
1792 ctx.total_case_cnt = (last_op - first_op + 1) * (2 * ctx.subrange_cnt * ctx.subval_cnt);
1849 ctx.total_case_cnt = (last_op - first_op + 1) * (2 * rcnt * (rcnt + 1) / 2);
1969 ctx.total_case_cnt = (last_op - first_op + 1) * (2 * ctx.rand_case_cnt);
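
reg_bounds.c uses a common enum idiom: first_op and last_op are extra enumerators aliasing the first and last real comparison operators, so the selftest can loop over every operator and size its test matrix as (last_op - first_op + 1) times the per-operator case count. The self-contained illustration below copies only that idiom; the counts and the loop body are invented.

#include <stdio.h>

enum op { OP_LT, OP_LE, OP_GT, OP_GE, OP_EQ, OP_NE,
	  first_op = OP_LT, last_op = OP_NE };

static const char * const op_name[] = { "<", "<=", ">", ">=", "==", "!=" };

int main(void)
{
	int range_cnt = 4, val_cnt = 8;
	int total_case_cnt = (last_op - first_op + 1) * (2 * range_cnt * val_cnt);

	for (int o = first_op; o <= last_op; o++)
		printf("would exercise op %s\n", op_name[o]);
	printf("total cases: %d\n", total_case_cnt);	/* 6 * 64 = 384 */
	return 0;
}
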
/linux-master/drivers/mtd/nand/raw/brcmnand/
brcmnand.c:1792  bool last_op)
1816 if (last_op)
2414 bool last_op;  [local]
2422 last_op = ((i == (op->ninstrs - 1)) && (instr->type != NAND_OP_WAITRDY_INSTR)) ||
2427 brcmnand_low_level_op(host, LL_OP_CMD, instr->ctx.cmd.opcode, last_op);
2433 last_op && (i == (instr->ctx.addr.naddrs - 1)));
2440 last_op && (i == (instr->ctx.data.len - 1)));
2449 last_op && (i == (instr->ctx.data.len - 1)));
1790 brcmnand_low_level_op(struct brcmnand_host *host, enum brcmnand_llop_type type, u32 data, bool last_op)  [argument]
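
brcmnand expands each instruction of a NAND operation into one or more low-level cycles and must raise last_op only on the very last cycle it will actually issue; a trailing WAITRDY instruction does not issue one. The hit at 2422 is truncated at the "||", so the second clause in the sketch below is my reading of it rather than a quote, and all the types are mocked up.

#include <stdbool.h>
#include <stdio.h>

enum instr_type { INSTR_CMD, INSTR_ADDR, INSTR_DATA, INSTR_WAITRDY };

struct instr {
	enum instr_type type;
	int ncycles;	/* how many low-level cycles this instruction expands to */
};

int main(void)
{
	/* a read-like sequence: one command, 3 address cycles, 4 data cycles, wait */
	struct instr instrs[] = {
		{ INSTR_CMD, 1 }, { INSTR_ADDR, 3 },
		{ INSTR_DATA, 4 }, { INSTR_WAITRDY, 0 },
	};
	int ninstrs = 4;

	for (int i = 0; i < ninstrs; i++) {
		/*
		 * Last instruction of the op, unless the op ends in a WAITRDY,
		 * in which case the instruction just before it is the last one
		 * that issues cycles (assumed reading of the truncated hit).
		 */
		bool last_instr =
			(i == ninstrs - 1 && instrs[i].type != INSTR_WAITRDY) ||
			(i == ninstrs - 2 && instrs[i + 1].type == INSTR_WAITRDY);

		for (int c = 0; c < instrs[i].ncycles; c++) {
			bool last_op = last_instr && c == instrs[i].ncycles - 1;

			printf("instr %d cycle %d last_op=%d\n", i, c, last_op);
		}
	}
	return 0;
}
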
/linux-master/io_uring/
register.c:110  p->last_op = IORING_OP_LAST - 1;
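
The kernel fills that probe field from the opcode enum's terminator: IORING_OP_LAST is a count sentinel rather than a real opcode, so the newest supported opcode is IORING_OP_LAST - 1. A tiny standalone illustration of the sentinel convention, with made-up opcode names:

#include <stdio.h>

/* MY_OP_LAST is a count sentinel, not a real opcode */
enum my_op { MY_OP_NOP, MY_OP_READ, MY_OP_WRITE, MY_OP_LAST };

struct my_probe {
	unsigned char last_op;	/* last opcode supported */
};

static void fill_probe(struct my_probe *p)
{
	p->last_op = MY_OP_LAST - 1;	/* == MY_OP_WRITE, the highest real opcode */
}

int main(void)
{
	struct my_probe p;

	fill_probe(&p);
	printf("last_op = %d\n", p.last_op);	/* prints 2 */
	return 0;
}
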
