Searched refs:tx_ctx (Results 1 - 14 of 14) sorted by relevance

/linux-master/drivers/net/ethernet/fungible/funeth/
funeth_ktls.c
32 struct fun_ktls_tx_ctx *tx_ctx; local
64 tx_ctx = tls_driver_ctx(sk, direction);
65 tx_ctx->tlsid = rsp.tlsid;
66 tx_ctx->next_seq = start_offload_tcp_sn;
77 struct fun_ktls_tx_ctx *tx_ctx; local
82 tx_ctx = __tls_driver_ctx(tls_ctx, direction);
89 req.tlsid = tx_ctx->tlsid;
100 struct fun_ktls_tx_ctx *tx_ctx; local
106 tx_ctx = tls_driver_ctx(sk, direction);
113 req.tlsid = tx_ctx
[all...]
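The funeth hits follow the stock kTLS TX offload pattern: the driver keeps its tiny per-connection state (the device TLS id and the next TCP sequence number it expects) in the driver_state area of the core TLS context and fetches it with tls_driver_ctx()/__tls_driver_ctx(). A minimal sketch of that pattern follows; the field types and the program_key() helper are assumptions, only the tls_dev_add-style callback shape and the tls_driver_ctx() accessor come from the TLS core.

    #include <net/tls.h>

    /* Per-connection driver state; must stay small enough to fit the
     * fixed-size driver_state area of the TLS offload context.
     * Field types are guesses based on the hits above.
     */
    struct fun_ktls_tx_ctx {
        u64 tlsid;      /* key handle returned by the device */
        u32 next_seq;   /* next TCP sequence number to offload */
    };

    /* Stand-in for the device command that installs the key (hypothetical). */
    static int program_key(struct net_device *dev,
                           struct tls_crypto_info *crypto_info, u64 *tlsid)
    {
        *tlsid = 0;     /* a real driver gets this back from firmware */
        return 0;
    }

    /* Shape of a .tls_dev_add style callback: program the key into the
     * device, then stash the returned handle in the driver_state area.
     */
    static int sketch_ktls_add(struct net_device *dev, struct sock *sk,
                               enum tls_offload_ctx_dir direction,
                               struct tls_crypto_info *crypto_info,
                               u32 start_offload_tcp_sn)
    {
        struct fun_ktls_tx_ctx *tx_ctx;
        u64 tlsid;
        int err;

        err = program_key(dev, crypto_info, &tlsid);
        if (err)
            return err;

        tx_ctx = tls_driver_ctx(sk, direction);
        tx_ctx->tlsid = tlsid;
        tx_ctx->next_seq = start_offload_tcp_sn;
        return 0;
    }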
/linux-master/net/wireless/
lib80211_crypt_wep.c
35 struct arc4_ctx tx_ctx; member in struct:lib80211_wep_data
138 arc4_setkey(&wep->tx_ctx, key, klen);
139 arc4_crypt(&wep->tx_ctx, pos, pos, len + 4);
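lib80211_crypt_wep keeps one arc4_ctx per direction and re-keys it for every frame, because WEP seeds RC4 with a fresh 3-byte IV prepended to the shared key; the cipher then runs over the payload plus the 4-byte CRC-32 ICV (the "len + 4" at line 139). A reduced sketch of that encrypt step with the lib/crypto ARC4 helpers; the seed layout is standard WEP, the function and buffer names are illustrative.

    #include <crypto/arc4.h>
    #include <linux/string.h>

    #define WEP_IV_LEN   3
    #define WEP_ICV_LEN  4

    /* Encrypt one frame body in place: seed = IV || WEP key, then run
     * RC4 over payload + ICV, mirroring the calls at lines 138/139.
     */
    static void sketch_wep_encrypt(struct arc4_ctx *tx_ctx,
                                   const u8 *iv, const u8 *wep_key, int keylen,
                                   u8 *data, int data_len)
    {
        u8 seed[WEP_IV_LEN + 16];   /* room for IV plus a 104-bit key */

        memcpy(seed, iv, WEP_IV_LEN);
        memcpy(seed + WEP_IV_LEN, wep_key, keylen);

        arc4_setkey(tx_ctx, seed, WEP_IV_LEN + keylen);
        arc4_crypt(tx_ctx, data, data, data_len + WEP_ICV_LEN);
    }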
/linux-master/drivers/infiniband/sw/siw/
siw_qp.c
137 qp->tx_ctx.tx_suspend = 1;
232 struct siw_iwarp_tx *c_tx = &qp->tx_ctx;
586 if (qp->tx_ctx.mpa_crc_hd) {
587 crypto_shash_init(qp->tx_ctx.mpa_crc_hd);
588 if (crypto_shash_update(qp->tx_ctx.mpa_crc_hd,
594 if (crypto_shash_update(qp->tx_ctx.mpa_crc_hd,
599 crypto_shash_final(qp->tx_ctx.mpa_crc_hd, (u8 *)&crc);
663 qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_SEND] = 0;
664 qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_RDMA_READ] = 0;
665 qp->tx_ctx
[all...]
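The siw_qp.c hits around lines 586-599 are the usual synchronous shash sequence: when the QP's TX context carries an MPA CRC descriptor, the outgoing FPDU is folded into it with init/update/.../final. A stripped-down sketch of that sequence over a header and one payload fragment; it assumes the shash_desc was allocated and bound to a CRC tfm (e.g. "crc32c") elsewhere.

    #include <crypto/hash.h>

    /* Fold a header and a payload fragment into an already set-up shash
     * descriptor and emit the 4-byte digest, as at lines 587-599.
     */
    static int sketch_mpa_crc(struct shash_desc *crc_hd,
                              const u8 *hdr, unsigned int hdr_len,
                              const u8 *payload, unsigned int pay_len,
                              u32 *crc)
    {
        int rv;

        rv = crypto_shash_init(crc_hd);
        if (rv)
            return rv;
        rv = crypto_shash_update(crc_hd, hdr, hdr_len);
        if (rv)
            return rv;
        rv = crypto_shash_update(crc_hd, payload, pay_len);
        if (rv)
            return rv;
        return crypto_shash_final(crc_hd, (u8 *)crc);
    }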
siw_qp_tx.c
702 struct siw_iwarp_tx *c_tx = &qp->tx_ctx;
790 struct siw_iwarp_tx *c_tx = &qp->tx_ctx;
792 int rv = 0, burst_len = qp->tx_ctx.burst;
912 qp->tx_ctx.burst = burst_len;
1031 if (unlikely(qp->tx_ctx.tx_suspend)) {
1089 qp->tx_ctx.ctrl_sent, qp->tx_ctx.ctrl_len,
1090 qp->tx_ctx.bytes_unsent);
1126 if (!qp->tx_ctx.tx_suspend)
1171 !qp->tx_ctx
[all...]
siw.h
434 struct siw_iwarp_tx tx_ctx; /* Transmit context */ member in struct:siw_qp
472 #define tx_qp(tx) container_of(tx, struct siw_qp, tx_ctx)
473 #define tx_wqe(qp) (&(qp)->tx_ctx.wqe_active)
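siw.h embeds the TX context by value in struct siw_qp, and the tx_qp()/tx_wqe() macros at lines 472/473 use container_of() to get from a pointer to the embedded member back to its owning QP. A minimal, self-contained illustration of that idiom with hypothetical demo types:

    #include <linux/container_of.h>

    struct demo_iwarp_tx { int burst; };

    struct demo_qp {
        int id;
        struct demo_iwarp_tx tx_ctx;    /* embedded by value, not a pointer */
    };

    /* Same shape as the real tx_qp() macro. */
    #define demo_tx_qp(tx) container_of(tx, struct demo_qp, tx_ctx)

    static int demo_owner_id(struct demo_iwarp_tx *c_tx)
    {
        return demo_tx_qp(c_tx)->id;    /* recover the containing QP */
    }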
siw_verbs.c
428 qp->tx_ctx.gso_seg_limit = 1;
429 qp->tx_ctx.zcopy_tx = zcopy_tx;
573 qp->tx_ctx.tx_suspend = 1;
622 kfree(qp->tx_ctx.mpa_crc_hd);
965 qp->tx_ctx.in_syscall = 1;
967 if (siw_qp_sq_process(qp) != 0 && !(qp->tx_ctx.tx_suspend))
970 qp->tx_ctx.in_syscall = 0;
siw_qp_rx.c
1145 if (qp->tx_ctx.orq_fence) {
1166 qp->tx_ctx.orq_fence = 0;
1174 qp->tx_ctx.orq_fence = 0;
siw_cm.c
392 qp->tx_ctx.tx_suspend = 1;
767 qp->tx_ctx.gso_seg_limit = 0;
1314 cep->qp->tx_ctx.tx_suspend = 1;
1588 qp->tx_ctx.gso_seg_limit = 0;
/linux-master/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
98 struct tls_offload_context_tx *tx_ctx; member in struct:mlx5e_ktls_offload_context_tx
496 priv_tx->tx_ctx = tls_offload_ctx_tx(tls_ctx);
625 struct tls_offload_context_tx *tx_ctx = priv_tx->tx_ctx; local
632 spin_lock_irqsave(&tx_ctx->lock, flags);
633 record = tls_get_record(tx_ctx, tcp_seq, &info->rcd_sn);
672 spin_unlock_irqrestore(&tx_ctx->lock, flags);
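The mlx5 hits show the TX resync path: priv_tx caches the core tls_offload_context_tx at setup (line 496), and when the NIC falls out of sync the driver takes tx_ctx->lock and asks the TLS core which record covers a given TCP sequence with tls_get_record(). A hedged sketch of that lookup; everything except the TLS core types and calls is illustrative.

    #include <net/tls.h>
    #include <linux/errno.h>

    /* Find the TLS record containing tcp_seq and report its record
     * sequence number, under the context lock as at lines 632-672.
     */
    static int sketch_tx_resync_lookup(struct tls_offload_context_tx *tx_ctx,
                                       u32 tcp_seq, u64 *rcd_sn)
    {
        struct tls_record_info *record;
        unsigned long flags;
        int ret = 0;

        spin_lock_irqsave(&tx_ctx->lock, flags);
        record = tls_get_record(tx_ctx, tcp_seq, rcd_sn);
        if (!record)
            ret = -EINVAL;      /* no record covers that sequence */
        spin_unlock_irqrestore(&tx_ctx->lock, flags);

        return ret;
    }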
/linux-master/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
chcr_ktls.c
645 struct tls_offload_context_tx *tx_ctx; local
684 tx_ctx = tls_offload_ctx_tx(tls_ctx);
687 ret = xa_insert_bh(&u_ctx->tid_list, tid, tx_ctx,
1922 struct tls_offload_context_tx *tx_ctx; local
1942 tx_ctx = tls_offload_ctx_tx(tls_ctx);
1977 spin_lock_irqsave(&tx_ctx->lock, flags);
1983 record = tls_get_record(tx_ctx, tcp_seq,
1989 spin_unlock_irqrestore(&tx_ctx->lock, flags);
2013 spin_unlock_irqrestore(&tx_ctx->lock,
2044 spin_unlock_irqrestore(&tx_ctx
2183 struct tls_offload_context_tx *tx_ctx; local
[all...]
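chcr_ktls.c additionally keys the per-connection context by hardware TID: xa_insert_bh() at line 687 files tx_ctx under the tid so completion and resync paths can find it later. A small sketch of that bookkeeping with a bare xarray standing in for u_ctx->tid_list; the GFP flag assumes it is called from the connection-setup (process) context.

    #include <linux/xarray.h>
    #include <net/tls.h>

    static DEFINE_XARRAY(tid_list);   /* tid -> struct tls_offload_context_tx * */

    static int sketch_track_tid(u32 tid, struct tls_offload_context_tx *tx_ctx)
    {
        /* _bh flavour because the entry is also touched from BH context. */
        return xa_insert_bh(&tid_list, tid, tx_ctx, GFP_KERNEL);
    }

    static struct tls_offload_context_tx *sketch_lookup_tid(u32 tid)
    {
        return xa_load(&tid_list, tid);
    }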
/linux-master/net/tipc/
crypto.c
738 struct tipc_crypto_tx_ctx *tx_ctx; local
773 ctx = tipc_aead_mem_alloc(tfm, sizeof(*tx_ctx), &iv, &req, &sg, nsg);
809 tx_ctx = (struct tipc_crypto_tx_ctx *)ctx;
810 tx_ctx->aead = aead;
811 tx_ctx->bearer = b;
812 memcpy(&tx_ctx->dst, dst, sizeof(*dst));
836 struct tipc_crypto_tx_ctx *tx_ctx = TIPC_SKB_CB(skb)->crypto_ctx; local
837 struct tipc_bearer *b = tx_ctx->bearer;
838 struct tipc_aead *aead = tx_ctx->aead;
847 b->media->send_msg(net, skb, b, &tx_ctx
[all...]
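The tipc crypto.c hits show the asynchronous-AEAD bookkeeping: a small tipc_crypto_tx_ctx is allocated per request, filled with everything the completion needs (aead, bearer, destination), parked in the skb control block (TIPC_SKB_CB(skb)->crypto_ctx, line 836), and the encrypt-done callback pulls it back out to hand the ciphertext skb to the bearer (line 847). A generic, self-contained sketch of that "context in skb->cb" trick; the struct contents and function names are illustrative, not TIPC's.

    #include <linux/skbuff.h>
    #include <linux/slab.h>
    #include <linux/errno.h>
    #include <linux/bug.h>

    struct sketch_tx_ctx {
        void *bearer;   /* whatever the completion handler will need */
    };

    static inline struct sketch_tx_ctx **sketch_cb(struct sk_buff *skb)
    {
        BUILD_BUG_ON(sizeof(struct sketch_tx_ctx *) > sizeof(skb->cb));
        return (struct sketch_tx_ctx **)skb->cb;
    }

    static int sketch_start_encrypt(struct sk_buff *skb, void *bearer)
    {
        struct sketch_tx_ctx *tx_ctx = kzalloc(sizeof(*tx_ctx), GFP_ATOMIC);

        if (!tx_ctx)
            return -ENOMEM;
        tx_ctx->bearer = bearer;
        *sketch_cb(skb) = tx_ctx;   /* survives until the async completion */
        /* ... submit the AEAD request here ... */
        return 0;
    }

    static void sketch_encrypt_done(struct sk_buff *skb)
    {
        struct sketch_tx_ctx *tx_ctx = *sketch_cb(skb);

        /* use tx_ctx->bearer to forward the now-encrypted skb */
        kfree(tx_ctx);
    }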
/linux-master/drivers/net/ethernet/intel/i40e/
i40e_virtchnl_pf.c
634 struct i40e_hmc_obj_txq tx_ctx; local
652 memset(&tx_ctx, 0, sizeof(struct i40e_hmc_obj_txq));
655 tx_ctx.base = info->dma_ring_addr / 128;
656 tx_ctx.qlen = info->ring_len;
657 tx_ctx.rdylist = le16_to_cpu(vsi->info.qs_handle[0]);
658 tx_ctx.rdylist_act = 0;
659 tx_ctx.head_wb_ena = info->headwb_enabled;
660 tx_ctx.head_wb_addr = info->dma_headwb_addr;
673 ret = i40e_set_lan_tx_queue_context(hw, pf_queue_id, &tx_ctx);
i40e_main.c
3462 struct i40e_hmc_obj_txq tx_ctx; local
3481 memset(&tx_ctx, 0, sizeof(tx_ctx));
3483 tx_ctx.new_context = 1;
3484 tx_ctx.base = (ring->dma / 128);
3485 tx_ctx.qlen = ring->count;
3488 tx_ctx.fd_ena = 1;
3490 tx_ctx.timesync_ena = 1;
3493 tx_ctx.head_wb_ena = 1;
3494 tx_ctx
[all...]
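Both i40e hits build an i40e_hmc_obj_txq on the stack, zero it, fill in the ring geometry (new_context, base in 128-byte units, qlen, optional head-writeback / FD / timesync enables) and hand it to the HMC with i40e_set_lan_tx_queue_context(). A condensed sketch of that sequence limited to the fields visible in the hits; it assumes the driver-internal i40e headers, and the parameter and return types are simplified.

    /* Program one LAN TX queue context, mirroring i40e_main.c:3481-3494
     * and i40e_virtchnl_pf.c:652-673 (driver-internal i40e headers assumed).
     */
    static int sketch_config_tx_queue(struct i40e_hw *hw, u16 pf_queue_id,
                                      dma_addr_t ring_dma, u16 ring_len,
                                      bool head_wb, dma_addr_t head_wb_addr)
    {
        struct i40e_hmc_obj_txq tx_ctx;

        memset(&tx_ctx, 0, sizeof(tx_ctx));

        tx_ctx.new_context = 1;
        tx_ctx.base = ring_dma / 128;   /* HMC wants the base in 128-byte units */
        tx_ctx.qlen = ring_len;
        tx_ctx.head_wb_ena = head_wb;
        tx_ctx.head_wb_addr = head_wb_addr;

        return i40e_set_lan_tx_queue_context(hw, pf_queue_id, &tx_ctx);
    }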
/linux-master/net/tls/
tls_sw.c
2589 struct tls_sw_context_tx *tx_ctx = tls_sw_ctx_tx(ctx); local
2592 if (tls_is_tx_ready(tx_ctx) &&
2593 !test_and_set_bit(BIT_TX_SCHEDULED, &tx_ctx->tx_bitmask))
2594 schedule_delayed_work(&tx_ctx->tx_work.work, 0);
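The tls_sw.c hit is the standard "queue at most one worker" guard: BIT_TX_SCHEDULED is test-and-set atomically before schedule_delayed_work(), so concurrent wakeups cannot queue the TX work twice, and the worker clears the bit when it runs. A generic sketch of that guard; the struct and function names mirror the hit but are illustrative.

    #include <linux/workqueue.h>
    #include <linux/bitops.h>
    #include <linux/container_of.h>

    #define BIT_TX_SCHEDULED 0

    struct sketch_tx_work {
        struct delayed_work work;
        unsigned long tx_bitmask;
    };

    /* Queue the TX worker unless it is already pending; mirrors line 2593. */
    static void sketch_schedule_tx(struct sketch_tx_work *tx)
    {
        if (!test_and_set_bit(BIT_TX_SCHEDULED, &tx->tx_bitmask))
            schedule_delayed_work(&tx->work, 0);
    }

    /* The worker re-arms the guard before doing any actual transmit work. */
    static void sketch_tx_work_fn(struct work_struct *w)
    {
        struct sketch_tx_work *tx =
            container_of(w, struct sketch_tx_work, work.work);

        clear_bit(BIT_TX_SCHEDULED, &tx->tx_bitmask);
        /* ... push pending TLS records to the socket here ... */
    }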

Completed in 413 milliseconds