/* SPDX-License-Identifier: BSD-3-Clause
* Copyright 2018-2022 Advanced Micro Devices, Inc.
*/
#ifndef _IONIC_RXTX_H_
#define _IONIC_RXTX_H_
#include <stdint.h>
#include "ionic_if.h"
struct ionic_rx_qcq;
struct ionic_tx_qcq;
struct rte_eth_dev;
struct rte_eth_rxconf;
struct rte_eth_rxq_info;
struct rte_eth_txconf;
struct rte_eth_txq_info;
struct rte_mbuf;
struct rte_mempool;
/*
 * Context passed through the Rx completion-servicing path.
 * "cb in" fields are supplied by the caller before the callback runs;
 * "cb out" fields are filled in by the callback.
 */
struct ionic_rx_service {
	/* cb in: caller-provided array to receive mbuf pointers */
	struct rte_mbuf **rx_pkts;
	/* cb out: number of entries actually written to rx_pkts */
	uint16_t nb_rx;
};
#define IONIC_CSUM_FLAG_MASK (IONIC_RXQ_COMP_CSUM_F_VLAN - 1)
extern const uint64_t ionic_csum_flags[IONIC_CSUM_FLAG_MASK];
extern const uint32_t ionic_ptype_table[IONIC_RXQ_COMP_PKT_TYPE_MASK];
/* ionic_rxtx.c */
int ionic_dev_rx_queue_setup(struct rte_eth_dev *dev, uint16_t rx_queue_id,
uint16_t nb_desc, uint32_t socket_id,
const struct rte_eth_rxconf *rx_conf, struct rte_mempool *mp);
void ionic_dev_rx_queue_release(struct rte_eth_dev *dev, uint16_t qid);
int ionic_dev_rx_queue_start(struct rte_eth_dev *dev, uint16_t rx_queue_id);
int ionic_dev_rx_queue_stop(struct rte_eth_dev *dev, uint16_t rx_queue_id);
int ionic_dev_tx_queue_setup(struct rte_eth_dev *dev, uint16_t tx_queue_id,
uint16_t nb_desc, uint32_t socket_id,
const struct rte_eth_txconf *tx_conf);
void ionic_dev_tx_queue_release(struct rte_eth_dev *dev, uint16_t qid);
int ionic_dev_tx_queue_start(struct rte_eth_dev *dev, uint16_t tx_queue_id);
int ionic_dev_tx_queue_stop(struct rte_eth_dev *dev, uint16_t tx_queue_id);
/* Helpers for optimized dev_start() */
int ionic_dev_rx_queue_start_firsthalf(struct rte_eth_dev *dev,
uint16_t rx_queue_id);
int ionic_dev_rx_queue_start_secondhalf(struct rte_eth_dev *dev,
uint16_t rx_queue_id);
int ionic_dev_tx_queue_start_firsthalf(struct rte_eth_dev *dev,
uint16_t tx_queue_id);
int ionic_dev_tx_queue_start_secondhalf(struct rte_eth_dev *dev,
uint16_t tx_queue_id);
/* Helpers for optimized dev_stop() */
void ionic_dev_rx_queue_stop_firsthalf(struct rte_eth_dev *dev,
uint16_t rx_queue_id);
void ionic_dev_rx_queue_stop_secondhalf(struct rte_eth_dev *dev,
uint16_t rx_queue_id);
void ionic_dev_tx_queue_stop_firsthalf(struct rte_eth_dev *dev,
uint16_t tx_queue_id);
void ionic_dev_tx_queue_stop_secondhalf(struct rte_eth_dev *dev,
uint16_t tx_queue_id);
void ionic_rxq_info_get(struct rte_eth_dev *dev, uint16_t queue_id,
struct rte_eth_rxq_info *qinfo);
void ionic_txq_info_get(struct rte_eth_dev *dev, uint16_t queue_id,
struct rte_eth_txq_info *qinfo);
int ionic_dev_rx_descriptor_status(void *rx_queue, uint16_t offset);
int ionic_dev_tx_descriptor_status(void *tx_queue, uint16_t offset);
const uint32_t *ionic_dev_supported_ptypes_get(struct rte_eth_dev *dev,
size_t *no_of_elements);
int ionic_tx_tso(struct ionic_tx_qcq *txq, struct rte_mbuf *txm);
uint16_t ionic_prep_pkts(void *tx_queue, struct rte_mbuf **tx_pkts,
uint16_t nb_pkts);
/* ionic_rxtx_simple.c */
uint16_t ionic_recv_pkts(void *rx_queue, struct rte_mbuf **rx_pkts,
uint16_t nb_pkts);
uint16_t ionic_xmit_pkts(void *tx_queue, struct rte_mbuf **tx_pkts,
uint16_t nb_pkts);
int ionic_rx_fill(struct ionic_rx_qcq *rxq);
/* ionic_rxtx_sg.c */
uint16_t ionic_recv_pkts_sg(void *rx_queue, struct rte_mbuf **rx_pkts,
uint16_t nb_pkts);
uint16_t ionic_xmit_pkts_sg(void *tx_queue, struct rte_mbuf **tx_pkts,
uint16_t nb_pkts);
int ionic_rx_fill_sg(struct ionic_rx_qcq *rxq);
static inline void
ionic_rxq_flush(struct ionic_queue *q)
{
#ifndef RTE_LIBRTE_IONIC_PMD_EMBEDDED
struct ionic_rxq_desc *desc_base = q->base;
struct ionic_rxq_desc *cmb_desc_base = q->cmb_base;
if (q->cmb_base) {
if (q->head_idx < q->cmb_head_idx) {
/* copy [cmb_head, num_descs) */
rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
(void *)&desc_base[q->cmb_head_idx],
(q->num_descs - q->cmb_head_idx) * sizeof(*desc_base));
/* copy [0, head) */
rte_memcpy((void *)&cmb_desc_base[0],
(void *)&desc_base[0],
q->head_idx * sizeof(*desc_base));
} else {
/* copy [cmb_head, head) */
rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
(void *)&desc_base[q->cmb_head_idx],
(q->head_idx - q->cmb_head_idx) * sizeof(*desc_base));
}
q->cmb_head_idx = q->head_idx;
}
#endif /* RTE_LIBRTE_IONIC_PMD_EMBEDDED */
ionic_q_flush(q);
}
static inline void
ionic_txq_flush(struct ionic_queue *q)
{
#ifndef RTE_LIBRTE_IONIC_PMD_EMBEDDED
struct ionic_txq_desc *desc_base = q->base;
struct ionic_txq_desc *cmb_desc_base = q->cmb_base;
if (q->cmb_base) {
if (q->head_idx < q->cmb_head_idx) {
/* copy [cmb_head, num_descs) */
rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
(void *)&desc_base[q->cmb_head_idx],
(q->num_descs - q->cmb_head_idx) * sizeof(*desc_base));
/* copy [0, head) */
rte_memcpy((void *)&cmb_desc_base[0],
(void *)&desc_base[0],
q->head_idx * sizeof(*desc_base));
} else {
/* copy [cmb_head, head) */
rte_memcpy((void *)&cmb_desc_base[q->cmb_head_idx],
(void *)&desc_base[q->cmb_head_idx],
(q->head_idx - q->cmb_head_idx) * sizeof(*desc_base));
}
q->cmb_head_idx = q->head_idx;
}
#endif /* RTE_LIBRTE_IONIC_PMD_EMBEDDED */
ionic_q_flush(q);
}
#endif /* _IONIC_RXTX_H_ */