Index: sys/net/iflib.c
===================================================================
--- sys/net/iflib.c	(revision 358099)
+++ sys/net/iflib.c	(working copy)
@@ -1987,7 +1987,12 @@ _iflib_fl_refill(if_ctx_t ctx, iflib_fl_t fl, int
 		 * We allocate an uninitialized mbuf + cluster, mbuf is
 		 * initialized after rx.
 		 *
-		 * If the cluster is still set then we know a minimum sized packet was received
+		 * If the cluster is still set then we know a minimum sized
+		 * packet was received
+		 *
+		 * NB: frag_idx is the latest used index, so start the search
+		 * from the next index so as to never re-use it, even if it
+		 * got freed since the last refill.
 		 */
 		bit_ffc_at(fl->ifl_rx_bitmap, frag_idx, fl->ifl_size,
 		    &frag_idx);
@@ -2039,8 +2044,9 @@ _iflib_fl_refill(if_ctx_t ctx, iflib_fl_t fl, int
 		fl->ifl_bus_addrs[i] = bus_addr;
 		fl->ifl_vm_addrs[i] = cl;
 		credits++;
+		MPASS(credits <= fl->ifl_size);
 		i++;
-		MPASS(credits <= fl->ifl_size);
+		frag_idx++;
 		if (++idx == fl->ifl_size) {
 			fl->ifl_gen = 1;
 			idx = 0;
@@ -2515,6 +2521,19 @@ prefetch_pkts(iflib_fl_t fl, int cidx)
 	prefetch(fl->ifl_sds.ifsd_cl[(cidx + 4) & (nrxd-1)]);
 }
 
+static inline void
+advance_fl_cidx(iflib_fl_t fl)
+{
+	int cidx;
+
+	cidx = fl->ifl_cidx;
+	fl->ifl_cidx = (fl->ifl_cidx + 1) & (fl->ifl_size - 1);
+	if (__predict_false(fl->ifl_cidx == 0))
+		fl->ifl_gen = 0;
+	bit_clear(fl->ifl_rx_bitmap, cidx);
+	fl->ifl_credits--;
+}
+
 static struct mbuf *
 rxd_frag_to_sd(iflib_rxq_t rxq, if_rxd_frag_t irf, bool unload, if_rxsd_t sd,
     int *pf_rv, if_rxd_info_t ri)
@@ -2533,10 +2552,22 @@ rxd_frag_to_sd(iflib_rxq_t rxq, if_rxd_frag_t irf,
 	sd->ifsd_cidx = cidx;
 	m = fl->ifl_sds.ifsd_m[cidx];
 	sd->ifsd_cl = &fl->ifl_sds.ifsd_cl[cidx];
-	fl->ifl_credits--;
 #if MEMORY_LOGGING
 	fl->ifl_m_dequeued++;
 #endif
+
+	/*
+	 * Some drivers, e.g., vmxnet3, can skip some descriptors
+	 * with no data before returning one or more valid fragments.
+	 * So, we need to skip / consume the matching entries in the
+	 * free list as well.
+	 *
+	 * This probably won't play well with bxe if it indeed does SGE
+	 * from non-contiguous elements.
+	 */
+	while (__predict_false(fl->ifl_cidx != cidx))
+		advance_fl_cidx(fl);
+
 	if (rxq->ifr_ctx->ifc_flags & IFC_PREFETCH)
 		prefetch_pkts(fl, cidx);
 	next = (cidx + CACHE_PTR_INCREMENT) & (fl->ifl_size-1);
@@ -2544,8 +2575,6 @@ rxd_frag_to_sd(iflib_rxq_t rxq, if_rxd_frag_t irf,
 	map = fl->ifl_sds.ifsd_map[cidx];
 	next = (cidx + CACHE_LINE_SIZE) & (fl->ifl_size-1);
 
-	/* not valid assert if bxe really does SGE from non-contiguous elements */
-	MPASS(fl->ifl_cidx == cidx);
 	bus_dmamap_sync(fl->ifl_buf_tag, map, BUS_DMASYNC_POSTREAD);
 
 	if (rxq->pfil != NULL && PFIL_HOOKED_IN(rxq->pfil) && pf_rv != NULL) {
@@ -2587,10 +2616,7 @@ rxd_frag_to_sd(iflib_rxq_t rxq, if_rxd_frag_t irf,
 
 	if (unload)
 		bus_dmamap_unload(fl->ifl_buf_tag, map);
-	fl->ifl_cidx = (fl->ifl_cidx + 1) & (fl->ifl_size-1);
-	if (__predict_false(fl->ifl_cidx == 0))
-		fl->ifl_gen = 0;
-	bit_clear(fl->ifl_rx_bitmap, cidx);
+	advance_fl_cidx(fl);
 	return (m);
 }
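
For reference, below is a minimal, self-contained sketch of the catch-up logic this patch introduces; it is not iflib code. It models a power-of-two ring whose consumer index must be advanced past descriptors the device completed with no data, so that the in-use bitmap and the credit count stay in sync with the hardware. The struct ring, ring_advance() and ring_catch_up() names are illustrative, and a plain bool array stands in for the bitstring(3) bitmap that iflib uses.

/*
 * Minimal, self-contained sketch of the free-list catch-up logic; this
 * is NOT iflib code.  struct ring, ring_advance() and ring_catch_up()
 * are illustrative names, and a plain bool array stands in for the
 * bitstring(3) bitmap (fl->ifl_rx_bitmap).
 */
#include <stdbool.h>
#include <stdio.h>

#define RING_SIZE 8	/* must be a power of two, like fl->ifl_size */

struct ring {
	unsigned int cidx;	/* consumer index (fl->ifl_cidx) */
	unsigned int gen;	/* generation flag, cleared on wrap */
	unsigned int credits;	/* posted buffers (fl->ifl_credits) */
	bool inuse[RING_SIZE];	/* stand-in for fl->ifl_rx_bitmap */
};

/* Consume exactly one ring entry, mirroring advance_fl_cidx(). */
static void
ring_advance(struct ring *r)
{
	unsigned int cidx = r->cidx;

	r->cidx = (r->cidx + 1) & (RING_SIZE - 1);
	if (r->cidx == 0)
		r->gen = 0;
	r->inuse[cidx] = false;
	r->credits--;
}

/*
 * Mirror the new while loop in rxd_frag_to_sd(): if the device reports
 * a completion at hw_cidx ahead of our consumer index, the descriptors
 * in between carried no data and must still be consumed.
 */
static void
ring_catch_up(struct ring *r, unsigned int hw_cidx)
{
	while (r->cidx != hw_cidx)
		ring_advance(r);
}

int
main(void)
{
	struct ring r = { .cidx = 0, .gen = 1, .credits = RING_SIZE };

	for (int i = 0; i < RING_SIZE; i++)
		r.inuse[i] = true;

	/* The device skipped descriptors 0 and 1 and completed one at 2. */
	ring_catch_up(&r, 2);
	ring_advance(&r);	/* consume the completed descriptor itself */

	printf("cidx=%u credits=%u\n", r.cidx, r.credits);
	return (0);
}

Built with cc -std=c99, this prints "cidx=3 credits=5", mirroring the effect that advance_fl_cidx() and the new while loop in rxd_frag_to_sd() have on fl->ifl_cidx and fl->ifl_credits when empty descriptors are skipped.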