* descriptors for the same frame has to be set before, to
* avoid race condition.
*/
- wmb();
+ dma_wmb();
p->des3 = cpu_to_le32(tdes3);
}
* descriptors for the same frame has to be set before, to
* avoid race condition.
*/
- wmb();
+ dma_wmb();
p->des3 = cpu_to_le32(tdes3);
}
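Both hunks above implement the same handoff: every descriptor field the engine will read is written first, dma_wmb() then orders those stores with respect to the device, and only afterwards is des3 written with the OWN bit, so the DMA engine can never fetch a half-initialized descriptor. A minimal sketch of that pattern follows; the descriptor layout, the OWN-bit position and all names are illustrative assumptions, not the driver's actual code.

#include <linux/bitops.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/byteorder.h>

#define DEMO_DESC_OWN	BIT(31)		/* assumed OWN-bit position, illustration only */

struct demo_desc {			/* stand-in for the hardware descriptor */
	__le32 des0;
	__le32 des1;
	__le32 des2;
	__le32 des3;
};

static void demo_close_tx_desc(struct demo_desc *p, u32 buf, u32 len, u32 tdes3)
{
	p->des0 = cpu_to_le32(buf);	/* buffer address */
	p->des2 = cpu_to_le32(len);	/* buffer length / control bits */

	/* All fields the engine reads must be visible before OWN is set. */
	dma_wmb();

	p->des3 = cpu_to_le32(tdes3 | DEMO_DESC_OWN);
}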
* descriptor and then barrier is needed to make sure that
* all is coherent before granting the DMA engine.
*/
- smp_wmb();
+ dma_wmb();
if (netif_msg_pktdata(priv)) {
pr_info("%s: curr=%d dirty=%d f=%d, e=%d, f_p=%p, nfrags %d\n",
* descriptor and then barrier is needed to make sure that
* all is coherent before granting the DMA engine.
*/
- smp_wmb();
+ dma_wmb();
}
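Here the frame spans several descriptors and ownership of the first one is flipped only after all of them are complete. smp_wmb() was the wrong tool for that: it only orders stores as seen by other CPUs and can degrade to a plain compiler barrier on !SMP builds, while the DMA engine observes memory as a device. A hedged sketch of the grant step, reusing the hypothetical demo_desc and DEMO_DESC_OWN from the previous snippet:

#include <linux/io.h>
#include <asm/barrier.h>

static void demo_grant_frame(struct demo_desc *ring, unsigned int first,
			     void __iomem *tail_reg, u32 tail)
{
	/* The fragment descriptors that follow 'first' are assumed to be
	 * fully written at this point.
	 *
	 * dma_wmb() orders those coherent-memory stores against the
	 * device; it is sufficient here and cheaper than a full wmb().
	 */
	dma_wmb();

	/* Flip OWN on the first descriptor last, so the engine can never
	 * see a partially built frame. */
	ring[first].des3 |= cpu_to_le32(DEMO_DESC_OWN);

	/* The doorbell is an MMIO store; writel() guarantees that the
	 * preceding memory writes are visible to the device before it. */
	writel(tail, tail_reg);
}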
netdev_sent_queue(dev, skb->len);
netif_dbg(priv, rx_status, priv->dev,
"refill entry #%d\n", entry);
}
- wmb();
+ dma_wmb();
if (unlikely(priv->synopsys_id >= DWMAC_CORE_4_00))
priv->hw->desc->init_rx_desc(p, priv->use_riwt, 0, 0);
else
priv->hw->desc->set_rx_owner(p);
- wmb();
+ dma_wmb();
entry = STMMAC_GET_ENTRY(entry, DMA_RX_SIZE);
}
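The RX refill path needs the device-ordering barrier twice: once so the freshly attached buffer address reaches the descriptor before ownership returns to the hardware, and once so the ownership write itself is ordered before any later store that could prompt the engine to fetch the descriptor. A rough sketch, again with the hypothetical demo_desc layout:

#include <asm/barrier.h>

static void demo_rx_refill_one(struct demo_desc *p, u32 buf)
{
	p->des2 = cpu_to_le32(buf);	/* freshly mapped receive buffer */

	/* The new buffer address must be visible before OWN is handed back. */
	dma_wmb();

	p->des3 = cpu_to_le32(DEMO_DESC_OWN);

	/* Order the ownership transfer before anything that follows, e.g.
	 * a receive-poll demand written to the DMA registers. */
	dma_wmb();
}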