		hwbug = 1;

	if (tg3_flag(tp, 4K_FIFO_LIMIT)) {
+		u32 prvidx = *entry;
		u32 tmp_flag = flags & ~TXD_FLAG_END;
-		while (len > TG3_TX_BD_DMA_MAX) {
+		while (len > TG3_TX_BD_DMA_MAX && *budget) {
			u32 frag_len = TG3_TX_BD_DMA_MAX;
			len -= TG3_TX_BD_DMA_MAX;

-			if (len) {
-				tnapi->tx_buffers[*entry].fragmented = true;
-				/* Avoid the 8byte DMA problem */
-				if (len <= 8) {
-					len += TG3_TX_BD_DMA_MAX / 2;
-					frag_len = TG3_TX_BD_DMA_MAX / 2;
-				}
-			} else
-				tmp_flag = flags;
-
-			if (*budget) {
-				tg3_tx_set_bd(&tnapi->tx_ring[*entry], map,
-					      frag_len, tmp_flag, mss, vlan);
-				(*budget)--;
-				*entry = NEXT_TX(*entry);
-			} else {
-				hwbug = 1;
-				break;
+			/* Avoid the 8byte DMA problem */
+			if (len <= 8) {
+				len += TG3_TX_BD_DMA_MAX / 2;
+				frag_len = TG3_TX_BD_DMA_MAX / 2;
			}
+			tnapi->tx_buffers[*entry].fragmented = true;
+
+			tg3_tx_set_bd(&tnapi->tx_ring[*entry], map,
+				      frag_len, tmp_flag, mss, vlan);
+			*budget -= 1;
+			prvidx = *entry;
+			*entry = NEXT_TX(*entry);
+
			map += frag_len;
		}

		if (len) {
			if (*budget) {
				tg3_tx_set_bd(&tnapi->tx_ring[*entry], map,
					      len, flags, mss, vlan);
-				(*budget)--;
+				*budget -= 1;
				*entry = NEXT_TX(*entry);
			} else {
				hwbug = 1;
+				tnapi->tx_buffers[prvidx].fragmented = false;
			}
		}
	} else {
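The hunk above is in tg3_tx_frag_set(). The budget test moves into the loop condition, each intermediate BD now charges *budget as it is written, and prvidx records the last BD actually posted; if the budget runs out before the tail of the mapping can be posted, the error path clears that BD's fragmented flag before reporting the hwbug, so a half-posted chain is not left behind for the unwind code. The following stand-alone sketch illustrates the splitting policy only; it is not driver code, TG3_TX_BD_DMA_MAX is assumed here to be 4096, and printf() stands in for tg3_tx_set_bd():

#include <stdbool.h>
#include <stdio.h>

#define TG3_TX_BD_DMA_MAX	4096	/* assumed value */

/* Returns true when the BD budget runs out before the mapping is fully posted. */
static bool split_into_bds(unsigned int len, unsigned int *budget)
{
	bool hwbug = false;

	while (len > TG3_TX_BD_DMA_MAX && *budget) {
		unsigned int frag_len = TG3_TX_BD_DMA_MAX;

		len -= TG3_TX_BD_DMA_MAX;

		/* Never leave a remainder of 8 bytes or less (the 8-byte DMA problem). */
		if (len <= 8) {
			len += TG3_TX_BD_DMA_MAX / 2;
			frag_len = TG3_TX_BD_DMA_MAX / 2;
		}

		printf("intermediate BD: %u bytes\n", frag_len);
		*budget -= 1;
	}

	if (len) {
		if (*budget) {
			printf("final BD: %u bytes\n", len);
			*budget -= 1;
		} else {
			hwbug = true;	/* ran out of descriptors mid-packet */
		}
	}

	return hwbug;
}

int main(void)
{
	unsigned int budget = 3;

	/* 8200 bytes becomes 4096 + 2048 + 2056: a plain 4096 + 4096 split
	 * would leave an 8-byte tail, so the second BD is halved instead.
	 */
	if (split_into_bds(8200, &budget))
		printf("would hit hwbug; caller falls back to the workaround path\n");
	return 0;
}

The next hunk appears to be from the skb-copy fallback, tigon3_dma_hwbug_workaround():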
			dev_kfree_skb(new_skb);
			ret = -1;
		} else {
+			u32 save_entry = *entry;
+
			base_flags |= TXD_FLAG_END;
			tnapi->tx_buffers[*entry].skb = new_skb;
			if (tg3_tx_frag_set(tnapi, entry, budget, new_addr,
					    new_skb->len, base_flags,
					    mss, vlan)) {
-				tg3_tx_skb_unmap(tnapi, *entry, 0);
+				tg3_tx_skb_unmap(tnapi, save_entry, 0);
				dev_kfree_skb(new_skb);
				ret = -1;
			}
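In the hunk above, tg3_tx_frag_set() advances *entry for every BD it posts, so when it fails, *entry already points past the descriptors that were written and the old unmap call cleaned up the wrong ring slot. Saving the starting index in save_entry before the call, and unmapping from there, keeps the error path anchored to the descriptor that actually holds new_skb. The last hunk is from the fragment-mapping loop in tg3_start_xmit():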
			if (dma_mapping_error(&tp->pdev->dev, mapping))
				goto dma_error;

-			if (tg3_tx_frag_set(tnapi, &entry, &budget, mapping,
+			if (!budget ||
+			    tg3_tx_frag_set(tnapi, &entry, &budget, mapping,
					    len, base_flags |
					    ((i == last) ? TXD_FLAG_END : 0),
-					    tmp_mss, vlan))
+					    tmp_mss, vlan)) {
				would_hit_hwbug = 1;
+				break;
+			}
		}
	}
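This final hunk is the caller side of the budget: tg3_tx_frag_set() decrements *budget for each BD it consumes, so once the budget reaches zero there is no point mapping further fragments. Guarding the call with !budget and breaking out lets tg3_start_xmit() treat exhaustion like any other would-hit-hwbug case and fall back to the workaround path instead of walking the remaining fragments. A minimal sketch of that caller pattern, with hypothetical helpers post_frag() and post_frags() standing in for tg3_tx_frag_set() and the ring loop, and the same assumed 4K-per-BD limit as above:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-in for tg3_tx_frag_set(): charges the shared budget for
 * the BDs one fragment would need; returns true when not enough BDs are left.
 */
static bool post_frag(unsigned int len, unsigned int *budget)
{
	unsigned int bds = (len + 4095) / 4096;	/* assumed 4K BD limit */

	if (*budget < bds)
		return true;
	*budget -= bds;
	return false;
}

/* Walk the fragments, but stop as soon as the budget is gone. */
static bool post_frags(const unsigned int *frag_len, size_t nfrags,
		       unsigned int budget)
{
	bool would_hit_hwbug = false;
	size_t i;

	for (i = 0; i < nfrags; i++) {
		/* Mirrors the new "!budget || tg3_tx_frag_set(...)" guard. */
		if (!budget || post_frag(frag_len[i], &budget)) {
			would_hit_hwbug = true;
			break;	/* caller falls back to the copy/linearize path */
		}
	}

	return would_hit_hwbug;
}

int main(void)
{
	unsigned int frags[] = { 6000, 3000, 1500 };

	if (post_frags(frags, 3, 3))
		printf("out of BDs: set would_hit_hwbug and break\n");
	return 0;
}

The common thread in all three hunks is that the BD budget is now authoritative: both the segmentation helper and its callers stop as soon as it is exhausted, and the error paths unwind from the right descriptor.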