{
int deferred;
int is_rdma_aligned = 1;
+ struct iser_regd_buf *regd;
/* if we were reading, copy back to unaligned sglist,
* anyway dma_unmap and free the copy
*/
if (iser_ctask->dir[ISER_DIR_IN]) {
- deferred = iser_regd_buff_release
- (&iser_ctask->rdma_regd[ISER_DIR_IN]);
+ regd = &iser_ctask->rdma_regd[ISER_DIR_IN];
+ deferred = iser_regd_buff_release(regd);
if (deferred) {
- iser_err("References remain for BUF-IN rdma reg\n");
- BUG();
+ iser_err("%d references remain for BUF-IN rdma reg\n",
+ atomic_read(&regd->ref_count));
}
}
if (iser_ctask->dir[ISER_DIR_OUT]) {
- deferred = iser_regd_buff_release
- (&iser_ctask->rdma_regd[ISER_DIR_OUT]);
+ regd = &iser_ctask->rdma_regd[ISER_DIR_OUT];
+ deferred = iser_regd_buff_release(regd);
if (deferred) {
- iser_err("References remain for BUF-OUT rdma reg\n");
- BUG();
+ iser_err("%d references remain for BUF-OUT rdma reg\n",
+ atomic_read(&regd->ref_count));
}
}
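
For context, the release call in this patch returns nonzero when other holders still reference the registered buffer, so the caller now logs the remaining count instead of hitting BUG(). Below is a minimal sketch of that deferred-release refcount pattern in plain C11; the names regd_buf and regd_buf_release and the stdatomic-based counter are illustrative stand-ins, not the driver's actual iser_regd_buf / iser_regd_buff_release definitions.

#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

struct regd_buf {
	atomic_int ref_count;	/* holders of this registered buffer */
	void *mem;		/* the registered memory itself */
};

/* Drop one reference. Frees the buffer only when the count reaches
 * zero; otherwise returns the number of references that remain,
 * telling the caller the actual release has been deferred. */
static int regd_buf_release(struct regd_buf *regd)
{
	int remaining = atomic_fetch_sub(&regd->ref_count, 1) - 1;

	if (remaining == 0) {
		free(regd->mem);
		regd->mem = NULL;
	}
	return remaining;
}

int main(void)
{
	struct regd_buf buf = { .mem = malloc(64) };

	atomic_init(&buf.ref_count, 2);	/* e.g. task + in-flight RDMA op */

	if (regd_buf_release(&buf))	/* first drop: an op still holds it */
		fprintf(stderr, "%d references remain\n",
			atomic_load(&buf.ref_count));

	regd_buf_release(&buf);		/* last holder: memory is freed here */
	return 0;
}

The point the patch leans on is that a nonzero return is not an error in itself, just a deferred release; warning with the live count is more useful for debugging than crashing the kernel.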