Commit d800c93b authored by Wenpeng Liang's avatar Wenpeng Liang Committed by Jason Gunthorpe

RDMA/hns: Replace custom macros HNS_ROCE_ALIGN_UP

HNS_ROCE_ALIGN_UP can be replaced by round_up() which is defined in
kernel.h.

Link: https://lore.kernel.org/r/1578313276-29080-7-git-send-email-liweihang@huawei.com
Signed-off-by: Wenpeng Liang <liangwenpeng@huawei.com>
Signed-off-by: Weihang Li <liweihang@huawei.com>
Signed-off-by: Jason Gunthorpe <jgg@mellanox.com>
parent 0c53426c
...@@ -45,8 +45,6 @@ ...@@ -45,8 +45,6 @@
#define HNS_ROCE_MAX_MSG_LEN 0x80000000 #define HNS_ROCE_MAX_MSG_LEN 0x80000000
#define HNS_ROCE_ALIGN_UP(a, b) ((((a) + (b) - 1) / (b)) * (b))
#define HNS_ROCE_IB_MIN_SQ_STRIDE 6 #define HNS_ROCE_IB_MIN_SQ_STRIDE 6
#define HNS_ROCE_BA_SIZE (32 * 4096) #define HNS_ROCE_BA_SIZE (32 * 4096)
......
...@@ -393,40 +393,38 @@ static int hns_roce_set_user_sq_size(struct hns_roce_dev *hr_dev, ...@@ -393,40 +393,38 @@ static int hns_roce_set_user_sq_size(struct hns_roce_dev *hr_dev,
/* Get buf size, SQ and RQ are aligned to page_size */ /* Get buf size, SQ and RQ are aligned to page_size */
if (hr_dev->caps.max_sq_sg <= 2) { if (hr_dev->caps.max_sq_sg <= 2) {
hr_qp->buff_size = HNS_ROCE_ALIGN_UP((hr_qp->rq.wqe_cnt << hr_qp->buff_size = round_up((hr_qp->rq.wqe_cnt <<
hr_qp->rq.wqe_shift), PAGE_SIZE) + hr_qp->rq.wqe_shift), PAGE_SIZE) +
HNS_ROCE_ALIGN_UP((hr_qp->sq.wqe_cnt << round_up((hr_qp->sq.wqe_cnt <<
hr_qp->sq.wqe_shift), PAGE_SIZE); hr_qp->sq.wqe_shift), PAGE_SIZE);
hr_qp->sq.offset = 0; hr_qp->sq.offset = 0;
hr_qp->rq.offset = HNS_ROCE_ALIGN_UP((hr_qp->sq.wqe_cnt << hr_qp->rq.offset = round_up((hr_qp->sq.wqe_cnt <<
hr_qp->sq.wqe_shift), PAGE_SIZE); hr_qp->sq.wqe_shift), PAGE_SIZE);
} else { } else {
page_size = 1 << (hr_dev->caps.mtt_buf_pg_sz + PAGE_SHIFT); page_size = 1 << (hr_dev->caps.mtt_buf_pg_sz + PAGE_SHIFT);
hr_qp->sge.sge_cnt = ex_sge_num ? hr_qp->sge.sge_cnt = ex_sge_num ?
max(page_size / (1 << hr_qp->sge.sge_shift), ex_sge_num) : 0; max(page_size / (1 << hr_qp->sge.sge_shift), ex_sge_num) : 0;
hr_qp->buff_size = HNS_ROCE_ALIGN_UP((hr_qp->rq.wqe_cnt << hr_qp->buff_size = round_up((hr_qp->rq.wqe_cnt <<
hr_qp->rq.wqe_shift), page_size) + hr_qp->rq.wqe_shift), page_size) +
HNS_ROCE_ALIGN_UP((hr_qp->sge.sge_cnt << round_up((hr_qp->sge.sge_cnt <<
hr_qp->sge.sge_shift), page_size) + hr_qp->sge.sge_shift), page_size) +
HNS_ROCE_ALIGN_UP((hr_qp->sq.wqe_cnt << round_up((hr_qp->sq.wqe_cnt <<
hr_qp->sq.wqe_shift), page_size); hr_qp->sq.wqe_shift), page_size);
hr_qp->sq.offset = 0; hr_qp->sq.offset = 0;
if (ex_sge_num) { if (ex_sge_num) {
hr_qp->sge.offset = HNS_ROCE_ALIGN_UP( hr_qp->sge.offset = round_up((hr_qp->sq.wqe_cnt <<
(hr_qp->sq.wqe_cnt << hr_qp->sq.wqe_shift),
hr_qp->sq.wqe_shift), page_size);
page_size);
hr_qp->rq.offset = hr_qp->sge.offset + hr_qp->rq.offset = hr_qp->sge.offset +
HNS_ROCE_ALIGN_UP((hr_qp->sge.sge_cnt << round_up((hr_qp->sge.sge_cnt <<
hr_qp->sge.sge_shift), hr_qp->sge.sge_shift),
page_size); page_size);
} else { } else {
hr_qp->rq.offset = HNS_ROCE_ALIGN_UP( hr_qp->rq.offset = round_up((hr_qp->sq.wqe_cnt <<
(hr_qp->sq.wqe_cnt << hr_qp->sq.wqe_shift),
hr_qp->sq.wqe_shift), page_size);
page_size);
} }
} }
...@@ -593,20 +591,18 @@ static int hns_roce_set_kernel_sq_size(struct hns_roce_dev *hr_dev, ...@@ -593,20 +591,18 @@ static int hns_roce_set_kernel_sq_size(struct hns_roce_dev *hr_dev,
/* Get buf size, SQ and RQ are aligned to PAGE_SIZE */ /* Get buf size, SQ and RQ are aligned to PAGE_SIZE */
page_size = 1 << (hr_dev->caps.mtt_buf_pg_sz + PAGE_SHIFT); page_size = 1 << (hr_dev->caps.mtt_buf_pg_sz + PAGE_SHIFT);
hr_qp->sq.offset = 0; hr_qp->sq.offset = 0;
size = HNS_ROCE_ALIGN_UP(hr_qp->sq.wqe_cnt << hr_qp->sq.wqe_shift, size = round_up(hr_qp->sq.wqe_cnt << hr_qp->sq.wqe_shift, page_size);
page_size);
if (hr_dev->caps.max_sq_sg > 2 && hr_qp->sge.sge_cnt) { if (hr_dev->caps.max_sq_sg > 2 && hr_qp->sge.sge_cnt) {
hr_qp->sge.sge_cnt = max(page_size/(1 << hr_qp->sge.sge_shift), hr_qp->sge.sge_cnt = max(page_size/(1 << hr_qp->sge.sge_shift),
(u32)hr_qp->sge.sge_cnt); (u32)hr_qp->sge.sge_cnt);
hr_qp->sge.offset = size; hr_qp->sge.offset = size;
size += HNS_ROCE_ALIGN_UP(hr_qp->sge.sge_cnt << size += round_up(hr_qp->sge.sge_cnt << hr_qp->sge.sge_shift,
hr_qp->sge.sge_shift, page_size); page_size);
} }
hr_qp->rq.offset = size; hr_qp->rq.offset = size;
size += HNS_ROCE_ALIGN_UP((hr_qp->rq.wqe_cnt << hr_qp->rq.wqe_shift), size += round_up((hr_qp->rq.wqe_cnt << hr_qp->rq.wqe_shift), page_size);
page_size);
hr_qp->buff_size = size; hr_qp->buff_size = size;
/* Get wr and sge number which send */ /* Get wr and sge number which send */
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment