sched: fix sched DL RBG allocation for subband CQI

Branch: master
Francisco Paisana · 3 years ago
parent 1f118aa238
commit ce884ee4c6

@@ -168,7 +168,7 @@ public:
   /// Get CQI of given subband index
   int get_subband_cqi(uint32_t subband_index) const
   {
-    if (subband_cqi_enabled()) {
+    if (not subband_cqi_enabled()) {
       return get_wb_cqi_info();
     }
     return bp_list[get_bp_index(subband_index)].last_feedback_tti.is_valid() ? subband_cqi[subband_index] : wb_cqi_avg;
@@ -226,14 +226,11 @@ private:
   srsran::bounded_vector<float, max_nof_subbands> subband_cqi;
 };

-/// Get {RBG index, CQI} tuple which correspond to the set RBG with the lowest CQI
-std::tuple<uint32_t, int> find_min_cqi_rbg(const rbgmask_t& mask, const sched_dl_cqi& dl_cqi);
-
-/// Returns the same RBG mask, but with the RBGs of the subband with the lowest CQI reset
-rbgmask_t remove_min_cqi_subband(const rbgmask_t& rbgmask, const sched_dl_cqi& dl_cqi);
-
-/// Returns the same RBG mask, but with the RBG with the lowest CQI reset
-rbgmask_t remove_min_cqi_rbg(const rbgmask_t& rbgmask, const sched_dl_cqi& dl_cqi);
+/// Get {RBG indexes, CQI} which correspond to the set RBGs with the lowest CQI
+rbgmask_t find_min_cqi_rbgs(const rbgmask_t& mask, const sched_dl_cqi& dl_cqi, int& cqi);
+
+/// Returns the same RBG mask, but with the RBGs with the lowest CQI reset
+rbgmask_t remove_min_cqi_rbgs(const rbgmask_t& rbgmask, const sched_dl_cqi& dl_cqi);

 } // namespace srsenb
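The one-word change in get_subband_cqi above is the heart of the fix: with the old condition, a UE configured for subband CQI reporting always got the wideband value back instead of its per-subband estimate. A minimal standalone sketch of the corrected fallback behaviour (mock type and values, not srsRAN code):

#include <cstdint>
#include <iostream>
#include <vector>

struct SubbandCqiMock {
  bool             subband_enabled = false; // whether periodic subband reports are configured
  int              wb_cqi          = 7;     // last wideband CQI
  std::vector<int> sb_cqi;                  // last per-subband CQIs (only valid when enabled)

  int get_subband_cqi(uint32_t sb) const
  {
    // Corrected logic: fall back to the wideband CQI only when subband CQI is NOT enabled.
    if (not subband_enabled) {
      return wb_cqi;
    }
    return sb_cqi[sb];
  }
};

int main()
{
  SubbandCqiMock cqi;
  std::cout << cqi.get_subband_cqi(2) << "\n"; // 7: wideband fallback

  cqi.subband_enabled = true;
  cqi.sb_cqi          = {15, 15, 1, 12};
  std::cout << cqi.get_subband_cqi(2) << "\n"; // 1: the per-subband value is now used
}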

@@ -51,55 +51,47 @@ rbgmask_t sched_dl_cqi::get_optim_rbgmask(const rbgmask_t& dl_mask, uint32_t req
   return emptymask;
 }

-std::tuple<uint32_t, int> find_min_cqi_rbg(const rbgmask_t& mask, const sched_dl_cqi& dl_cqi)
+rbgmask_t find_min_cqi_rbgs(const rbgmask_t& mask, const sched_dl_cqi& dl_cqi, int& min_cqi)
 {
   if (mask.none()) {
-    return std::make_tuple(mask.size(), -1);
+    min_cqi = -1;
+    return mask;
   }
-  int rbg = mask.find_lowest(0, mask.size());
   if (not dl_cqi.subband_cqi_enabled()) {
-    return std::make_tuple(rbg, dl_cqi.get_wb_cqi_info());
+    min_cqi = dl_cqi.get_wb_cqi_info();
+    return mask;
   }
-  int      min_cqi = std::numeric_limits<int>::max();
-  uint32_t min_rbg = mask.size();
+  rbgmask_t min_mask(mask.size());
+  int       rbg = mask.find_lowest(0, mask.size());
+  min_cqi = std::numeric_limits<int>::max();
   for (; rbg != -1; rbg = mask.find_lowest(rbg, mask.size())) {
     uint32_t sb  = dl_cqi.rbg_to_sb_index(rbg);
     int      cqi = dl_cqi.get_subband_cqi(sb);
     if (cqi < min_cqi) {
       min_cqi = cqi;
-      min_rbg = rbg;
+      min_mask.reset();
+      min_mask.set(rbg);
+    } else if (cqi == min_cqi) {
+      min_mask.set(rbg);
     }
-    rbg = (int)srsran::ceil_div((sb + 1U) * mask.size(), dl_cqi.nof_subbands()); // skip to next subband index
+    rbg++;
   }
-  return min_cqi != std::numeric_limits<int>::max() ? std::make_tuple(min_rbg, min_cqi) : std::make_tuple(0u, -1);
-}
-
-rbgmask_t remove_min_cqi_subband(const rbgmask_t& rbgmask, const sched_dl_cqi& dl_cqi)
-{
-  std::tuple<uint32_t, int> tup = find_min_cqi_rbg(rbgmask, dl_cqi);
-  if (std::get<1>(tup) < 0) {
-    return rbgmask_t(rbgmask.size());
-  }
-  uint32_t sb        = dl_cqi.rbg_to_sb_index(std::get<0>(tup));
-  uint32_t rbg_begin = sb * rbgmask.size() / dl_cqi.nof_subbands();
-  uint32_t rbg_end   = srsran::ceil_div((sb + 1) * rbgmask.size(), dl_cqi.nof_subbands());
-
-  rbgmask_t ret(rbgmask);
-  ret.fill(rbg_begin, rbg_end, false);
-  return ret;
+  min_cqi = min_cqi == std::numeric_limits<int>::max() ? -1 : min_cqi;
+
+  return min_mask;
 }

-rbgmask_t remove_min_cqi_rbg(const rbgmask_t& rbgmask, const sched_dl_cqi& dl_cqi)
+rbgmask_t remove_min_cqi_rbgs(const rbgmask_t& rbgmask, const sched_dl_cqi& dl_cqi)
 {
-  std::tuple<uint32_t, int> tup = find_min_cqi_rbg(rbgmask, dl_cqi);
-  if (std::get<1>(tup) < 0) {
-    return rbgmask_t(rbgmask.size());
-  }
-  rbgmask_t ret(rbgmask);
-  ret.set(std::get<0>(tup), false);
-  return ret;
+  int       min_cqi;
+  rbgmask_t minmask = find_min_cqi_rbgs(rbgmask, dl_cqi, min_cqi);
+  if (min_cqi < 0) {
+    return minmask;
+  }
+  minmask = ~minmask & rbgmask;
+  return minmask;
 }

 } // namespace srsenb
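For reference, a self-contained sketch of what the reworked helpers do, using plain standard containers instead of rbgmask_t and a flat per-RBG CQI array instead of the subband lookup (all names here are illustrative, not srsRAN code): every allocated RBG tied at the minimum CQI is collected into the returned mask, and removal then clears them all in one step rather than a single RBG or a whole subband.

#include <cstddef>
#include <iostream>
#include <limits>
#include <vector>

using Mask = std::vector<bool>; // one flag per RBG

Mask find_min_cqi_rbgs(const Mask& mask, const std::vector<int>& rbg_cqi, int& min_cqi)
{
  Mask min_mask(mask.size(), false);
  min_cqi = std::numeric_limits<int>::max();
  for (std::size_t rbg = 0; rbg < mask.size(); ++rbg) {
    if (not mask[rbg]) {
      continue;
    }
    if (rbg_cqi[rbg] < min_cqi) {
      min_cqi = rbg_cqi[rbg];
      min_mask.assign(mask.size(), false); // new minimum found: restart the set
      min_mask[rbg] = true;
    } else if (rbg_cqi[rbg] == min_cqi) {
      min_mask[rbg] = true;                // tie: keep collecting RBGs at the minimum
    }
  }
  if (min_cqi == std::numeric_limits<int>::max()) {
    min_cqi = -1; // no allocated RBG
  }
  return min_mask;
}

Mask remove_min_cqi_rbgs(const Mask& mask, const std::vector<int>& rbg_cqi)
{
  int  min_cqi;
  Mask min_mask = find_min_cqi_rbgs(mask, rbg_cqi, min_cqi);
  if (min_cqi < 0) {
    return min_mask; // empty input mask stays empty
  }
  Mask out(mask);
  for (std::size_t i = 0; i < out.size(); ++i) {
    out[i] = out[i] and not min_mask[i]; // same idea as (~min_mask & mask)
  }
  return out;
}

int main()
{
  std::vector<int> cqi  = {15, 15, 3, 3, 12, 12};
  Mask             mask = {true, true, true, true, true, true};
  Mask             out  = remove_min_cqi_rbgs(mask, cqi);
  for (bool b : out) {
    std::cout << b; // prints 110011: both CQI-3 RBGs are dropped in one step
  }
  std::cout << "\n";
}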

@@ -277,8 +277,9 @@ int sched_ue_cell::get_ul_cqi() const
 int sched_ue_cell::get_dl_cqi(const rbgmask_t& rbgs) const
 {
-  float dl_cqi = std::get<1>(find_min_cqi_rbg(rbgs, dl_cqi_ctxt));
-  return std::max(0, (int)std::min(dl_cqi + dl_cqi_coeff, 15.0f));
+  int min_cqi;
+  find_min_cqi_rbgs(rbgs, dl_cqi_ctxt, min_cqi);
+  return std::max(0, (int)std::min(static_cast<float>(min_cqi) + dl_cqi_coeff, 15.0f));
 }

 int sched_ue_cell::get_dl_cqi() const
@@ -558,12 +559,12 @@ bool find_optimal_rbgmask(const sched_ue_cell& ue_cell,
   // We start with largest RBG allocation and continue removing RBGs. However, there is no guarantee this is
   // going to be the optimal solution

-  // Subtract whole CQI subbands until objective is not met
+  // Subtract RBGs with lowest CQI until objective is not met
   // TODO: can be optimized
   rbgmask_t smaller_mask;
   tbs_info  tb2;
   do {
-    smaller_mask = remove_min_cqi_subband(newtxmask, ue_cell.dl_cqi());
+    smaller_mask = remove_min_cqi_rbgs(newtxmask, ue_cell.dl_cqi());
     tb2 = compute_mcs_and_tbs_lower_bound(ue_cell, tti_tx_dl, smaller_mask, dci_format);
     if (tb2.tbs_bytes >= (int)req_bytes.stop() or tb.tbs_bytes <= tb2.tbs_bytes) {
       tb = tb2;
@@ -573,14 +574,6 @@ bool find_optimal_rbgmask(const sched_ue_cell& ue_cell,
   if (tb.tbs_bytes <= (int)req_bytes.stop()) {
     return true;
   }
-  do {
-    smaller_mask = remove_min_cqi_rbg(newtxmask, ue_cell.dl_cqi());
-    tb2 = compute_mcs_and_tbs_lower_bound(ue_cell, tti_tx_dl, smaller_mask, dci_format);
-    if (tb2.tbs_bytes >= (int)req_bytes.stop() or tb.tbs_bytes <= tb2.tbs_bytes) {
-      tb = tb2;
-      newtxmask = smaller_mask;
-    }
-  } while (tb2.tbs_bytes > (int)req_bytes.stop());

   return true;
 }
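Since the first loop above now shrinks the allocation by whole groups of lowest-CQI RBGs, the second RBG-by-RBG loop that used to follow becomes redundant and is dropped. A rough standalone sketch of the shrinking strategy, with a toy estimate_tbs_bytes standing in for compute_mcs_and_tbs_lower_bound (that helper, the CQI-as-bytes model, and the numbers are assumptions for illustration only):

#include <algorithm>
#include <climits>
#include <cstddef>
#include <iostream>
#include <vector>

using Mask = std::vector<bool>;

// Toy TBS model: each allocated RBG contributes its CQI value in "bytes".
static int estimate_tbs_bytes(const Mask& mask, const std::vector<int>& rbg_cqi)
{
  int tbs = 0;
  for (std::size_t i = 0; i < mask.size(); ++i) {
    tbs += mask[i] ? rbg_cqi[i] : 0;
  }
  return tbs;
}

int main()
{
  std::vector<int> rbg_cqi   = {15, 15, 2, 2, 12, 12, 2, 14};
  Mask             mask(rbg_cqi.size(), true); // start with the largest allocation
  const int        req_bytes = 50;             // objective: required TBS

  int tbs = estimate_tbs_bytes(mask, rbg_cqi);
  while (tbs > req_bytes) {
    // Find the lowest CQI among the still-allocated RBGs.
    int min_cqi = INT_MAX;
    for (std::size_t i = 0; i < mask.size(); ++i) {
      if (mask[i]) {
        min_cqi = std::min(min_cqi, rbg_cqi[i]);
      }
    }
    // Drop all RBGs tied at that minimum (the remove_min_cqi_rbgs idea) and re-evaluate.
    Mask smaller = mask;
    for (std::size_t i = 0; i < smaller.size(); ++i) {
      if (smaller[i] and rbg_cqi[i] == min_cqi) {
        smaller[i] = false;
      }
    }
    int tbs2 = estimate_tbs_bytes(smaller, rbg_cqi);
    if (tbs2 >= req_bytes or tbs2 >= tbs) {
      mask = smaller; // keep the smaller mask: objective still met or TBS not worse
      tbs  = tbs2;
    } else {
      break; // shrinking further would miss the objective
    }
  }

  for (bool b : mask) {
    std::cout << b;
  }
  std::cout << " -> " << tbs << " bytes\n"; // the three CQI-2 RBGs are pruned first
}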

@@ -59,6 +59,73 @@ void test_neg_phr_scenario()
   TESTASSERT(tbinfo.tbs_bytes >= 10);
 }

+void test_interferer_subband_cqi_scenario()
+{
+  uint32_t Nprb = 50;
+  sched_interface::cell_cfg_t cell_cfg = generate_default_cell_cfg(Nprb);
+  sched_interface::sched_args_t sched_cfg = {};
+  sched_cell_params_t cell_params;
+  cell_params.set_cfg(0, cell_cfg, sched_cfg);
+  sched_interface::ue_cfg_t ue_cfg = generate_default_ue_cfg();
+
+  sched_ue_cell ue_cc(0x46, cell_params, tti_point(0));
+  ue_cfg.supported_cc_list[0].dl_cfg.cqi_report.subband_wideband_ratio = 4;
+  ue_cfg.supported_cc_list[0].dl_cfg.cqi_report.periodic_configured = true;
+  ue_cc.set_ue_cfg(ue_cfg);
+
+  TESTASSERT(ue_cc.dl_cqi().subband_cqi_enabled());
+  TESTASSERT(ue_cc.dl_cqi().nof_bandwidth_parts() == 3);
+  TESTASSERT(ue_cc.dl_cqi().nof_subbands() == 9);
+
+  ue_cc.set_dl_wb_cqi(tti_point{0}, 10);
+  ue_cc.set_dl_sb_cqi(tti_point{40}, 1, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{80}, 3, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{160}, 8, 0); // interferer in last BP
+
+  rbgmask_t test_mask(cell_params.nof_rbgs);
+  test_mask.fill(0, 12);
+
+  rbgmask_t rbgs(cell_params.nof_rbgs);
+  tbs_info  tb;
+  rbgmask_t grant_mask(cell_params.nof_rbgs);
+  TESTASSERT(find_optimal_rbgmask(ue_cc,
+                                  tti_point{160 + TX_ENB_DELAY},
+                                  rbgs,
+                                  SRSRAN_DCI_FORMAT1,
+                                  srsran::interval<uint32_t>{0, 10000},
+                                  tb,
+                                  grant_mask));
+  TESTASSERT(grant_mask == test_mask);
+
+  ue_cc.set_dl_wb_cqi(tti_point{0}, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{40}, 1, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{80}, 3, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{160}, 8, 10); // interferer in last BP
+  TESTASSERT(find_optimal_rbgmask(ue_cc,
+                                  tti_point{160 + TX_ENB_DELAY},
+                                  rbgs,
+                                  SRSRAN_DCI_FORMAT1,
+                                  srsran::interval<uint32_t>{0, 10000},
+                                  tb,
+                                  grant_mask));
+  TESTASSERT(grant_mask == test_mask);
+
+  ue_cc.set_dl_wb_cqi(tti_point{0}, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{40}, 1, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{80}, 3, 15);
+  ue_cc.set_dl_sb_cqi(tti_point{160}, 8, 14); // interferer in last BP
+  TESTASSERT(find_optimal_rbgmask(ue_cc,
+                                  tti_point{160 + TX_ENB_DELAY},
+                                  rbgs,
+                                  SRSRAN_DCI_FORMAT1,
+                                  srsran::interval<uint32_t>{0, 10000},
+                                  tb,
+                                  grant_mask));
+  test_mask.reset();
+  test_mask.fill(0, cell_params.nof_rbgs);
+  TESTASSERT(grant_mask == test_mask);
+}
+
 int main()
 {
   srsenb::set_randseed(seed);
@@ -71,6 +138,7 @@ int main()
   srslog::init();
   test_neg_phr_scenario();
+  test_interferer_subband_cqi_scenario();
   srslog::flush();
