use of min DL CQI across all PRBs to compute required DL PRBs

master
Francisco 4 years ago committed by Francisco Paisana
parent 8564996eaf
commit f8b6eae2bf

@@ -153,7 +153,7 @@ private:
tbs_info compute_mcs_and_tbs(uint32_t enb_cc_idx,
tti_point tti_tx_dl,
uint32_t nof_alloc_prbs,
const rbgmask_t& rbgs,
uint32_t cfi,
const srsran_dci_dl_t& dci);

@@ -41,12 +41,14 @@ public:
srsran_assert(K <= 4, "K=%d outside of {0, 4}", K);
}
/// Set K value from upper layers. See TS 36.331, CQI-ReportPeriodic
void set_K(uint32_t K_)
{
srsran_assert(K <= 4, "K=%d outside of {0, 4}", K);
K = K_;
}
/// Update wideband CQI
void cqi_wb_info(tti_point tti, uint32_t cqi_value)
{
if (cqi_value > 0) {
@@ -57,6 +59,7 @@ public:
wb_cqi_avg = static_cast<float>(cqi_value);
}
/// Update subband CQI for subband "sb_index"
void cqi_sb_info(tti_point tti, uint32_t sb_index, uint32_t cqi_value)
{
if (cqi_value > 0) {
@@ -95,7 +98,7 @@ public:
return static_cast<int>(wb_cqi_avg);
}
uint32_t sb_idx = rbg_to_sb_index(rbg);
return bp_list[get_bp_index(sb_idx)].last_feedback_tti.is_valid() ? subband_cqi[sb_idx] : wb_cqi_avg;
return get_subband_cqi_(sb_idx);
}
/// Get average CQI in given RBG interval
@@ -107,7 +110,7 @@ public:
float cqi = 0;
uint32_t sbstart = rbg_to_sb_index(interv.start()), sbend = rbg_to_sb_index(interv.stop() - 1) + 1;
for (uint32_t sb = sbstart; sb < sbend; ++sb) {
cqi += bp_list[get_bp_index(sb)].last_feedback_tti.is_valid() ? subband_cqi[sb] : wb_cqi_avg;
cqi += get_subband_cqi_(sb);
}
return static_cast<int>(cqi / (sbend - sbstart));
}
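
A standalone sketch of the averaging idea above, using simplified types rather than the srsRAN classes (the negative-CQI "no feedback yet" convention and the free-standing function are assumptions for illustration): the subband CQI is summed over the subbands covered by the RBG interval, falling back to the wideband average for subbands without recent feedback.

#include <cstdint>
#include <vector>

// Average CQI over the RBG interval [rbg_start, rbg_stop), assumed non-empty.
static int grant_avg_cqi(uint32_t                  rbg_start,
                         uint32_t                  rbg_stop,
                         uint32_t                  cell_nof_rbg,
                         const std::vector<float>& subband_cqi, // size N; < 0 means "no feedback yet"
                         float                     wb_cqi_avg)
{
  uint32_t N       = static_cast<uint32_t>(subband_cqi.size());
  uint32_t sbstart = rbg_start * N / cell_nof_rbg;          // rbg_to_sb_index
  uint32_t sbend   = (rbg_stop - 1) * N / cell_nof_rbg + 1;
  float    cqi     = 0;
  for (uint32_t sb = sbstart; sb < sbend; ++sb) {
    // use the subband CQI when available, otherwise fall back to the wideband average
    cqi += subband_cqi[sb] >= 0 ? subband_cqi[sb] : wb_cqi_avg;
  }
  return static_cast<int>(cqi / (sbend - sbstart));
}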
@@ -128,20 +131,20 @@ public:
uint32_t count = 0;
for (int rbg = mask.find_lowest(0, mask.size()); rbg != -1; rbg = mask.find_lowest(rbg, mask.size())) {
uint32_t sb = rbg_to_sb_index(rbg);
cqi += bp_list[get_bp_index(sb)].last_feedback_tti.is_valid() ? subband_cqi[sb] : wb_cqi_avg;
cqi += get_subband_cqi_(sb);
count++;
rbg = static_cast<int>(((sb + 1U) * cell_nof_rbg + N() - 1U) / N()); // skip to next subband index
}
return static_cast<int>(cqi / count);
}
/// Get CQI-optimal RBG mask
rbgmask_t get_optim_rbgmask(uint32_t req_rbgs) const
/// Get CQI-optimal RBG mask with at most "req_rbgs" RBGs
rbgmask_t get_optim_rbgmask(uint32_t req_rbgs, bool max_min_flag = true) const
{
rbgmask_t rbgmask(cell_nof_rbg);
return get_optim_rbgmask(rbgmask, req_rbgs);
return get_optim_rbgmask(rbgmask, req_rbgs, max_min_flag);
}
rbgmask_t get_optim_rbgmask(const rbgmask_t& dl_mask, uint32_t req_rbgs) const;
rbgmask_t get_optim_rbgmask(const rbgmask_t& dl_mask, uint32_t req_rbgs, bool max_min_flag = true) const;
/// TS 36.321, 7.2.2 - Parameter N
uint32_t nof_subbands() const { return subband_cqi.size(); }
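
A short usage sketch of the extended selection API above (dl_cqi is a hypothetical, already-configured sched_dl_cqi instance): the new max_min_flag lets the caller choose whether the returned mask keeps the highest-CQI or the lowest-CQI RBGs.

rbgmask_t best4  = dl_cqi.get_optim_rbgmask(4);        // default: keep the 4 highest-CQI RBGs
rbgmask_t worst4 = dl_cqi.get_optim_rbgmask(4, false); // new: keep the 4 lowest-CQI RBGs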
@@ -172,7 +175,7 @@ private:
uint32_t get_bp_index(uint32_t sb_index) const { return sb_index * J() / N(); }
uint32_t get_sb_index(uint32_t prb_index) const { return prb_index * N() / cell_nof_prb; }
uint32_t prb_to_sb_index(uint32_t prb_index) const { return prb_index * N() / cell_nof_prb; }
uint32_t rbg_to_sb_index(uint32_t rbg_index) const { return rbg_index * N() / cell_nof_rbg; }
@@ -181,6 +184,11 @@ private:
return srsran::interval<uint32_t>{bp_idx * N() / J(), (bp_idx + 1) * N() / J()};
}
float get_subband_cqi_(uint32_t sb_idx) const
{
return bp_list[get_bp_index(sb_idx)].last_feedback_tti.is_valid() ? subband_cqi[sb_idx] : wb_cqi_avg;
}
uint32_t cell_nof_prb;
uint32_t cell_nof_rbg;
uint32_t K; ///< set in RRC

@@ -90,7 +90,7 @@ private:
/// Compute DL grant optimal TBS and MCS given UE cell context and DL grant parameters
tbs_info cqi_to_tbs_dl(const sched_ue_cell& cell,
uint32_t nof_prb,
const rbgmask_t& rbgs,
uint32_t nof_re,
srsran_dci_format_t dci_format,
int req_bytes = -1);

@@ -322,8 +322,7 @@ tbs_info sched_ue::allocate_new_dl_mac_pdu(sched::dl_sched_data_t* data,
uint32_t tb)
{
srsran_dci_dl_t* dci = &data->dci;
uint32_t nof_prb = count_prb_per_tb(user_mask);
tbs_info tb_info = compute_mcs_and_tbs(enb_cc_idx, tti_tx_dl, nof_prb, cfi, *dci);
tbs_info tb_info = compute_mcs_and_tbs(enb_cc_idx, tti_tx_dl, user_mask, cfi, *dci);
// Allocate MAC PDU (subheaders, CEs, and SDUS)
int rem_tbs = tb_info.tbs_bytes;
@@ -470,25 +469,25 @@ int sched_ue::generate_format1(uint32_t pid,
* Based on the amount of tx data, allocated PRBs, DCI params, etc. compute a valid MCS and resulting TBS
* @param enb_cc_idx user carrier index
* @param tti_tx_dl tti when the tx will occur
* @param nof_alloc_prbs number of PRBs that were allocated
* @param rbgs RBG mask
* @param cfi Number of control symbols in Subframe
* @param dci contains the RBG mask, and alloc type
* @return pair with MCS and TBS (in bytes)
*/
tbs_info sched_ue::compute_mcs_and_tbs(uint32_t enb_cc_idx,
tti_point tti_tx_dl,
uint32_t nof_alloc_prbs,
const rbgmask_t& rbg_mask,
uint32_t cfi,
const srsran_dci_dl_t& dci)
{
assert(cells[enb_cc_idx].configured());
srsran_assert(cells[enb_cc_idx].configured(), "computation of MCS/TBS called for non-configured CC");
srsran::interval<uint32_t> req_bytes = get_requested_dl_bytes(enb_cc_idx);
// Calculate exact number of RE for this PRB allocation
uint32_t nof_re = cells[enb_cc_idx].cell_cfg->get_dl_nof_res(tti_tx_dl, dci, cfi);
// Compute MCS+TBS
tbs_info tb = cqi_to_tbs_dl(cells[enb_cc_idx], nof_alloc_prbs, nof_re, dci.format, req_bytes.stop());
tbs_info tb = cqi_to_tbs_dl(cells[enb_cc_idx], rbg_mask, nof_re, dci.format, req_bytes.stop());
if (tb.tbs_bytes > 0 and tb.tbs_bytes < (int)req_bytes.start()) {
logger.info("SCHED: Could not get PRB allocation that avoids MAC CE or RLC SRB0 PDU segmentation");

@@ -15,7 +15,7 @@
using namespace srsenb;
rbgmask_t sched_dl_cqi::get_optim_rbgmask(const rbgmask_t& dl_mask, uint32_t req_rbgs) const
rbgmask_t sched_dl_cqi::get_optim_rbgmask(const rbgmask_t& dl_mask, uint32_t req_rbgs, bool max_flag) const
{
req_rbgs = std::min(req_rbgs, cell_nof_rbg);
if (not subband_cqi_enabled()) {
@@ -38,8 +38,14 @@ rbgmask_t sched_dl_cqi::get_optim_rbgmask(const rbgmask_t& dl_mask, uint32_t req
std::stable_sort(sorted_cqi_pos.begin(), sorted_cqi_pos.end(), [&sorted_cqis](uint32_t lhs, uint32_t rhs) {
return sorted_cqis[lhs] > sorted_cqis[rhs];
});
for (size_t i = req_rbgs; i < sorted_cqi_pos.size(); ++i) {
emptymask.set(sorted_cqi_pos[i], false);
if (max_flag) {
for (size_t i = req_rbgs; i < sorted_cqi_pos.size(); ++i) {
emptymask.set(sorted_cqi_pos[i], false);
}
} else {
for (size_t i = 0; i < sorted_cqi_pos.size() - req_rbgs; ++i) {
emptymask.set(sorted_cqi_pos[i], false);
}
}
return emptymask;
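
A self-contained sketch of the selection logic above, with simplified types (std::vector<bool> standing in for rbgmask_t, and all candidate RBGs considered instead of only the free ones): positions are stably sorted by decreasing CQI, then either the tail (max_flag == true, keep the best req_rbgs) or the head (max_flag == false, keep the worst req_rbgs) is cleared.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <numeric>
#include <vector>

static std::vector<bool> pick_rbgs(const std::vector<float>& rbg_cqi, uint32_t req_rbgs, bool max_flag)
{
  std::vector<bool>     mask(rbg_cqi.size(), true);
  std::vector<uint32_t> pos(rbg_cqi.size());
  std::iota(pos.begin(), pos.end(), 0);
  // sort RBG indices by decreasing CQI (stable, as in std::stable_sort above)
  std::stable_sort(
      pos.begin(), pos.end(), [&rbg_cqi](uint32_t lhs, uint32_t rhs) { return rbg_cqi[lhs] > rbg_cqi[rhs]; });
  if (req_rbgs >= pos.size()) {
    return mask; // nothing to trim
  }
  if (max_flag) {
    for (size_t i = req_rbgs; i < pos.size(); ++i) {
      mask[pos[i]] = false; // drop the worst RBGs, keep the best req_rbgs
    }
  } else {
    for (size_t i = 0; i < pos.size() - req_rbgs; ++i) {
      mask[pos[i]] = false; // drop the best RBGs, keep the worst req_rbgs
    }
  }
  return mask;
}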

@@ -231,31 +231,32 @@ std::tuple<int, YType, int, YType> false_position_method(int x1, int x2, YType y
}
tbs_info cqi_to_tbs_dl(const sched_ue_cell& cell,
uint32_t nof_prb,
const rbgmask_t& rbgs,
uint32_t nof_re,
srsran_dci_format_t dci_format,
int req_bytes)
{
bool use_tbs_index_alt = cell.get_ue_cfg()->use_tbs_index_alt and dci_format != SRSRAN_DCI_FORMAT1A;
bool use_tbs_index_alt = cell.get_ue_cfg()->use_tbs_index_alt and dci_format != SRSRAN_DCI_FORMAT1A;
uint32_t nof_prbs = count_prb_per_tb(rbgs);
tbs_info ret;
if (cell.fixed_mcs_dl < 0 or not cell.dl_cqi().is_cqi_info_received()) {
// Dynamic MCS configured or first Tx
uint32_t dl_cqi_avg = cell.dl_cqi().get_grant_avg_cqi(prb_interval(0, nof_prb));
uint32_t dl_cqi_avg = cell.dl_cqi().get_grant_avg_cqi(rbgs);
ret = compute_min_mcs_and_tbs_from_required_bytes(
nof_prb, nof_re, dl_cqi_avg, cell.max_mcs_dl, req_bytes, false, false, use_tbs_index_alt);
nof_prbs, nof_re, dl_cqi_avg, cell.max_mcs_dl, req_bytes, false, false, use_tbs_index_alt);
// If coderate > SRSRAN_MIN(max_coderate, 0.932 * Qm) we should set TBS=0. We don't because it's not correctly
// handled by the scheduler, but we might be scheduling undecodable codewords at very low SNR
if (ret.tbs_bytes < 0) {
ret.mcs = 0;
ret.tbs_bytes = get_tbs_bytes((uint32_t)ret.mcs, nof_prb, use_tbs_index_alt, false);
ret.tbs_bytes = get_tbs_bytes((uint32_t)ret.mcs, nof_prbs, use_tbs_index_alt, false);
}
} else {
// Fixed MCS configured
ret.mcs = cell.fixed_mcs_dl;
ret.tbs_bytes = get_tbs_bytes((uint32_t)cell.fixed_mcs_dl, nof_prb, use_tbs_index_alt, false);
ret.tbs_bytes = get_tbs_bytes((uint32_t)cell.fixed_mcs_dl, nof_prbs, use_tbs_index_alt, false);
}
return ret;
}
@@ -293,8 +294,9 @@ int get_required_prb_dl(const sched_ue_cell& cell,
uint32_t req_bytes)
{
auto compute_tbs_approx = [tti_tx_dl, &cell, dci_format](uint32_t nof_prb) {
uint32_t nof_re = cell.cell_cfg->get_dl_lb_nof_re(tti_tx_dl, nof_prb);
tbs_info tb = cqi_to_tbs_dl(cell, nof_prb, nof_re, dci_format, -1);
uint32_t nof_re = cell.cell_cfg->get_dl_lb_nof_re(tti_tx_dl, nof_prb);
rbgmask_t min_cqi_rbgs = cell.dl_cqi().get_optim_rbgmask(nof_prb, false);
tbs_info tb = cqi_to_tbs_dl(cell, min_cqi_rbgs, nof_re, dci_format, -1);
return tb.tbs_bytes;
};
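
get_required_prb_dl searches for the smallest PRB count whose approximate TBS covers the requested bytes; with this commit the approximation evaluates each candidate size over the worst-CQI RBGs (get_optim_rbgmask(nof_prb, false)), so the required-PRB estimate is conservative rather than optimistic. A standalone sketch of that search contract, assuming a (roughly) non-decreasing TBS approximation; srsRAN itself uses the false_position_method referenced above, while the sketch below uses a plain binary search just to illustrate how compute_tbs_approx is consumed.

#include <cstdint>
#include <functional>

// Smallest nof_prb in [1, max_prb] such that tbs_approx(nof_prb) >= req_bytes.
// Returns max_prb if even the full bandwidth cannot satisfy the request.
static uint32_t required_prbs(uint32_t max_prb, uint32_t req_bytes, const std::function<int(uint32_t)>& tbs_approx)
{
  uint32_t lo = 1, hi = max_prb;
  while (lo < hi) { // binary search over an (assumed) non-decreasing tbs_approx
    uint32_t mid = (lo + hi) / 2;
    if (tbs_approx(mid) >= static_cast<int>(req_bytes)) {
      hi = mid;
    } else {
      lo = mid + 1;
    }
  }
  return lo;
}

// e.g. with a toy 8-bytes-per-PRB model:
// required_prbs(50, 320, [](uint32_t n) { return static_cast<int>(n * 8); }) == 40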
