I can work around the dropped-counters issue (by using the simple flow_stats rx_pkts/tx_pkts counters instead). With this — and now with the bond fix — convergence at link down and link up is very similar to what I get with the native DPDK testpmd application (i.e. on par with the DPDK reference application, which is great).
Below is a printout of the TRex counters when forcing a link break (the interesting ones are highlighted):
Per flow latency (microsec) statistics:
{
"latency": {
"10010": {
"err_cntrs": {
"dup": 0,
"seq_too_high": 1,
"dropped": 6,
"seq_too_low": 0,
"out_of_order": 0
},
"latency": {
"last_max": 0,
"total_max": 924,
"average": 31.0,
"histogram": {
"900": 1,
"40": 12602,
"10": 7,
"50": 10,
"20": 104032,
"60": 3192,
"30": 55
},
"jitter": 7,
"total_min": 13
}
},
"global": {
"bad_hdr": 0,
"old_flow": 0
},
"1010": {
"err_cntrs": {
"dup": 0,
"seq_too_high": 1,
"dropped": 7914,
"seq_too_low": 0,
"out_of_order": 0
},
"latency": {
"last_max": 54,
"total_max": 989,
"average": 26.0,
"histogram": {
"900": 1,
"70": 5,
"40": 63,
"10": 290,
"50": 64377,
"20": 7923,
"60": 8,
"30": 47196
},
"jitter": 27,
"total_min": 13
}
}
},
"flow_stats": {
"10010": {
"rx_bps": {
"0": 0.0,
"total": 0.0
},
"rx_pps": {
"0": 0.0,
"total": 0.0
},
"rx_pkts": {
"0": 119899,
"total": 119899
},
"rx_bytes": {
"0": 8153132,
"total": 8153132
},
"tx_pkts": {
"0": 119905,
"total": 119905
},
"tx_pps": {
"0": 0.0,
"total": 0.0
},
"tx_bps": {
"0": 0.0,
"total": 0.0
},
"tx_bytes": {
"0": 8153540,
"total": 8153540
},
"rx_bps_l1": {
"0": 0.0,
"total": 0.0
},
"tx_bps_l1": {
"0": 0.0,
"total": 0.0
}
},
"global": {
"rx_err": {
"0": 0
},
"tx_err": {
"0": 0
}
},
"1010": {
"rx_bps": {
"0": 0.0,
"total": 0.0
},
"rx_pps": {
"0": 0.0,
"total": 0.0
},
"rx_pkts": {
"0": 119863,
"total": 119863
},
"rx_bytes": {
"0": 8150684,
"total": 8150684
},
"tx_pkts": {
"0": 119905, #### tx-rx = 38 but drop_counters=7914 (the difference is sometimes some x*100k packets – send rate is 1000 PPS)
"total": 119905
},
"tx_pps": {
"0": 0.0,
"total": 0.0
},
"tx_bps": {
"0": 0.0,
"total": 0.0
},
"tx_bytes": {
"0": 8153540,
"total": 8153540
},
"rx_bps_l1": {
"0": 0.0,
"total": 0.0
},
"tx_bps_l1": {
"0": 0.0,
"total": 0.0
}
}
},
"ver_id": {
"10011": 17,
"10010": 19,
"1010": 18,
"1011": 16
}
}
##########
#########
BR//Håkan
--
You received this message because you are subscribed to the Google Groups "TRex Traffic Generator" group.
To unsubscribe from this group and stop receiving emails from it, send an email to trex-tgn+u...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/trex-tgn/194a19ff-6168-4bcd-9c22-fb3ddf390675n%40googlegroups.com.