Lines Matching refs:sparx5

24 void sparx5_new_base_time(struct sparx5 *sparx5, const u32 cycle_time, in sparx5_new_base_time() argument
35 sparx5_ptp_gettime64(&sparx5->phc[SPARX5_PHC_PORT].info, &ts); in sparx5_new_base_time()
79 static u32 sparx5_lg_get_leak_time(struct sparx5 *sparx5, u32 layer, u32 group) in sparx5_lg_get_leak_time() argument
83 value = spx5_rd(sparx5, HSCH_HSCH_TIMER_CFG(layer, group)); in sparx5_lg_get_leak_time()
87 static void sparx5_lg_set_leak_time(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_set_leak_time() argument
90 spx5_wr(HSCH_HSCH_TIMER_CFG_LEAK_TIME_SET(leak_time), sparx5, in sparx5_lg_set_leak_time()
94 static u32 sparx5_lg_get_first(struct sparx5 *sparx5, u32 layer, u32 group) in sparx5_lg_get_first() argument
98 value = spx5_rd(sparx5, HSCH_HSCH_LEAK_CFG(layer, group)); in sparx5_lg_get_first()
102 static u32 sparx5_lg_get_next(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_get_next() argument
108 value = spx5_rd(sparx5, HSCH_SE_CONNECT(idx)); in sparx5_lg_get_next()
112 static u32 sparx5_lg_get_last(struct sparx5 *sparx5, u32 layer, u32 group) in sparx5_lg_get_last() argument
116 itr = sparx5_lg_get_first(sparx5, layer, group); in sparx5_lg_get_last()
119 next = sparx5_lg_get_next(sparx5, layer, group, itr); in sparx5_lg_get_last()
127 static bool sparx5_lg_is_last(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_is_last() argument
130 return idx == sparx5_lg_get_next(sparx5, layer, group, idx); in sparx5_lg_is_last()
133 static bool sparx5_lg_is_first(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_is_first() argument
136 return idx == sparx5_lg_get_first(sparx5, layer, group); in sparx5_lg_is_first()
139 static bool sparx5_lg_is_empty(struct sparx5 *sparx5, u32 layer, u32 group) in sparx5_lg_is_empty() argument
141 return sparx5_lg_get_leak_time(sparx5, layer, group) == 0; in sparx5_lg_is_empty()
144 static bool sparx5_lg_is_singular(struct sparx5 *sparx5, u32 layer, u32 group) in sparx5_lg_is_singular() argument
146 if (sparx5_lg_is_empty(sparx5, layer, group)) in sparx5_lg_is_singular()
149 return sparx5_lg_get_first(sparx5, layer, group) == in sparx5_lg_is_singular()
150 sparx5_lg_get_last(sparx5, layer, group); in sparx5_lg_is_singular()
153 static void sparx5_lg_enable(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_enable() argument
156 sparx5_lg_set_leak_time(sparx5, layer, group, leak_time); in sparx5_lg_enable()
159 static void sparx5_lg_disable(struct sparx5 *sparx5, u32 layer, u32 group) in sparx5_lg_disable() argument
161 sparx5_lg_set_leak_time(sparx5, layer, group, 0); in sparx5_lg_disable()
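
Read together, the helpers above suggest that a leak group behaves as a hardware-backed singly linked list of scheduler-element (SE) indices: the head is held in HSCH_HSCH_LEAK_CFG (LEAK_FIRST), each element's successor in HSCH_SE_CONNECT (SE_LEAK_LINK), the last element links to itself, and a leak time of zero marks the group as disabled/empty. The userspace C model below is only an illustrative sketch of those conventions; the struct, array size and function names are hypothetical, and the per-layer register accesses are ignored.

/* Hypothetical userspace model of a sparx5 leak group - a sketch of the
 * list conventions visible in the helpers above, not the driver's code.
 */
#include <stdbool.h>
#include <stdio.h>

#define LG_MAX_SE 8                     /* hypothetical element count */

struct lg_model {
        unsigned int leak_time;         /* 0 means the group is disabled/empty */
        unsigned int first;             /* head of the list (LEAK_FIRST) */
        unsigned int next[LG_MAX_SE];   /* successor of each element (SE_LEAK_LINK) */
};

static bool lg_is_empty(const struct lg_model *lg)
{
        return lg->leak_time == 0;      /* mirrors sparx5_lg_is_empty() */
}

static bool lg_is_last(const struct lg_model *lg, unsigned int idx)
{
        return idx == lg->next[idx];    /* the last element links to itself */
}

static unsigned int lg_get_last(const struct lg_model *lg)
{
        unsigned int itr = lg->first;

        while (!lg_is_last(lg, itr))    /* walk until a self-link is found */
                itr = lg->next[itr];
        return itr;
}

static bool lg_is_singular(const struct lg_model *lg)
{
        return !lg_is_empty(lg) && lg->first == lg_get_last(lg);
}

int main(void)
{
        struct lg_model lg = {
                .leak_time = 100,               /* group enabled */
                .first = 2,                     /* list: 2 -> 5 -> (5) */
                .next = { [2] = 5, [5] = 5 },
        };

        printf("empty=%d last=%u singular=%d\n",
               lg_is_empty(&lg), lg_get_last(&lg), lg_is_singular(&lg));
        return 0;
}

The self-link convention means the walk in lg_get_last() (and, by the look of the lines above, in the driver's sparx5_lg_get_last()) terminates without needing a separate tail or length register.
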
164 static int sparx5_lg_get_group_by_index(struct sparx5 *sparx5, u32 layer, in sparx5_lg_get_group_by_index() argument
171 if (sparx5_lg_is_empty(sparx5, layer, i)) in sparx5_lg_get_group_by_index()
174 itr = sparx5_lg_get_first(sparx5, layer, i); in sparx5_lg_get_group_by_index()
177 next = sparx5_lg_get_next(sparx5, layer, i, itr); in sparx5_lg_get_group_by_index()
210 static int sparx5_lg_get_adjacent(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_get_adjacent() argument
215 *first = sparx5_lg_get_first(sparx5, layer, group); in sparx5_lg_get_adjacent()
221 *next = sparx5_lg_get_next(sparx5, layer, group, itr); in sparx5_lg_get_adjacent()
236 static int sparx5_lg_conf_set(struct sparx5 *sparx5, u32 layer, u32 group, in sparx5_lg_conf_set() argument
242 sparx5_lg_disable(sparx5, layer, group); in sparx5_lg_conf_set()
249 HSCH_HSCH_CFG_CFG_HSCH_LAYER, sparx5, HSCH_HSCH_CFG_CFG); in sparx5_lg_conf_set()
252 spx5_wr(HSCH_SE_CONNECT_SE_LEAK_LINK_SET(idx_next), sparx5, in sparx5_lg_conf_set()
257 HSCH_HSCH_LEAK_CFG_LEAK_FIRST, sparx5, in sparx5_lg_conf_set()
261 sparx5_lg_enable(sparx5, layer, group, leak_time); in sparx5_lg_conf_set()
266 static int sparx5_lg_del(struct sparx5 *sparx5, u32 layer, u32 group, u32 idx) in sparx5_lg_del() argument
272 WARN_ON(sparx5_lg_get_adjacent(sparx5, layer, group, idx, &prev, &next, in sparx5_lg_del()
275 if (sparx5_lg_is_singular(sparx5, layer, group)) { in sparx5_lg_del()
277 } else if (sparx5_lg_is_last(sparx5, layer, group, idx)) { in sparx5_lg_del()
281 } else if (sparx5_lg_is_first(sparx5, layer, group, idx)) { in sparx5_lg_del()
290 return sparx5_lg_conf_set(sparx5, layer, group, first, idx, next, in sparx5_lg_del()
294 static int sparx5_lg_add(struct sparx5 *sparx5, u32 layer, u32 new_group, in sparx5_lg_add() argument
303 if (sparx5_lg_get_group_by_index(sparx5, layer, idx, &old_group) >= 0) { in sparx5_lg_add()
306 sparx5_lg_del(sparx5, layer, old_group, idx); in sparx5_lg_add()
316 if (sparx5_lg_is_empty(sparx5, layer, new_group)) in sparx5_lg_add()
319 next = sparx5_lg_get_first(sparx5, layer, new_group); in sparx5_lg_add()
321 return sparx5_lg_conf_set(sparx5, layer, new_group, first, idx, next, in sparx5_lg_add()
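
The conf_set/del/add matches above show the list surgery itself: sparx5_lg_conf_set stops the group's leak timer, relinks the element through HSCH_SE_CONNECT, updates LEAK_FIRST, and re-enables the timer; sparx5_lg_del distinguishes the singular/last/first/middle cases before handing the result to conf_set; and sparx5_lg_add first removes the index from whatever group it already belongs to and then appears to insert it at the head of the new group. The sketch below repeats the hypothetical model from above (so it compiles on its own) and imitates only the conf_set and head-insertion steps, with plain array updates standing in for the register writes.

/* Hypothetical continuation of the leak-group model - again a sketch,
 * not the driver's code.
 */
#include <stdbool.h>
#include <stdio.h>

#define LG_MAX_SE 8                     /* hypothetical element count */

struct lg_model {
        unsigned int leak_time;         /* 0 means the group is disabled/empty */
        unsigned int first;             /* head of the list (LEAK_FIRST) */
        unsigned int next[LG_MAX_SE];   /* successor of each element (SE_LEAK_LINK) */
};

static bool lg_is_empty(const struct lg_model *lg)
{
        return lg->leak_time == 0;
}

/* Imitates the sparx5_lg_conf_set() sequence: quiesce the group, relink
 * the element, update the head, then start leaking again unless the
 * group is left empty.
 */
static void lg_conf_set(struct lg_model *lg, unsigned int se_first,
                        unsigned int idx, unsigned int idx_next,
                        bool empty, unsigned int leak_time)
{
        lg->leak_time = 0;              /* disable while relinking */
        if (empty)
                return;
        lg->next[idx] = idx_next;       /* HSCH_SE_CONNECT in the driver */
        lg->first = se_first;           /* HSCH_HSCH_LEAK_CFG:LEAK_FIRST */
        lg->leak_time = leak_time;      /* re-enable */
}

/* Head insertion as sparx5_lg_add() appears to do it: an empty group gets
 * a single self-linked element, otherwise the old head becomes the new
 * element's successor.
 */
static void lg_add_head(struct lg_model *lg, unsigned int idx,
                        unsigned int leak_time)
{
        unsigned int next = lg_is_empty(lg) ? idx : lg->first;

        lg_conf_set(lg, idx, idx, next, false, leak_time);
}

int main(void)
{
        struct lg_model lg = { 0 };

        lg_add_head(&lg, 3, 100);       /* list: 3 -> (3) */
        lg_add_head(&lg, 7, 100);       /* list: 7 -> 3 -> (3) */
        printf("first=%u next[7]=%u next[3]=%u\n",
               lg.first, lg.next[7], lg.next[3]);
        return 0;
}

Disabling the leak timer before relinking and re-enabling it afterwards keeps the hardware from walking a half-updated list, which appears to be the point of the disable/enable pair around the register writes in sparx5_lg_conf_set.
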
329 int (*sparx5_lg_action)(struct sparx5 *, u32, u32, u32); in sparx5_shaper_conf_set()
330 struct sparx5 *sparx5 = port->sparx5; in sparx5_shaper_conf_set() local
339 HSCH_HSCH_CFG_CFG_HSCH_LAYER, sparx5, HSCH_HSCH_CFG_CFG); in sparx5_shaper_conf_set()
343 sparx5, HSCH_SE_CFG(idx)); in sparx5_shaper_conf_set()
348 sparx5, HSCH_CIR_CFG(idx)); in sparx5_shaper_conf_set()
351 sparx5_lg_action(sparx5, layer, group, idx); in sparx5_shaper_conf_set()
370 port->sparx5, HSCH_HSCH_CFG_CFG); in sparx5_dwrr_conf_set()
374 HSCH_SE_CFG_SE_DWRR_CNT, port->sparx5, in sparx5_dwrr_conf_set()
379 HSCH_DWRR_ENTRY_DWRR_COST, port->sparx5, in sparx5_dwrr_conf_set()
386 static int sparx5_leak_groups_init(struct sparx5 *sparx5) in sparx5_leak_groups_init() argument
394 sys_clk_per_100ps = spx5_rd(sparx5, HSCH_SYS_CLK_PER); in sparx5_leak_groups_init()
434 sparx5_lg_disable(sparx5, i, ii); in sparx5_leak_groups_init()
441 int sparx5_qos_init(struct sparx5 *sparx5) in sparx5_qos_init() argument
445 ret = sparx5_leak_groups_init(sparx5); in sparx5_qos_init()
449 ret = sparx5_dcb_init(sparx5); in sparx5_qos_init()
453 sparx5_psfp_init(sparx5); in sparx5_qos_init()
530 sparx5_lg_get_group_by_index(port->sparx5, layer, idx, &group); in sparx5_tc_tbf_del()