Searched refs:allocated_stack (Results 1 – 4 of 4) sorted by relevance
347  int allocated_stack; member
474  (((slot < frame->allocated_stack / BPF_REG_SIZE) && \
481  iter < frame->allocated_stack / BPF_REG_SIZE; \
277 __naked void allocated_stack(void) in allocated_stack() function
593  int allocated_slots = state->allocated_stack / BPF_REG_SIZE; in is_spi_bounds_valid()
832  for (i = 1; i < state->allocated_stack / BPF_REG_SIZE; i++) { in unmark_stack_slots_dynptr()
1274 size_t n = src->allocated_stack / BPF_REG_SIZE; in copy_stack_state()
1281 dst->allocated_stack = src->allocated_stack; in copy_stack_state()
1301 size_t old_n = state->allocated_stack / BPF_REG_SIZE, n; in grow_stack_state()
1314 state->allocated_stack = size; in grow_stack_state()
4146 for (j = 0; j < func->allocated_stack / BPF_REG_SIZE; j++) { in mark_all_scalars_precise()
4176 for (j = 0; j < func->allocated_stack / BPF_REG_SIZE; j++) { in mark_all_scalars_imprecise()
4404 if (i >= func->allocated_stack / BPF_REG_SIZE) { in __mark_chain_precision()
4406 i, func->allocated_stack / BPF_REG_SIZE); in __mark_chain_precision()
[all …]
775 for (i = 0; i < state->allocated_stack / BPF_REG_SIZE; i++) { in print_verifier_state()