/* Lines matching +full:0 +full:xa from the XArray test suite (lib/test_xarray.c) */

void xa_dump(const struct xarray *xa) { }

#define XA_BUG_ON(xa, x) do { \
		xa_dump(xa); \
} while (0)
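
/*
 * Helper wrappers used throughout the tests below: each stores, inserts,
 * allocates or erases the value entry xa_mk_index(index) at a given index
 * and asserts the outcome with XA_BUG_ON().
 */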

static void *xa_store_index(struct xarray *xa, unsigned long index, gfp_t gfp)
	return xa_store(xa, index, xa_mk_index(index), gfp);

static void xa_insert_index(struct xarray *xa, unsigned long index)
	XA_BUG_ON(xa, xa_insert(xa, index, xa_mk_index(index),
				GFP_KERNEL) != 0);

static void xa_alloc_index(struct xarray *xa, unsigned long index, gfp_t gfp)
	XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(index), xa_limit_32b,
				gfp) != 0);
	XA_BUG_ON(xa, id != index);

static void xa_erase_index(struct xarray *xa, unsigned long index)
	XA_BUG_ON(xa, xa_erase(xa, index) != xa_mk_index(index));
	XA_BUG_ON(xa, xa_load(xa, index) != NULL);

static void *xa_store_order(struct xarray *xa, unsigned long index,
	XA_STATE_ORDER(xas, xa, index, order);
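
/*
 * check_xa_err() exercises error encoding: xa_store() returns either the old
 * entry or an internal error entry, and xa_err() turns that back into a
 * negative errno (0 on success). The -ENOMEM cases presumably only apply
 * where a GFP_NOWAIT allocation can actually fail, e.g. the userspace harness.
 */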

static noinline void check_xa_err(struct xarray *xa)
	XA_BUG_ON(xa, xa_err(xa_store_index(xa, 0, GFP_NOWAIT)) != 0);
	XA_BUG_ON(xa, xa_err(xa_erase(xa, 0)) != 0);
	XA_BUG_ON(xa, xa_err(xa_store_index(xa, 1, GFP_NOWAIT)) != -ENOMEM);
	XA_BUG_ON(xa, xa_err(xa_store_index(xa, 1, GFP_NOWAIT)) != -ENOMEM);
	XA_BUG_ON(xa, xa_err(xa_store_index(xa, 1, GFP_KERNEL)) != 0);
	XA_BUG_ON(xa, xa_err(xa_store(xa, 1, xa_mk_value(0), GFP_KERNEL)) != 0);
	XA_BUG_ON(xa, xa_err(xa_erase(xa, 1)) != 0);
//	XA_BUG_ON(xa, xa_err(xa_store(xa, 0, xa_mk_internal(0), 0)) != -EINVAL);
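
/*
 * check_xas_retry(): when an entry is erased while another walker holds a
 * stale xa_state, a reload can observe an XA_RETRY_ENTRY; xas_retry() resets
 * the state so the walk restarts from the head of the tree.
 */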

static noinline void check_xas_retry(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	xa_store_index(xa, 0, GFP_KERNEL);
	xa_store_index(xa, 1, GFP_KERNEL);
	XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_value(0));
	xa_erase_index(xa, 1);
	XA_BUG_ON(xa, !xa_is_retry(xas_reload(&xas)));
	XA_BUG_ON(xa, xas_retry(&xas, NULL));
	XA_BUG_ON(xa, xas_retry(&xas, xa_mk_value(0)));
	XA_BUG_ON(xa, xas.xa_node != XAS_RESTART);
	XA_BUG_ON(xa, xas_next_entry(&xas, ULONG_MAX) != xa_mk_value(0));
	XA_BUG_ON(xa, xas.xa_node != NULL);
	XA_BUG_ON(xa, xa_store_index(xa, 1, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, !xa_is_internal(xas_reload(&xas)));
	XA_BUG_ON(xa, xas_next_entry(&xas, ULONG_MAX) != xa_mk_value(0));
	xas_set(&xas, 0);
	xas_set(&xas, 0);
	xa_erase_index(xa, 0);
	xa_erase_index(xa, 1);

static noinline void check_xa_load(struct xarray *xa)
	for (i = 0; i < 1024; i++) {
		for (j = 0; j < 1024; j++) {
			void *entry = xa_load(xa, j);
				XA_BUG_ON(xa, xa_to_value(entry) != j);
				XA_BUG_ON(xa, entry);
		XA_BUG_ON(xa, xa_store_index(xa, i, GFP_KERNEL) != NULL);
	for (i = 0; i < 1024; i++) {
		for (j = 0; j < 1024; j++) {
			void *entry = xa_load(xa, j);
				XA_BUG_ON(xa, xa_to_value(entry) != j);
				XA_BUG_ON(xa, entry);
		xa_erase_index(xa, i);
	XA_BUG_ON(xa, !xa_empty(xa));
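
/*
 * Mark tests: each entry can carry up to three tag bits (XA_MARK_0..XA_MARK_2).
 * Setting a mark on a nonexistent index has no effect, erasing an entry clears
 * its marks, and a multi-index entry shares its marks across the whole range
 * it covers.
 */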

static noinline void check_xa_mark_1(struct xarray *xa, unsigned long index)
	XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0));
	xa_set_mark(xa, index, XA_MARK_0);
	XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0));
	XA_BUG_ON(xa, xa_store_index(xa, index, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0));
	xa_set_mark(xa, index, XA_MARK_0);
	XA_BUG_ON(xa, !xa_get_mark(xa, index, XA_MARK_0));
	XA_BUG_ON(xa, xa_get_mark(xa, index + 1, XA_MARK_0));
	XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_1));
	xa_erase_index(xa, index);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0));
	xa_set_mark(xa, index, XA_MARK_0);
	XA_BUG_ON(xa, xa_get_mark(xa, index, XA_MARK_0));
	BUG_ON((index % 4) != 0);
	XA_BUG_ON(xa, xa_store_index(xa, index + 1, GFP_KERNEL));
	xa_set_mark(xa, index + 1, XA_MARK_0);
	XA_BUG_ON(xa, xa_store_index(xa, index + 2, GFP_KERNEL));
	xa_set_mark(xa, index + 2, XA_MARK_2);
	XA_BUG_ON(xa, xa_store_index(xa, next, GFP_KERNEL));
	xa_store_order(xa, index, order, xa_mk_index(index),
		XA_STATE(xas, xa, i);
		unsigned int seen = 0;
		XA_BUG_ON(xa, !xa_get_mark(xa, i, XA_MARK_0));
		XA_BUG_ON(xa, xa_get_mark(xa, i, XA_MARK_1));
		XA_BUG_ON(xa, !xa_get_mark(xa, i, XA_MARK_2));
		XA_BUG_ON(xa, seen != 2);
		xas_set(&xas, 0);
		seen = 0;
		XA_BUG_ON(xa, seen != 1);
	XA_BUG_ON(xa, xa_get_mark(xa, next, XA_MARK_0));
	XA_BUG_ON(xa, xa_get_mark(xa, next, XA_MARK_1));
	XA_BUG_ON(xa, xa_get_mark(xa, next, XA_MARK_2));
	xa_erase_index(xa, index);
	xa_erase_index(xa, next);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_xa_mark_2(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	unsigned int count = 0;
	xa_store_index(xa, 0, GFP_KERNEL);
	xa_set_mark(xa, 0, XA_MARK_0);
	XA_BUG_ON(xa, !xa_get_mark(xa, 0, XA_MARK_0) == 0);
		xa_store_index(xa, index, GFP_KERNEL);
		xa_set_mark(xa, index, XA_MARK_0);
	XA_BUG_ON(xa, count != 1000);
		XA_BUG_ON(xa, !xa_get_mark(xa, xas.xa_index, XA_MARK_0));
		XA_BUG_ON(xa, !xas_get_mark(&xas, XA_MARK_0));
	xa_destroy(xa);

static noinline void check_xa_mark_3(struct xarray *xa)
	XA_STATE(xas, xa, 0x41);
	int count = 0;
	xa_store_order(xa, 0x40, 2, xa_mk_index(0x40), GFP_KERNEL);
	xa_set_mark(xa, 0x41, XA_MARK_0);
		XA_BUG_ON(xa, entry != xa_mk_index(0x40));
	XA_BUG_ON(xa, count != 1);
	xa_destroy(xa);

static noinline void check_xa_mark(struct xarray *xa)
	for (index = 0; index < 16384; index += 4)
		check_xa_mark_1(xa, index);
	check_xa_mark_2(xa);
	check_xa_mark_3(xa);
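
/*
 * check_xa_shrink(): once the entries above index 0 are removed, the tree is
 * expected to shrink back until a single entry sits directly in xa_head with
 * no xa_node; slots of the discarded node are left holding retry entries so
 * that concurrent readers restart their walk.
 */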

static noinline void check_xa_shrink(struct xarray *xa)
	XA_STATE(xas, xa, 1);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_store_index(xa, 0, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_store_index(xa, 1, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xas_load(&xas) != xa_mk_value(1));
	XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != xa_mk_value(0));
	XA_BUG_ON(xa, xas_store(&xas, NULL) != xa_mk_value(1));
	XA_BUG_ON(xa, xa_load(xa, 1) != NULL);
	XA_BUG_ON(xa, xas.xa_node != XAS_BOUNDS);
	XA_BUG_ON(xa, xa_entry_locked(xa, node, 0) != XA_RETRY_ENTRY);
	XA_BUG_ON(xa, xas_load(&xas) != NULL);
	XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0));
	xa_erase_index(xa, 0);
	XA_BUG_ON(xa, !xa_empty(xa));
	for (order = 0; order < max_order; order++) {
		xa_store_order(xa, 0, order, xa_mk_value(0), GFP_KERNEL);
		XA_BUG_ON(xa, xa_load(xa, max) != xa_mk_value(0));
		XA_BUG_ON(xa, xa_load(xa, max + 1) != NULL);
		node = xa_head(xa);
		XA_BUG_ON(xa, xa_store_index(xa, ULONG_MAX, GFP_KERNEL) !=
		XA_BUG_ON(xa, xa_head(xa) == node);
		XA_BUG_ON(xa, xa_load(xa, max + 1) != NULL);
		xa_erase_index(xa, ULONG_MAX);
		XA_BUG_ON(xa, xa->xa_head != node);
		xa_erase_index(xa, 0);

static noinline void check_insert(struct xarray *xa)
	for (i = 0; i < 1024; i++) {
		xa_insert_index(xa, i);
		XA_BUG_ON(xa, xa_load(xa, i - 1) != NULL);
		XA_BUG_ON(xa, xa_load(xa, i + 1) != NULL);
		xa_erase_index(xa, i);
		xa_insert_index(xa, 1UL << i);
		XA_BUG_ON(xa, xa_load(xa, (1UL << i) - 1) != NULL);
		XA_BUG_ON(xa, xa_load(xa, (1UL << i) + 1) != NULL);
		xa_erase_index(xa, 1UL << i);
		xa_insert_index(xa, (1UL << i) - 1);
		XA_BUG_ON(xa, xa_load(xa, (1UL << i) - 2) != NULL);
		XA_BUG_ON(xa, xa_load(xa, 1UL << i) != NULL);
		xa_erase_index(xa, (1UL << i) - 1);
	xa_insert_index(xa, ~0UL);
	XA_BUG_ON(xa, xa_load(xa, 0UL) != NULL);
	XA_BUG_ON(xa, xa_load(xa, ~1UL) != NULL);
	xa_erase_index(xa, ~0UL);
	XA_BUG_ON(xa, !xa_empty(xa));
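
/*
 * check_cmpxchg(): xa_cmpxchg() replaces an entry only if it currently holds
 * the expected old value and always returns what was there before, while
 * xa_insert() fails with -EBUSY if anything (including a reserved zero entry)
 * already occupies the index. A minimal normal-API sketch of the same calls
 * (illustrative only; "my_xa" is a made-up name and GFP_KERNEL assumes a
 * sleepable context):
 *
 *	DEFINE_XARRAY(my_xa);
 *	void *old;
 *
 *	old = xa_cmpxchg(&my_xa, 5, NULL, xa_mk_value(1), GFP_KERNEL);
 *	// old == NULL means the slot was empty and value 1 is now stored
 *	xa_erase(&my_xa, 5);
 */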

static noinline void check_cmpxchg(struct xarray *xa)
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_store_index(xa, 12345678, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_insert(xa, 12345678, xa, GFP_KERNEL) != -EBUSY);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, SIX, FIVE, GFP_KERNEL) != LOTS);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, LOTS, FIVE, GFP_KERNEL) != LOTS);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, FIVE, LOTS, GFP_KERNEL) != FIVE);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 5, FIVE, NULL, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 5, NULL, FIVE, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_insert(xa, 5, FIVE, GFP_KERNEL) != -EBUSY);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 5, FIVE, NULL, GFP_KERNEL) != FIVE);
	XA_BUG_ON(xa, xa_insert(xa, 5, FIVE, GFP_KERNEL) == -EBUSY);
	xa_erase_index(xa, 12345678);
	xa_erase_index(xa, 5);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_cmpxchg_order(struct xarray *xa)
	XA_BUG_ON(xa, xa_store_order(xa, 0, order, FIVE, GFP_KERNEL));
	XA_BUG_ON(xa, xa_get_order(xa, xa_to_value(FIVE)) != order);
	for (i = 0; i < (1 << order); i++) {
		XA_BUG_ON(xa, xa_load(xa, i) != FIVE);
		XA_BUG_ON(xa, xa_get_order(xa, i) != order);
	XA_BUG_ON(xa, xa_load(xa, 1 << order) != NULL);
	XA_BUG_ON(xa, xa_store_order(xa, 1 << order, order, FIVE, GFP_KERNEL));
		XA_BUG_ON(xa, xa_load(xa, i) != FIVE);
		XA_BUG_ON(xa, xa_get_order(xa, i) != order);
	/* Conditionally replace FIVE entry at index '0' with NULL */
	XA_BUG_ON(xa, xa_cmpxchg(xa, 0, FIVE, NULL, GFP_KERNEL) != FIVE);
	XA_BUG_ON(xa, xa_get_order(xa, xa_to_value(FIVE)) != 0);
	for (i = 0; i < (1 << order); i++) {
		XA_BUG_ON(xa, xa_load(xa, i) != NULL);
		XA_BUG_ON(xa, xa_get_order(xa, i) != 0);
		XA_BUG_ON(xa, xa_load(xa, i) != FIVE);
		XA_BUG_ON(xa, xa_get_order(xa, i) != order);
	xa_store_order(xa, 0, BITS_PER_LONG - 1, NULL, GFP_KERNEL);
	XA_BUG_ON(xa, !xa_empty(xa));
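
/*
 * check_reserve(): xa_reserve() pre-allocates memory for an index and leaves
 * a "zero entry" there, which reads back as NULL and is skipped by iteration
 * but still blocks xa_insert(); xa_release() drops an unused reservation.
 */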

static noinline void check_reserve(struct xarray *xa)
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, xa_empty(xa));
	XA_BUG_ON(xa, xa_load(xa, 12345678));
	xa_release(xa, 12345678);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, xa_store_index(xa, 12345678, GFP_NOWAIT) != NULL);
	xa_release(xa, 12345678);
	xa_erase_index(xa, 12345678);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, xa_cmpxchg(xa, 12345678, XA_ZERO_ENTRY,
	xa_release(xa, 12345678);
	xa_erase_index(xa, 12345678);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_reserve(xa, 12345678, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, xa_insert(xa, 12345678, xa_mk_value(12345678), 0) !=
	XA_BUG_ON(xa, xa_empty(xa));
	XA_BUG_ON(xa, xa_erase(xa, 12345678) != NULL);
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_store_index(xa, 5, GFP_KERNEL);
	XA_BUG_ON(xa, xa_reserve(xa, 6, GFP_KERNEL) != 0);
	xa_store_index(xa, 7, GFP_KERNEL);
	count = 0;
	xa_for_each(xa, index, entry) {
		XA_BUG_ON(xa, index != 5 && index != 7);
	XA_BUG_ON(xa, count != 2);
	if (xa->xa_flags & XA_FLAGS_ALLOC) {
		XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_value(8),
					XA_LIMIT(5, 10), GFP_KERNEL) != 0);
		XA_BUG_ON(xa, id != 8);
		xa_release(xa, 6);
		XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_value(6),
					XA_LIMIT(5, 10), GFP_KERNEL) != 0);
		XA_BUG_ON(xa, id != 6);
	xa_destroy(xa);

static noinline void check_xas_erase(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	for (i = 0; i < 200; i++) {
			xas_store(&xas, xa_mk_value(0));
		xas_set(&xas, 0);
			XA_BUG_ON(xa, entry != xa_mk_index(j));
	XA_BUG_ON(xa, !xa_empty(xa));
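
/*
 * Multi-index ("multi-order") entries cover a naturally aligned power-of-two
 * range of indices with a single entry: xa_store_order(xa, index, order, ...)
 * makes the same entry visible at every index in that range, and storing or
 * erasing any index inside the range affects the whole entry.
 */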

static noinline void check_multi_store_1(struct xarray *xa, unsigned long index,
	XA_STATE(xas, xa, index);
	xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL);
	XA_BUG_ON(xa, xa_load(xa, min) != xa_mk_index(index));
	XA_BUG_ON(xa, xa_load(xa, max - 1) != xa_mk_index(index));
	XA_BUG_ON(xa, xa_load(xa, max) != NULL);
	XA_BUG_ON(xa, xa_load(xa, min - 1) != NULL);
	XA_BUG_ON(xa, xas_store(&xas, xa_mk_index(min)) != xa_mk_index(index));
	XA_BUG_ON(xa, xa_load(xa, min) != xa_mk_index(min));
	XA_BUG_ON(xa, xa_load(xa, max - 1) != xa_mk_index(min));
	XA_BUG_ON(xa, xa_load(xa, max) != NULL);
	XA_BUG_ON(xa, xa_load(xa, min - 1) != NULL);
	xa_erase_index(xa, min);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_multi_store_2(struct xarray *xa, unsigned long index,
	XA_STATE(xas, xa, index);
	xa_store_order(xa, index, order, xa_mk_value(0), GFP_KERNEL);
	XA_BUG_ON(xa, xas_store(&xas, xa_mk_value(1)) != xa_mk_value(0));
	XA_BUG_ON(xa, xas.xa_index != index);
	XA_BUG_ON(xa, xas_store(&xas, NULL) != xa_mk_value(1));
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_multi_store_3(struct xarray *xa, unsigned long index,
	XA_STATE(xas, xa, 0);
	int n = 0;
	xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL);
		XA_BUG_ON(xa, entry != xa_mk_index(index));
	XA_BUG_ON(xa, n != 1);
		XA_BUG_ON(xa, entry != xa_mk_index(index));
	XA_BUG_ON(xa, n != 2);
	xa_destroy(xa);

static noinline void check_multi_store(struct xarray *xa)
	xa_store_order(xa, 0, 1, xa_mk_value(0), GFP_KERNEL);
	XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0));
	XA_BUG_ON(xa, xa_load(xa, 1) != xa_mk_value(0));
	XA_BUG_ON(xa, xa_load(xa, 2) != NULL);
	XA_BUG_ON(xa, xa_to_node(xa_head(xa))->count != 2);
	XA_BUG_ON(xa, xa_to_node(xa_head(xa))->nr_values != 2);
	xa_store(xa, 3, xa, GFP_KERNEL);
	XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(0));
	XA_BUG_ON(xa, xa_load(xa, 1) != xa_mk_value(0));
	XA_BUG_ON(xa, xa_load(xa, 2) != NULL);
	XA_BUG_ON(xa, xa_to_node(xa_head(xa))->count != 3);
	XA_BUG_ON(xa, xa_to_node(xa_head(xa))->nr_values != 2);
	xa_store_order(xa, 0, 2, xa_mk_value(1), GFP_KERNEL);
	XA_BUG_ON(xa, xa_load(xa, 0) != xa_mk_value(1));
	XA_BUG_ON(xa, xa_load(xa, 1) != xa_mk_value(1));
	XA_BUG_ON(xa, xa_load(xa, 2) != xa_mk_value(1));
	XA_BUG_ON(xa, xa_load(xa, 3) != xa_mk_value(1));
	XA_BUG_ON(xa, xa_load(xa, 4) != NULL);
	XA_BUG_ON(xa, xa_to_node(xa_head(xa))->count != 4);
	XA_BUG_ON(xa, xa_to_node(xa_head(xa))->nr_values != 4);
	xa_store_order(xa, 0, BITS_PER_LONG - 1, NULL, GFP_KERNEL);
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_store_index(xa, 1, GFP_KERNEL);
	xa_store_index(xa, 2, GFP_KERNEL);
	xa_store_order(xa, 0, 2, NULL, GFP_KERNEL);
	XA_BUG_ON(xa, !xa_empty(xa));
	for (i = 0; i < max_order; i++) {
		for (j = 0; j < max_order; j++) {
			xa_store_order(xa, 0, i, xa_mk_index(i), GFP_KERNEL);
			xa_store_order(xa, 0, j, xa_mk_index(j), GFP_KERNEL);
			for (k = 0; k < max_order; k++) {
				void *entry = xa_load(xa, (1UL << k) - 1);
					XA_BUG_ON(xa, entry != NULL);
					XA_BUG_ON(xa, entry != xa_mk_index(j));
			xa_erase(xa, 0);
			XA_BUG_ON(xa, !xa_empty(xa));
	for (i = 0; i < 20; i++) {
		check_multi_store_1(xa, 200, i);
		check_multi_store_1(xa, 0, i);
		check_multi_store_1(xa, (1UL << i) + 1, i);
	check_multi_store_2(xa, 4095, 9);
		check_multi_store_3(xa, 0, i);
		check_multi_store_3(xa, 1UL << i, i);

static noinline void check_xa_multi_store_adv_add(struct xarray *xa,
	XA_STATE(xas, xa, index);
	XA_BUG_ON(xa, index & (nrpages - 1));
	XA_BUG_ON(xa, xas_error(&xas) && xas_error(&xas) != -ENOMEM);
	XA_BUG_ON(xa, xas_error(&xas));
	XA_BUG_ON(xa, xa_load(xa, index) != p);

static noinline void check_xa_multi_store_adv_del_entry(struct xarray *xa,
	XA_STATE(xas, xa, index);

static noinline void check_xa_multi_store_adv_delete(struct xarray *xa,
	xa_lock_irq(xa);
	check_xa_multi_store_adv_del_entry(xa, index, order);
	xa_unlock_irq(xa);

static noinline void *test_get_entry(struct xarray *xa, unsigned long index)
	XA_STATE(xas, xa, index);
	static unsigned int loops = 0;
	if (++loops % XA_CHECK_SCHED == 0)

static unsigned long some_val = 0xdeadbeef;
static unsigned long some_val_2 = 0xdeaddead;

static noinline void check_xa_multi_store_adv(struct xarray *xa,
	check_xa_multi_store_adv_add(xa, base, order, &some_val);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, base + i) != &some_val);
	XA_BUG_ON(xa, test_get_entry(xa, next_index) != NULL);
	/* Use order 0 for the next item */
	check_xa_multi_store_adv_add(xa, next_index, 0, &some_val_2);
	XA_BUG_ON(xa, test_get_entry(xa, next_index) != &some_val_2);
	check_xa_multi_store_adv_delete(xa, next_index, 0);
	check_xa_multi_store_adv_add(xa, next_index, order, &some_val_2);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, next_index + i) != &some_val_2);
	check_xa_multi_store_adv_delete(xa, next_index, order);
	check_xa_multi_store_adv_delete(xa, base, order);
	XA_BUG_ON(xa, !xa_empty(xa));
	check_xa_multi_store_adv_add(xa, next_index, order, &some_val_2);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, base + i) != NULL);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, next_index + i) != &some_val_2);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, next_next_index + i) != NULL);
	check_xa_multi_store_adv_delete(xa, next_index, order);
	XA_BUG_ON(xa, !xa_empty(xa));
	check_xa_multi_store_adv_add(xa, next_next_index, order, &some_val_2);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, base + i) != NULL);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, next_index + i) != NULL);
	for (i = 0; i < nrpages; i++)
		XA_BUG_ON(xa, test_get_entry(xa, next_next_index + i) != &some_val_2);
	check_xa_multi_store_adv_delete(xa, next_next_index, order);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_multi_store_advanced(struct xarray *xa)
	for (i = 0; i < max_order; i++) {
		check_xa_multi_store_adv(xa, pos, i);
		check_xa_multi_store_adv(xa, pos + 157, i);
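
/*
 * Allocating-XArray tests: xa_alloc() finds a free index within the given
 * xa_limit and stores the entry there; xa_alloc_cyclic() additionally tracks
 * the last allocated id in *next and wraps around. "base" is 0 for
 * XA_FLAGS_ALLOC arrays and 1 for XA_FLAGS_ALLOC1. A minimal normal-API
 * sketch of what is exercised below (illustrative; "my_ids" and "some_ptr"
 * are made-up names):
 *
 *	DEFINE_XARRAY_ALLOC(my_ids);	// IDs start at 0
 *	u32 id;
 *	int err = xa_alloc(&my_ids, &id, some_ptr, xa_limit_32b, GFP_KERNEL);
 *	// on success, some_ptr is stored at the freshly chosen index "id"
 */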

static noinline void check_xa_alloc_1(struct xarray *xa, unsigned int base)
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_alloc_index(xa, base, GFP_KERNEL);
	xa_erase_index(xa, base);
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_alloc_index(xa, base, GFP_KERNEL);
		xa_alloc_index(xa, i, GFP_KERNEL);
		xa_erase_index(xa, i);
	xa_alloc_index(xa, base, GFP_KERNEL);
	xa_destroy(xa);
	xa_alloc_index(xa, base, GFP_KERNEL);
	xa_alloc_index(xa, base + 1, GFP_KERNEL);
	xa_erase_index(xa, base + 1);
	xa_store_index(xa, base + 1, GFP_KERNEL);
	xa_alloc_index(xa, base + 2, GFP_KERNEL);
	xa_erase_index(xa, base);
	xa_alloc_index(xa, base, GFP_KERNEL);
	xa_erase_index(xa, base + 1);
	xa_erase_index(xa, base + 2);
		xa_alloc_index(xa, base + i, GFP_KERNEL);
	xa_destroy(xa);
	XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(UINT_MAX - 1),
				GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != 0xfffffffeU);
	XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(UINT_MAX),
				GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != 0xffffffffU);
	XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(0),
	XA_BUG_ON(xa, id != 3);
	xa_destroy(xa);
	XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(10), XA_LIMIT(10, 5),
	XA_BUG_ON(xa, xa_store_index(xa, 3, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, xa_alloc(xa, &id, xa_mk_index(10), XA_LIMIT(10, 5),
	xa_erase_index(xa, 3);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_xa_alloc_2(struct xarray *xa, unsigned int base)
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != base);
	XA_BUG_ON(xa, xa_empty(xa));
	XA_BUG_ON(xa, xa_erase(xa, id) != NULL);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != base);
	XA_BUG_ON(xa, xa_empty(xa));
	xa_destroy(xa);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b,
				GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != i);
	XA_BUG_ON(xa, xa_store(xa, 3, xa_mk_index(3), GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_store(xa, 4, xa_mk_index(4), GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_store(xa, 4, NULL, GFP_KERNEL) != xa_mk_index(4));
	XA_BUG_ON(xa, xa_erase(xa, 5) != NULL);
	XA_BUG_ON(xa, xa_alloc(xa, &id, NULL, xa_limit_32b, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != 5);
	xa_for_each(xa, index, entry) {
		xa_erase_index(xa, index);
	XA_BUG_ON(xa, xa_erase(xa, i) != NULL);
	XA_BUG_ON(xa, xa_empty(xa));
	XA_BUG_ON(xa, xa_erase(xa, 8) != NULL);
	XA_BUG_ON(xa, xa_empty(xa));
	XA_BUG_ON(xa, xa_erase(xa, base + 9) != NULL);
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_destroy(xa);

static noinline void check_xa_alloc_3(struct xarray *xa, unsigned int base)
	struct xa_limit limit = XA_LIMIT(1, 0x3fff);
	u32 next = 0;
	XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(1), limit,
				&next, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != 1);
	next = 0x3ffd;
	XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(0x3ffd), limit,
				&next, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != 0x3ffd);
	xa_erase_index(xa, 0x3ffd);
	xa_erase_index(xa, 1);
	XA_BUG_ON(xa, !xa_empty(xa));
	for (i = 0x3ffe; i < 0x4003; i++) {
		if (i < 0x4000)
			entry = xa_mk_index(i - 0x3fff);
		XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, entry, limit,
		XA_BUG_ON(xa, xa_mk_index(id) != entry);
	if (base != 0)
		xa_erase_index(xa, base);
	xa_erase_index(xa, base + 1);
	XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(UINT_MAX),
				xa_limit_32b, &next, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != UINT_MAX);
	XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(base),
	XA_BUG_ON(xa, id != base);
	XA_BUG_ON(xa, xa_alloc_cyclic(xa, &id, xa_mk_index(base + 1),
				xa_limit_32b, &next, GFP_KERNEL) != 0);
	XA_BUG_ON(xa, id != base + 1);
	xa_for_each(xa, index, entry)
		xa_erase_index(xa, index);
	XA_BUG_ON(xa, !xa_empty(xa));

	check_xa_alloc_1(&xa0, 0);
	check_xa_alloc_2(&xa0, 0);
	check_xa_alloc_3(&xa0, 0);

static noinline void __check_store_iter(struct xarray *xa, unsigned long start,
	XA_STATE_ORDER(xas, xa, start, order);
	unsigned int count = 0;
		XA_BUG_ON(xa, !xa_is_value(entry));
		XA_BUG_ON(xa, entry < xa_mk_index(start));
		XA_BUG_ON(xa, entry > xa_mk_index(start + (1UL << order) - 1));
	count = 0;
	XA_BUG_ON(xa, xas_error(&xas));
	XA_BUG_ON(xa, count != present);
	XA_BUG_ON(xa, xa_load(xa, start) != xa_mk_index(start));
	XA_BUG_ON(xa, xa_load(xa, start + (1UL << order) - 1) !=
	xa_erase_index(xa, start);

static noinline void check_store_iter(struct xarray *xa)
	for (i = 0; i < max_order; i++) {
		__check_store_iter(xa, 0, i, 0);
		XA_BUG_ON(xa, !xa_empty(xa));
		__check_store_iter(xa, min, i, 0);
		XA_BUG_ON(xa, !xa_empty(xa));
		xa_store_index(xa, min, GFP_KERNEL);
		__check_store_iter(xa, min, i, 1);
		XA_BUG_ON(xa, !xa_empty(xa));
		xa_store_index(xa, max, GFP_KERNEL);
		__check_store_iter(xa, min, i, 1);
		XA_BUG_ON(xa, !xa_empty(xa));
		for (j = 0; j < min; j++)
			xa_store_index(xa, j, GFP_KERNEL);
		__check_store_iter(xa, 0, i, min);
		XA_BUG_ON(xa, !xa_empty(xa));
		for (j = 0; j < min; j++)
			xa_store_index(xa, min + j, GFP_KERNEL);
		__check_store_iter(xa, min, i, min);
		XA_BUG_ON(xa, !xa_empty(xa));
	xa_store_index(xa, 63, GFP_KERNEL);
	xa_store_index(xa, 65, GFP_KERNEL);
	__check_store_iter(xa, 64, 2, 1);
	xa_erase_index(xa, 63);
	XA_BUG_ON(xa, !xa_empty(xa));
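
/*
 * Find tests: xa_find() returns the first present entry at or above *index,
 * xa_find_after() the first one strictly above it, and both update *index to
 * the position where the entry was found; a mark such as XA_MARK_0 can be
 * passed instead of XA_PRESENT to search only marked entries.
 */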

static noinline void check_multi_find_1(struct xarray *xa, unsigned order)
	xa_store_order(xa, multi, order, xa_mk_value(multi), GFP_KERNEL);
	XA_BUG_ON(xa, xa_store_index(xa, next, GFP_KERNEL) != NULL);
	XA_BUG_ON(xa, xa_store_index(xa, next + 1, GFP_KERNEL) != NULL);
	index = 0;
	XA_BUG_ON(xa, xa_find(xa, &index, ULONG_MAX, XA_PRESENT) !=
	XA_BUG_ON(xa, index != multi);
	XA_BUG_ON(xa, xa_find(xa, &index, ULONG_MAX, XA_PRESENT) !=
	XA_BUG_ON(xa, (index < multi) || (index >= next));
	XA_BUG_ON(xa, xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT) !=
	XA_BUG_ON(xa, index != next);
	XA_BUG_ON(xa, xa_find_after(xa, &index, next, XA_PRESENT) != NULL);
	XA_BUG_ON(xa, index != next);
	xa_erase_index(xa, multi);
	xa_erase_index(xa, next);
	xa_erase_index(xa, next + 1);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_multi_find_2(struct xarray *xa)
	for (i = 0; i < max_order; i++) {
		for (j = 0; j < index; j++) {
			XA_STATE(xas, xa, j + index);
			xa_store_index(xa, index - 1, GFP_KERNEL);
			xa_store_order(xa, index, i, xa_mk_index(index),
			xa_erase_index(xa, index);
			xa_erase_index(xa, index - 1);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_multi_find_3(struct xarray *xa)
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_store_order(xa, 0, order - 4, xa_mk_index(0), GFP_KERNEL);
	XA_BUG_ON(xa, xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT));
	xa_erase_index(xa, 0);

static noinline void check_find_1(struct xarray *xa)
	XA_BUG_ON(xa, !xa_empty(xa));
	/*
	 * Check xa_find with all pairs between 0 and 99 inclusive,
	 * starting at every index between 0 and 99
	 */
	for (i = 0; i < 100; i++) {
		XA_BUG_ON(xa, xa_store_index(xa, i, GFP_KERNEL) != NULL);
		xa_set_mark(xa, i, XA_MARK_0);
		for (j = 0; j < i; j++) {
			XA_BUG_ON(xa, xa_store_index(xa, j, GFP_KERNEL) !=
			xa_set_mark(xa, j, XA_MARK_0);
			for (k = 0; k < 100; k++) {
				void *entry = xa_find(xa, &index, ULONG_MAX,
					XA_BUG_ON(xa, index != j);
					XA_BUG_ON(xa, index != i);
					XA_BUG_ON(xa, entry != NULL);
				entry = xa_find(xa, &index, ULONG_MAX,
					XA_BUG_ON(xa, index != j);
					XA_BUG_ON(xa, index != i);
					XA_BUG_ON(xa, entry != NULL);
			xa_erase_index(xa, j);
			XA_BUG_ON(xa, xa_get_mark(xa, j, XA_MARK_0));
			XA_BUG_ON(xa, !xa_get_mark(xa, i, XA_MARK_0));
		xa_erase_index(xa, i);
		XA_BUG_ON(xa, xa_get_mark(xa, i, XA_MARK_0));
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_find_2(struct xarray *xa)
	xa_for_each(xa, index, entry) {
		XA_BUG_ON(xa, true);
	for (i = 0; i < 1024; i++) {
		xa_store_index(xa, index, GFP_KERNEL);
		j = 0;
		xa_for_each(xa, index, entry) {
			XA_BUG_ON(xa, xa_mk_index(index) != entry);
			XA_BUG_ON(xa, index != j++);
	xa_destroy(xa);

static noinline void check_find_3(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	for (i = 0; i < 100; i++) {
		for (j = 0; j < 100; j++) {
			for (k = 0; k < 100; k++) {
				XA_BUG_ON(xa,
		xa_store_index(xa, i, GFP_KERNEL);
		xa_set_mark(xa, i, XA_MARK_0);
	xa_destroy(xa);

static noinline void check_find_4(struct xarray *xa)
	unsigned long index = 0;
	xa_store_index(xa, ULONG_MAX, GFP_KERNEL);
	entry = xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT);
	XA_BUG_ON(xa, entry != xa_mk_index(ULONG_MAX));
	entry = xa_find_after(xa, &index, ULONG_MAX, XA_PRESENT);
	XA_BUG_ON(xa, entry);
	xa_erase_index(xa, ULONG_MAX);

static noinline void check_find(struct xarray *xa)
	check_find_1(xa);
	check_find_2(xa);
	check_find_3(xa);
	check_find_4(xa);
	check_multi_find_1(xa, i);
	check_multi_find_2(xa);
	check_multi_find_3(xa);

static noinline unsigned long xa_find_entry(struct xarray *xa, void *item)
	XA_STATE(xas, xa, 0);
	unsigned int checked = 0;
		if ((checked % 4) != 0)

static noinline void check_find_entry(struct xarray *xa)
	for (order = 0; order < 20; order++) {
		for (offset = 0; offset < (1UL << (order + 3));
			for (index = 0; index < (1UL << (order + 5));
				xa_store_order(xa, index, order,
				XA_BUG_ON(xa, xa_load(xa, index) !=
				XA_BUG_ON(xa, xa_find_entry(xa,
		XA_BUG_ON(xa, xa_find_entry(xa, xa) != -1);
		xa_destroy(xa);
	XA_BUG_ON(xa, xa_find_entry(xa, xa) != -1);
	xa_store_index(xa, ULONG_MAX, GFP_KERNEL);
	XA_BUG_ON(xa, xa_find_entry(xa, xa) != -1);
	XA_BUG_ON(xa, xa_find_entry(xa, xa_mk_index(ULONG_MAX)) != -1);
	xa_erase_index(xa, ULONG_MAX);
	XA_BUG_ON(xa, !xa_empty(xa));
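
/*
 * check_pause(): xas_pause() lets an iteration drop the lock (or the RCU read
 * lock) and later continue from the next index without skipping or repeating
 * entries, even when multi-index entries are present.
 */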

static noinline void check_pause(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	unsigned int count = 0;
	for (order = 0; order < order_limit; order++) {
		XA_BUG_ON(xa, xa_store_order(xa, index, order,
		XA_BUG_ON(xa, entry != xa_mk_index(1UL << count));
	XA_BUG_ON(xa, count != order_limit);
	count = 0;
	xas_set(&xas, 0);
		XA_BUG_ON(xa, entry != xa_mk_index(1UL << count));
	XA_BUG_ON(xa, count != order_limit);
	xa_destroy(xa);
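
/*
 * Move tests: xas_next() and xas_prev() step the xa_state one index at a time
 * in either direction, wrapping between index 0 and ULONG_MAX, and return the
 * entry (or NULL) at each position.
 */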

static noinline void check_move_tiny(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	XA_BUG_ON(xa, !xa_empty(xa));
	XA_BUG_ON(xa, xas_next(&xas) != NULL);
	XA_BUG_ON(xa, xas_next(&xas) != NULL);
	xa_store_index(xa, 0, GFP_KERNEL);
	xas_set(&xas, 0);
	XA_BUG_ON(xa, xas_next(&xas) != xa_mk_index(0));
	XA_BUG_ON(xa, xas_next(&xas) != NULL);
	xas_set(&xas, 0);
	XA_BUG_ON(xa, xas_prev(&xas) != xa_mk_index(0));
	XA_BUG_ON(xa, xas_prev(&xas) != NULL);
	xa_erase_index(xa, 0);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_move_max(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	xa_store_index(xa, ULONG_MAX, GFP_KERNEL);
	XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_index(ULONG_MAX));
	XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != NULL);
	xas_set(&xas, 0);
	XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != xa_mk_index(ULONG_MAX));
	XA_BUG_ON(xa, xas_find(&xas, ULONG_MAX) != NULL);
	xa_erase_index(xa, ULONG_MAX);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_move_small(struct xarray *xa, unsigned long idx)
	XA_STATE(xas, xa, 0);
	xa_store_index(xa, 0, GFP_KERNEL);
	xa_store_index(xa, idx, GFP_KERNEL);
	for (i = 0; i < idx * 4; i++) {
		XA_BUG_ON(xa, xas.xa_node == XAS_RESTART);
		XA_BUG_ON(xa, xas.xa_index != i);
		if (i == 0 || i == idx)
			XA_BUG_ON(xa, entry != xa_mk_index(i));
			XA_BUG_ON(xa, entry != NULL);
	XA_BUG_ON(xa, xas.xa_index != i);
		XA_BUG_ON(xa, xas.xa_node == XAS_RESTART);
		XA_BUG_ON(xa, xas.xa_index != i);
		if (i == 0 || i == idx)
			XA_BUG_ON(xa, entry != xa_mk_index(i));
			XA_BUG_ON(xa, entry != NULL);
	} while (i > 0);
	XA_BUG_ON(xa, xas_next(&xas) != NULL);
	XA_BUG_ON(xa, xas.xa_index != ULONG_MAX);
	XA_BUG_ON(xa, xas_next(&xas) != xa_mk_value(0));
	XA_BUG_ON(xa, xas.xa_index != 0);
	XA_BUG_ON(xa, xas_prev(&xas) != NULL);
	XA_BUG_ON(xa, xas.xa_index != ULONG_MAX);
	xa_erase_index(xa, 0);
	xa_erase_index(xa, idx);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_move(struct xarray *xa)
	XA_STATE(xas, xa, (1 << 16) - 1);
	for (i = 0; i < (1 << 16); i++)
		XA_BUG_ON(xa, xa_store_index(xa, i, GFP_KERNEL) != NULL);
		XA_BUG_ON(xa, entry != xa_mk_index(i));
		XA_BUG_ON(xa, i != xas.xa_index);
	} while (i != 0);
	XA_BUG_ON(xa, xas_prev(&xas) != NULL);
	XA_BUG_ON(xa, xas.xa_index != ULONG_MAX);
		XA_BUG_ON(xa, entry != xa_mk_index(i));
		XA_BUG_ON(xa, i != xas.xa_index);
		xa_erase_index(xa, i);
		XA_BUG_ON(xa, entry != xa_mk_index(i));
		XA_BUG_ON(xa, entry != NULL);
		XA_BUG_ON(xa, i != xas.xa_index);
	} while (i != 0);
	XA_BUG_ON(xa, xas_prev(&xas) != NULL);
	XA_BUG_ON(xa, xas.xa_index != ULONG_MAX);
		XA_BUG_ON(xa, entry != xa_mk_index(i));
		XA_BUG_ON(xa, entry != NULL);
		XA_BUG_ON(xa, i != xas.xa_index);
	xa_destroy(xa);
	check_move_tiny(xa);
	check_move_max(xa);
	for (i = 0; i < 16; i++)
		check_move_small(xa, 1UL << i);
		check_move_small(xa, (1UL << i) - 1);
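
/*
 * Create-range tests: xas_create_range() pre-creates the slots covering the
 * whole range described by the xa_state, so the stores that follow inside
 * xa_store_many_order() and friends cannot fail with -ENOMEM while the lock
 * is held.
 */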

static noinline void xa_store_many_order(struct xarray *xa,
	XA_STATE_ORDER(xas, xa, index, order);
	unsigned int i = 0;
	XA_BUG_ON(xa, xas_find_conflict(&xas));
	for (i = 0; i < (1U << order); i++) {
		XA_BUG_ON(xa, xas_store(&xas, xa_mk_index(index + i)));
	XA_BUG_ON(xa, xas_error(&xas));

static noinline void check_create_range_1(struct xarray *xa,
	xa_store_many_order(xa, index, order);
	xa_erase_index(xa, i);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_create_range_2(struct xarray *xa, unsigned order)
	for (i = 0; i < nr * nr; i += nr)
		xa_store_many_order(xa, i, order);
	for (i = 0; i < nr * nr; i++)
		xa_erase_index(xa, i);
	XA_BUG_ON(xa, !xa_empty(xa));

	XA_STATE(xas, NULL, 0);

static noinline void check_create_range_4(struct xarray *xa,
	XA_STATE_ORDER(xas, xa, index, order);
	unsigned long i = 0;
	xa_store_index(xa, index, GFP_KERNEL);
	for (i = 0; i < (1UL << order); i++) {
		XA_BUG_ON(xa, old != xa_mk_index(base + i));
		XA_BUG_ON(xa, old != NULL);
	XA_BUG_ON(xa, xas_error(&xas));
	xa_erase_index(xa, i);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_create_range_5(struct xarray *xa,
	XA_STATE_ORDER(xas, xa, index, order);
	xa_store_order(xa, index, order, xa_mk_index(index), GFP_KERNEL);
	for (i = 0; i < order + 10; i++) {
	xa_destroy(xa);

static noinline void check_create_range(struct xarray *xa)
	for (order = 0; order < max_order; order++) {
		check_create_range_1(xa, 0, order);
		check_create_range_1(xa, 1U << order, order);
		check_create_range_1(xa, 2U << order, order);
		check_create_range_1(xa, 3U << order, order);
		check_create_range_1(xa, 1U << 24, order);
		check_create_range_2(xa, order);
		check_create_range_4(xa, 0, order);
		check_create_range_4(xa, 1U << order, order);
		check_create_range_4(xa, 2U << order, order);
		check_create_range_4(xa, 3U << order, order);
		check_create_range_4(xa, 1U << 24, order);
		check_create_range_4(xa, 1, order);
		check_create_range_4(xa, (1U << order) + 1, order);
		check_create_range_4(xa, (2U << order) + 1, order);
		check_create_range_4(xa, (2U << order) - 1, order);
		check_create_range_4(xa, (3U << order) + 1, order);
		check_create_range_4(xa, (3U << order) - 1, order);
		check_create_range_4(xa, (1U << 24) + 1, order);
		check_create_range_5(xa, 0, order);
		check_create_range_5(xa, (1U << order), order);

static noinline void __check_store_range(struct xarray *xa, unsigned long first,
	xa_store_range(xa, first, last, xa_mk_index(first), GFP_KERNEL);
	XA_BUG_ON(xa, xa_load(xa, first) != xa_mk_index(first));
	XA_BUG_ON(xa, xa_load(xa, last) != xa_mk_index(first));
	XA_BUG_ON(xa, xa_load(xa, first - 1) != NULL);
	XA_BUG_ON(xa, xa_load(xa, last + 1) != NULL);
	xa_store_range(xa, first, last, NULL, GFP_KERNEL);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_store_range(struct xarray *xa)
	for (i = 0; i < 128; i++) {
		__check_store_range(xa, i, j);
		__check_store_range(xa, 128 + i, 128 + j);
		__check_store_range(xa, 4095 + i, 4095 + j);
		__check_store_range(xa, 4096 + i, 4096 + j);
		__check_store_range(xa, 123456 + i, 123456 + j);
		__check_store_range(xa, (1 << 24) + i, (1 << 24) + j);
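
/*
 * Split tests: xas_split_alloc() plus xas_split() break one multi-index entry
 * of a given order into entries of a smaller new_order, keeping the marks
 * that were set on the original entry.
 */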

static void check_split_1(struct xarray *xa, unsigned long index,
	XA_STATE_ORDER(xas, xa, index, new_order);
	xa_store_order(xa, index, order, xa, GFP_KERNEL);
	xa_set_mark(xa, index, XA_MARK_1);
	xas_split_alloc(&xas, xa, order, GFP_KERNEL);
	xas_split(&xas, xa, order);
	for (i = 0; i < (1 << order); i += (1 << new_order))
		__xa_store(xa, index + i, xa_mk_index(index + i), 0);
	for (i = 0; i < (1 << order); i++) {
		XA_BUG_ON(xa, xa_load(xa, index + i) != xa_mk_index(val));
	xa_set_mark(xa, index, XA_MARK_0);
	XA_BUG_ON(xa, !xa_get_mark(xa, index, XA_MARK_0));
	xas_set_order(&xas, index, 0);
	found = 0;
		XA_BUG_ON(xa, xa_is_internal(entry));
	XA_BUG_ON(xa, found != 1 << (order - new_order));
	xa_destroy(xa);

static noinline void check_split(struct xarray *xa)
	XA_BUG_ON(xa, !xa_empty(xa));
		for (new_order = 0; new_order < order; new_order++) {
			check_split_1(xa, 0, order, new_order);
			check_split_1(xa, 1UL << order, order, new_order);
			check_split_1(xa, 3UL << order, order, new_order);

static void check_split(struct xarray *xa) { }

static void check_align_1(struct xarray *xa, char *name)
	for (i = 0; i < 8; i++) {
		XA_BUG_ON(xa, xa_alloc(xa, &id, name + i, xa_limit_32b,
					GFP_KERNEL) != 0);
		XA_BUG_ON(xa, id != i);
	xa_for_each(xa, index, entry)
		XA_BUG_ON(xa, xa_is_err(entry));
	xa_destroy(xa);

static void check_align_2(struct xarray *xa, char *name)
	XA_BUG_ON(xa, !xa_empty(xa));
	for (i = 0; i < 8; i++) {
		XA_BUG_ON(xa, xa_store(xa, 0, name + i, GFP_KERNEL) != NULL);
		xa_erase(xa, 0);
	for (i = 0; i < 8; i++) {
		XA_BUG_ON(xa, xa_reserve(xa, 0, GFP_KERNEL) != 0);
		XA_BUG_ON(xa, xa_store(xa, 0, name + i, 0) != NULL);
		xa_erase(xa, 0);
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_align(struct xarray *xa)
	check_align_1(xa, name);
	check_align_1(xa, name + 1);
	check_align_1(xa, name + 2);
	check_align_1(xa, name + 3);
	check_align_2(xa, name);
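
/*
 * Workingset tests: check_workingset() stores value entries ("shadow"
 * entries) and checks that the containing node appears on and disappears
 * from the shadow_nodes list as real entries come and go; shadow_remove()
 * walks that list under xa_lock() and verifies node->array still points at
 * the expected array before tearing the nodes down.
 */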

static noinline void shadow_remove(struct xarray *xa)
	xa_lock(xa);
	XA_BUG_ON(xa, node->array != xa);
	xa_unlock(xa);

static noinline void check_workingset(struct xarray *xa, unsigned long index)
	XA_STATE(xas, xa, index);
	xas_store(&xas, xa_mk_value(0));
	XA_BUG_ON(xa, list_empty(&shadow_nodes));
	XA_BUG_ON(xa, !list_empty(&shadow_nodes));
	XA_BUG_ON(xa, list_empty(&shadow_nodes));
	shadow_remove(xa);
	XA_BUG_ON(xa, !list_empty(&shadow_nodes));
	XA_BUG_ON(xa, !xa_empty(xa));

static noinline void check_account(struct xarray *xa)
	XA_STATE(xas, xa, 1 << order);
	xa_store_order(xa, 0, order, xa, GFP_KERNEL);
	XA_BUG_ON(xa, xas.xa_node->count == 0);
	XA_BUG_ON(xa, xas.xa_node->count > (1 << order));
	XA_BUG_ON(xa, xas.xa_node->nr_values != 0);
	xa_store_order(xa, 1 << order, order, xa_mk_index(1UL << order),
	XA_BUG_ON(xa, xas.xa_node->count != xas.xa_node->nr_values * 2);
	xa_erase(xa, 1 << order);
	XA_BUG_ON(xa, xas.xa_node->nr_values != 0);
	xa_erase(xa, 0);
	XA_BUG_ON(xa, !xa_empty(xa));
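
/*
 * xa_get_order() (and the advanced-API xas_get_order()) report the order of
 * the multi-index entry occupying a given index, or 0 for a normal
 * single-index entry.
 */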

static noinline void check_get_order(struct xarray *xa)
	for (i = 0; i < 3; i++)
		XA_BUG_ON(xa, xa_get_order(xa, i) != 0);
	for (order = 0; order < max_order; order++) {
		for (i = 0; i < 10; i++) {
			xa_store_order(xa, i << order, order,
			XA_BUG_ON(xa, xa_get_order(xa, j) != order);
			xa_erase(xa, i << order);

static noinline void check_xas_get_order(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	for (order = 0; order < max_order; order++) {
		for (i = 0; i < 10; i++) {
			xas_set_order(&xas, j, 0);
			XA_BUG_ON(xa, xas_get_order(&xas) != order);

static noinline void check_xas_conflict_get_order(struct xarray *xa)
	XA_STATE(xas, xa, 0);
	for (order = 0; order < max_order; order++) {
		for (i = 0; i < 10; i++) {
			for (k = 0; k < order; k++) {
				only_once = 0;
				XA_BUG_ON(xa, entry != xa_mk_value(i));
				XA_BUG_ON(xa, xas_get_order(&xas) != order);
				XA_BUG_ON(xa, only_once != 1);
			only_once = 0;
			XA_BUG_ON(xa, entry != xa_mk_value(i));
			XA_BUG_ON(xa, xas_get_order(&xas) != order);
			XA_BUG_ON(xa, only_once != 1);
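
/*
 * check_destroy(): xa_destroy() removes every entry and frees all interior
 * nodes, leaving an empty array that can be reused immediately.
 */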

static noinline void check_destroy(struct xarray *xa)
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_destroy(xa);
	XA_BUG_ON(xa, !xa_empty(xa));
	for (index = 0; index < 1000; index++) {
		xa_store_index(xa, index, GFP_KERNEL);
		XA_BUG_ON(xa, xa_empty(xa));
		xa_destroy(xa);
		XA_BUG_ON(xa, !xa_empty(xa));
	xa_store(xa, ULONG_MAX, xa, GFP_KERNEL);
	XA_BUG_ON(xa, xa_empty(xa));
	xa_destroy(xa);
	XA_BUG_ON(xa, !xa_empty(xa));
	xa_store_order(xa, 1 << 11, 11, xa, GFP_KERNEL);
	XA_BUG_ON(xa, xa_empty(xa));
	xa_destroy(xa);
	XA_BUG_ON(xa, !xa_empty(xa));

	check_workingset(&array, 0);
	return (tests_run == tests_passed) ? 0 : -EINVAL;