Lines matching full:mm — every occurrence of the identifier mm in the kernel's userland VMA unit tests, which compile mm/vma.c directly into a userspace test binary (see the includes below).
11 #include "../../../mm/vma.h"
23 #include "../../../mm/vma.c"
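The two includes above are the heart of the harness: rather than link against a kernel, the test pulls in mm/vma.h and then the mm/vma.c implementation itself, so the real merge/split logic runs in userspace against stubbed infrastructure. A minimal sketch of the pattern; the stub header name is an assumption:

    /* sketch: compile the code under test straight into a userspace harness */
    #include "stubs.h"            /* hypothetical shim header: allocators, maple tree, ASSERT_* */
    #include "../../../mm/vma.h"  /* declarations for the VMA internals */
    #include "../../../mm/vma.c"  /* the real implementation, built into this binary */

    int main(void)
    {
            /* each test_*() below constructs an mm_struct, exercises the
             * included vma.c, and tears everything down again */
            return 0;
    }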
49 static struct vm_area_struct *alloc_vma(struct mm_struct *mm, in alloc_vma() argument
55 struct vm_area_struct *ret = vm_area_alloc(mm); in alloc_vma()
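Only the mm-touching lines of alloc_vma() match the search, but its shape is clear from them: wrap vm_area_alloc(mm) and fill in the caller's range. A sketch of the presumed body; the flags-field assignment may differ by kernel version:

    static struct vm_area_struct *alloc_vma(struct mm_struct *mm,
                                            unsigned long start, unsigned long end,
                                            pgoff_t pgoff, vm_flags_t flags)
    {
            struct vm_area_struct *ret = vm_area_alloc(mm);

            if (ret == NULL)
                    return NULL;

            /* presumed: populate the requested range and flags */
            ret->vm_start = start;
            ret->vm_end = end;
            ret->vm_pgoff = pgoff;
            ret->__vm_flags = flags;   /* direct write, since vm_flags is const-qualified */

            return ret;
    }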
69 static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm, in alloc_and_link_vma() argument
75 struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags); in alloc_and_link_vma()
80 if (vma_link(mm, vma)) { in alloc_and_link_vma()
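alloc_and_link_vma() combines the allocator with vma_link(), which inserts the VMA into the mm's maple tree; per the matched lines it checks vma_link()'s return value, presumably unwinding the allocation on failure:

    static struct vm_area_struct *alloc_and_link_vma(struct mm_struct *mm,
                                                     unsigned long start,
                                                     unsigned long end,
                                                     pgoff_t pgoff, vm_flags_t flags)
    {
            struct vm_area_struct *vma = alloc_vma(mm, start, end, pgoff, flags);

            if (vma == NULL)
                    return NULL;

            if (vma_link(mm, vma)) {
                    /* could not insert into the maple tree: undo the allocation */
                    vm_area_free(vma);
                    return NULL;
            }

            return vma;
    }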
152 static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm, in try_merge_new_vma() argument
173 return alloc_and_link_vma(mm, start, end, pgoff, flags); in try_merge_new_vma()
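try_merge_new_vma() is the merge-or-allocate helper the test_merge_new() cases lean on: configure the merge descriptor for the proposed range, attempt the merge, report via an out-parameter whether it happened, and otherwise fall back to alloc_and_link_vma() (the matched line 173). The descriptor-setting helper and merge entry point named here are assumptions; exact names vary across kernel versions:

    static struct vm_area_struct *try_merge_new_vma(struct mm_struct *mm,
                                                    struct vma_merge_struct *vmg,
                                                    unsigned long start,
                                                    unsigned long end,
                                                    pgoff_t pgoff, vm_flags_t flags,
                                                    bool *was_merged)
    {
            struct vm_area_struct *merged;

            /* hypothetical helper that (re)points vmg at the proposed range */
            vmg_set_range(vmg, start, end, pgoff, flags);

            merged = vma_merge_new_range(vmg);
            if (merged != NULL) {
                    *was_merged = true;
                    return merged;
            }

            *was_merged = false;
            /* no mergeable neighbour: create a standalone VMA instead */
            return alloc_and_link_vma(mm, start, end, pgoff, flags);
    }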
190 static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi) in cleanup_mm() argument
204 mtree_destroy(&mm->mm_mt); in cleanup_mm()
205 mm->map_count = 0; in cleanup_mm()
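cleanup_mm() is the common teardown: it frees every VMA still linked into the mm, destroys the maple tree, and zeroes map_count. Its return value is the number of VMAs freed, which is why many tests assert on cleanup_mm() directly to check how many VMAs survived a merge. A sketch; the comment on line 1435 indicates the real helper also resets the fail_prealloc injection flag covered further down:

    static int cleanup_mm(struct mm_struct *mm, struct vma_iterator *vmi)
    {
            struct vm_area_struct *vma;
            int count = 0;

            fail_prealloc = false;    /* re-arm the prealloc fault injection (see below) */

            vma_iter_set(vmi, 0);
            for_each_vma(*vmi, vma) {
                    vm_area_free(vma);
                    count++;
            }

            mtree_destroy(&mm->mm_mt);
            mm->map_count = 0;
            return count;
    }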
230 struct mm_struct mm = {}; in test_simple_merge() local
231 struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, flags); in test_simple_merge()
232 struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, flags); in test_simple_merge()
233 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_merge()
235 .mm = &mm, in test_simple_merge()
243 ASSERT_FALSE(vma_link(&mm, vma_left)); in test_simple_merge()
244 ASSERT_FALSE(vma_link(&mm, vma_right)); in test_simple_merge()
255 mtree_destroy(&mm.mm_mt); in test_simple_merge()
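test_simple_merge() is the canonical gap-fill: two compatible VMAs at [0, 0x1000) and [0x2000, 0x3000) with pgoffs 0 and 2, and a proposed [0x1000, 0x2000) at pgoff 1 so file offsets line up across all three. A sketch of the full test under those assumptions; the merge wrapper name is hypothetical and descriptor field names may vary:

    static bool test_simple_merge(void)
    {
            vm_flags_t flags = VM_READ | VM_WRITE | VM_MAYREAD | VM_MAYWRITE;  /* assumed default */
            struct mm_struct mm = {};
            struct vm_area_struct *vma_left = alloc_vma(&mm, 0, 0x1000, 0, flags);
            struct vm_area_struct *vma_right = alloc_vma(&mm, 0x2000, 0x3000, 2, flags);
            VMA_ITERATOR(vmi, &mm, 0x1000);
            struct vma_merge_struct vmg = {
                    .mm = &mm,
                    .vmi = &vmi,
                    .start = 0x1000,
                    .end = 0x2000,
                    .flags = flags,
                    .pgoff = 1,
            };
            struct vm_area_struct *vma;

            ASSERT_FALSE(vma_link(&mm, vma_left));
            ASSERT_FALSE(vma_link(&mm, vma_right));

            vma = merge_new(&vmg);    /* hypothetical wrapper around the merge entry point */
            ASSERT_NE(vma, NULL);

            /* all three single-page ranges should fuse into one VMA */
            ASSERT_EQ(vma->vm_start, 0);
            ASSERT_EQ(vma->vm_end, 0x3000);
            ASSERT_EQ(vma->vm_pgoff, 0);

            vm_area_free(vma);
            mtree_destroy(&mm.mm_mt);
            return true;
    }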
264 struct mm_struct mm = {}; in test_simple_modify() local
265 struct vm_area_struct *init_vma = alloc_vma(&mm, 0, 0x3000, 0, flags); in test_simple_modify()
266 VMA_ITERATOR(vmi, &mm, 0x1000); in test_simple_modify()
268 ASSERT_FALSE(vma_link(&mm, init_vma)); in test_simple_modify()
315 mtree_destroy(&mm.mm_mt); in test_simple_modify()
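test_simple_modify() starts from a single [0, 0x3000) VMA and modifies its middle page, forcing a three-way split. A sketch of the core step, assuming a vma_modify_flags()-style entry point (the helper the mprotect path uses):

    /* change protections on the middle page only: [0x1000, 0x2000) */
    vma = vma_modify_flags(&vmi, init_vma, init_vma,
                           0x1000, 0x2000, VM_READ | VM_MAYREAD);
    ASSERT_NE(vma, NULL);

    /* the returned middle fragment carries the new range... */
    ASSERT_EQ(vma->vm_start, 0x1000);
    ASSERT_EQ(vma->vm_end, 0x2000);
    /* ...while [0, 0x1000) and [0x2000, 0x3000) remain as split-off VMAs,
     * so the mm now holds three VMAs where there was one */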
323 struct mm_struct mm = {}; in test_simple_expand() local
324 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x1000, 0, flags); in test_simple_expand()
325 VMA_ITERATOR(vmi, &mm, 0); in test_simple_expand()
334 ASSERT_FALSE(vma_link(&mm, vma)); in test_simple_expand()
343 mtree_destroy(&mm.mm_mt); in test_simple_expand()
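test_simple_expand() grows a linked [0, 0x1000) VMA in place to [0, 0x3000). A sketch of the expected sequence, assuming vma_expand() takes the merge descriptor and returns 0 on success:

    struct vma_merge_struct vmg = {
            .vmi = &vmi,
            .vma = vma,       /* the VMA to grow in place */
            .start = 0,
            .end = 0x3000,
            .pgoff = 0,
    };

    ASSERT_FALSE(vma_link(&mm, vma));
    ASSERT_FALSE(vma_expand(&vmg));

    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x3000);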
351 struct mm_struct mm = {}; in test_simple_shrink() local
352 struct vm_area_struct *vma = alloc_vma(&mm, 0, 0x3000, 0, flags); in test_simple_shrink()
353 VMA_ITERATOR(vmi, &mm, 0); in test_simple_shrink()
355 ASSERT_FALSE(vma_link(&mm, vma)); in test_simple_shrink()
364 mtree_destroy(&mm.mm_mt); in test_simple_shrink()
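test_simple_shrink() is the inverse: vma_shrink() trims a linked [0, 0x3000) VMA down to its first page. Sketch:

    ASSERT_FALSE(vma_link(&mm, vma));

    /* trim [0, 0x3000) down to [0, 0x1000); returns 0 on success */
    ASSERT_FALSE(vma_shrink(&vmi, vma, 0, 0x1000, 0));

    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x1000);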
372 struct mm_struct mm = {}; in test_merge_new() local
373 VMA_ITERATOR(vmi, &mm, 0); in test_merge_new()
375 .mm = &mm, in test_merge_new()
401 vma_a = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_merge_new()
407 vma_b = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags); in test_merge_new()
412 vma_c = alloc_and_link_vma(&mm, 0xb000, 0xc000, 0xb, flags); in test_merge_new()
423 vma_d = try_merge_new_vma(&mm, &vmg, 0x7000, 0x9000, 7, flags, &merged); in test_merge_new()
428 ASSERT_EQ(mm.map_count, 4); in test_merge_new()
438 vma = try_merge_new_vma(&mm, &vmg, 0x2000, 0x3000, 2, flags, &merged); in test_merge_new()
447 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
455 vma = try_merge_new_vma(&mm, &vmg, 0x4000, 0x5000, 4, flags, &merged); in test_merge_new()
464 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
474 vma = try_merge_new_vma(&mm, &vmg, 0x6000, 0x7000, 6, flags, &merged); in test_merge_new()
483 ASSERT_EQ(mm.map_count, 3); in test_merge_new()
492 vma = try_merge_new_vma(&mm, &vmg, 0x5000, 0x6000, 5, flags, &merged); in test_merge_new()
501 ASSERT_EQ(mm.map_count, 2); in test_merge_new()
510 vma = try_merge_new_vma(&mm, &vmg, 0xa000, 0xb000, 0xa, flags, &merged); in test_merge_new()
519 ASSERT_EQ(mm.map_count, 2); in test_merge_new()
527 vma = try_merge_new_vma(&mm, &vmg, 0x9000, 0xa000, 0x9, flags, &merged); in test_merge_new()
536 ASSERT_EQ(mm.map_count, 1); in test_merge_new()
561 mtree_destroy(&mm.mm_mt); in test_merge_new()
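The addresses in test_merge_new() describe a 12-page address space with VMAs A, B, C and later D separated by gaps, each subsequent try_merge_new_vma() call plugging one gap. The map_count assertions (4, then 3, 3, 3, 2, 2, 1) track how the merges progressively collapse the layout into a single VMA. A comment-diagram of the layout implied by the listed calls, plus the presumed final check (the iterator accessor is harness-dependent):

    /*
     * One cell per 0x1000 page (addresses 0x0000 .. 0xc000):
     *
     *   0    1    2    3    4    5    6    7    8    9    a    b
     *   | A  | A  | .  | B  | .  | .  | .  | D  | D  | .  | .  | C  |
     *
     * '.' cells are gaps; each try_merge_new_vma() fills one of them,
     * until everything fuses into a single VMA spanning [0, 0xc000).
     */
    vma_iter_set(&vmi, 0);
    vma = vma_iter_load(&vmi);
    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0xc000);
    ASSERT_EQ(mm.map_count, 1);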
568 struct mm_struct mm = {}; in test_vma_merge_special_flags() local
569 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_special_flags()
571 .mm = &mm, in test_vma_merge_special_flags()
589 vma_left = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_special_flags()
619 vma = alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags); in test_vma_merge_special_flags()
633 cleanup_mm(&mm, &vmi); in test_vma_merge_special_flags()
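test_vma_merge_special_flags() checks that any VM_SPECIAL bit on the proposed range defeats merging with the otherwise compatible vma_left; VM_SPECIAL is VM_IO | VM_DONTEXPAND | VM_PFNMAP | VM_MIXEDMAP. A sketch of the presumed loop; the merge wrapper name is hypothetical:

    vm_flags_t special_flags[] = { VM_IO, VM_DONTEXPAND, VM_PFNMAP, VM_MIXEDMAP };
    int i;

    for (i = 0; i < ARRAY_SIZE(special_flags); i++) {
            /* taint the proposed range with one special flag at a time */
            vmg.flags = flags | special_flags[i];

            vma = merge_new(&vmg);  /* hypothetical wrapper */
            ASSERT_EQ(vma, NULL);   /* the merge must be refused */
    }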
640 struct mm_struct mm = {}; in test_vma_merge_with_close() local
641 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_with_close()
643 .mm = &mm, in test_vma_merge_with_close()
719 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
720 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
730 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
744 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
745 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
759 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
772 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
773 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
786 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
800 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
801 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
802 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
812 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_vma_merge_with_close()
826 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vma_merge_with_close()
827 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vma_merge_with_close()
828 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x9000, 5, flags); in test_vma_merge_with_close()
841 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_vma_merge_with_close()
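All five test_vma_merge_with_close() scenarios revolve around one rule: a VMA whose vm_ops has a .close hook must not be deleted as a side effect of merging, since the merge path has no sensible point at which to run the hook; expanding such a VMA is fine. The cleanup_mm() return values (2 or 3) encode which merges were therefore permitted. The blocking ingredient presumably looks like the following, with each scenario then marking a different participant via vma->vm_ops = &vm_ops:

    /* the mere presence of a .close callback pins a VMA against
     * deletion-by-merge; the hook body itself can be empty */
    static void dummy_close(struct vm_area_struct *vma)
    {
    }

    static const struct vm_operations_struct vm_ops = {
            .close = dummy_close,
    };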
849 struct mm_struct mm = {}; in test_vma_merge_new_with_close() local
850 VMA_ITERATOR(vmi, &mm, 0); in test_vma_merge_new_with_close()
852 .mm = &mm, in test_vma_merge_new_with_close()
855 struct vm_area_struct *vma_prev = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_vma_merge_new_with_close()
856 struct vm_area_struct *vma_next = alloc_and_link_vma(&mm, 0x5000, 0x7000, 5, flags); in test_vma_merge_new_with_close()
895 ASSERT_EQ(mm.map_count, 2); in test_vma_merge_new_with_close()
897 cleanup_mm(&mm, &vmi); in test_vma_merge_new_with_close()
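test_vma_merge_new_with_close() presumably proposes the gap [0x2000, 0x5000) between the two VMAs, both carrying the .close-bearing vm_ops. Merging backwards merely expands vma_prev (allowed), but a three-way merge would delete vma_next (blocked by its .close), so the expected outcome, hedged, is:

    vma = merge_new(&vmg);            /* hypothetical wrapper, as above */
    ASSERT_NE(vma, NULL);

    /* prev absorbed the new range; next survived untouched */
    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x5000);
    ASSERT_EQ(mm.map_count, 2);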
904 struct mm_struct mm = {}; in test_merge_existing() local
905 VMA_ITERATOR(vmi, &mm, 0); in test_merge_existing()
908 .mm = &mm, in test_merge_existing()
925 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); in test_merge_existing()
927 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); in test_merge_existing()
944 ASSERT_EQ(mm.map_count, 2); in test_merge_existing()
947 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_merge_existing()
959 vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags); in test_merge_existing()
960 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags); in test_merge_existing()
972 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
975 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
987 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
989 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_merge_existing()
1007 ASSERT_EQ(mm.map_count, 2); in test_merge_existing()
1010 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_merge_existing()
1022 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1024 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_merge_existing()
1036 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1039 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1051 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1053 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_merge_existing()
1054 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); in test_merge_existing()
1066 ASSERT_EQ(mm.map_count, 1); in test_merge_existing()
1069 ASSERT_EQ(cleanup_mm(&mm, &vmi), 1); in test_merge_existing()
1086 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_merge_existing()
1087 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); in test_merge_existing()
1088 vma_next = alloc_and_link_vma(&mm, 0x8000, 0xa000, 8, flags); in test_merge_existing()
1126 ASSERT_EQ(cleanup_mm(&mm, &vmi), 3); in test_merge_existing()
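The test_merge_existing() cases modify part of an existing VMA (as mprotect() would) and check which neighbours absorb the retagged span: merge with next only, with prev only, with both, plus the failure cases around lines 1086-1126 where offsets or ranges are deliberately incompatible. A sketch of the first scenario, assuming vmg_set_range() and a merge_existing()-style wrapper: retagging the tail [0x3000, 0x6000) of the middle VMA lets vma_next absorb it, leaving the head behind:

    vma = alloc_and_link_vma(&mm, 0x2000, 0x6000, 2, flags);
    vma_next = alloc_and_link_vma(&mm, 0x6000, 0x9000, 6, flags);

    vmg_set_range(&vmg, 0x3000, 0x6000, 3, flags);  /* hypothetical helper */
    vmg.vma = vma;      /* the VMA whose tail is being modified */
    vmg.prev = vma;

    ASSERT_EQ(merge_existing(&vmg), vma_next);      /* hypothetical wrapper */

    ASSERT_EQ(vma_next->vm_start, 0x3000);  /* next grew backwards over the tail */
    ASSERT_EQ(vma->vm_end, 0x3000);         /* the original kept only its head */
    ASSERT_EQ(mm.map_count, 2);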
1134 struct mm_struct mm = {}; in test_anon_vma_non_mergeable() local
1135 VMA_ITERATOR(vmi, &mm, 0); in test_anon_vma_non_mergeable()
1138 .mm = &mm, in test_anon_vma_non_mergeable()
1160 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_anon_vma_non_mergeable()
1161 vma = alloc_and_link_vma(&mm, 0x3000, 0x7000, 3, flags); in test_anon_vma_non_mergeable()
1162 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); in test_anon_vma_non_mergeable()
1198 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
1211 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_anon_vma_non_mergeable()
1212 vma_next = alloc_and_link_vma(&mm, 0x7000, 0x9000, 7, flags); in test_anon_vma_non_mergeable()
1234 ASSERT_EQ(cleanup_mm(&mm, &vmi), 2); in test_anon_vma_non_mergeable()
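test_anon_vma_non_mergeable() arranges prev and next to be compatible in every respect except their anon_vma pointers; anonymous mappings that have faulted in different anon_vmas must never fuse, so both teardown checks expect two surviving VMAs. The essential setup, assuming two file-scope dummy anon_vma objects:

    /* assumed file-scope dummies:
     * static struct anon_vma dummy_anon_vma_1, dummy_anon_vma_2; */
    vma_prev->anon_vma = &dummy_anon_vma_1;
    vma_next->anon_vma = &dummy_anon_vma_2;

    /* any merge attempt across the pair must fail: both VMAs survive */
    ASSERT_EQ(cleanup_mm(&mm, &vmi), 2);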
1242 struct mm_struct mm = {}; in test_dup_anon_vma() local
1243 VMA_ITERATOR(vmi, &mm, 0); in test_dup_anon_vma()
1245 .mm = &mm, in test_dup_anon_vma()
1262 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1263 vma_next = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_dup_anon_vma()
1277 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1288 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1289 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_dup_anon_vma()
1290 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); in test_dup_anon_vma()
1310 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1321 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1322 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_dup_anon_vma()
1323 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); in test_dup_anon_vma()
1339 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1350 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_dup_anon_vma()
1351 vma = alloc_and_link_vma(&mm, 0x3000, 0x8000, 3, flags); in test_dup_anon_vma()
1367 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
1378 vma = alloc_and_link_vma(&mm, 0, 0x5000, 0, flags); in test_dup_anon_vma()
1379 vma_next = alloc_and_link_vma(&mm, 0x5000, 0x8000, 5, flags); in test_dup_anon_vma()
1395 cleanup_mm(&mm, &vmi); in test_dup_anon_vma()
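test_dup_anon_vma() covers the bookkeeping dual of the previous test: when a merge deletes a VMA that already has an anon_vma, the surviving VMA must take over a cloned reference to it, or already-faulted pages would lose their reverse mapping. A hedged sketch of the property being asserted, where was_cloned is assumed to be a harness-only instrumentation flag on the stubbed anon_vma type:

    vma_next->anon_vma = &dummy_anon_vma;   /* assumed file-scope dummy */

    /* ... perform the merge in which vma_prev expands over vma_next ... */

    /* the survivor inherited the anon_vma via a clone, not a plain copy */
    ASSERT_EQ(vma_prev->anon_vma, &dummy_anon_vma);
    ASSERT_TRUE(vma_prev->anon_vma->was_cloned);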
1402 struct mm_struct mm = {}; in test_vmi_prealloc_fail() local
1403 VMA_ITERATOR(vmi, &mm, 0); in test_vmi_prealloc_fail()
1405 .mm = &mm, in test_vmi_prealloc_fail()
1416 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vmi_prealloc_fail()
1417 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vmi_prealloc_fail()
1435 cleanup_mm(&mm, &vmi); /* Resets fail_prealloc too. */ in test_vmi_prealloc_fail()
1443 vma_prev = alloc_and_link_vma(&mm, 0, 0x3000, 0, flags); in test_vmi_prealloc_fail()
1444 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_vmi_prealloc_fail()
1459 cleanup_mm(&mm, &vmi); in test_vmi_prealloc_fail()
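test_vmi_prealloc_fail() injects allocation failure into the iterator's maple-tree preallocation step to prove merges unwind cleanly under -ENOMEM. The flag name fail_prealloc is confirmed by the cleanup comment on line 1435; the macro override below is a plausible way the harness wires it in, sketched on the assumption that vma_iter_prealloc() normally forwards to mas_preallocate():

    static bool fail_prealloc;

    /* shadow the real helper so a test can force -ENOMEM on demand */
    #define vma_iter_prealloc(vmi, vma)                                     \
            (fail_prealloc ? -ENOMEM :                                      \
             mas_preallocate(&(vmi)->mas, (vma), GFP_KERNEL))

A test then sets fail_prealloc = true before the merge attempt and asserts afterwards that the VMA layout is unchanged.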
1466 struct mm_struct mm = {}; in test_merge_extend() local
1467 VMA_ITERATOR(vmi, &mm, 0x1000); in test_merge_extend()
1470 vma = alloc_and_link_vma(&mm, 0, 0x1000, 0, flags); in test_merge_extend()
1471 alloc_and_link_vma(&mm, 0x3000, 0x4000, 3, flags); in test_merge_extend()
1487 ASSERT_EQ(mm.map_count, 1); in test_merge_extend()
1489 cleanup_mm(&mm, &vmi); in test_merge_extend()
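test_merge_extend() drives the mremap-style helper vma_merge_extend(vmi, vma, delta): growing [0, 0x1000) by 0x2000 makes it abut the [0x3000, 0x4000) VMA, and with compatible pgoffs the two fuse, which the map_count == 1 assertion implies. Sketch:

    struct vm_area_struct *ret = vma_merge_extend(&vmi, vma, 0x2000);

    ASSERT_EQ(ret, vma);
    ASSERT_EQ(vma->vm_start, 0);
    ASSERT_EQ(vma->vm_end, 0x4000);   /* extended, then merged with the neighbour */
    ASSERT_EQ(mm.map_count, 1);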
1496 struct mm_struct mm = {}; in test_copy_vma() local
1498 VMA_ITERATOR(vmi, &mm, 0); in test_copy_vma()
1503 vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_copy_vma()
1511 cleanup_mm(&mm, &vmi); in test_copy_vma()
1515 vma = alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_copy_vma()
1516 vma_next = alloc_and_link_vma(&mm, 0x6000, 0x8000, 6, flags); in test_copy_vma()
1521 cleanup_mm(&mm, &vmi); in test_copy_vma()
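test_copy_vma() exercises the mremap move helper copy_vma(&vma, addr, len, pgoff, &need_rmap_locks) in two configurations: copying [0x3000, 0x5000) to an empty spot (a fresh VMA is created), then, per the second setup, copying next to an existing compatible VMA so the copy should merge into it. A sketch of the first case:

    bool need_locks = false;
    struct vm_area_struct *vma_new;

    vma = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags);
    vma_new = copy_vma(&vma, 0, 0x2000, 0, &need_locks);

    ASSERT_NE(vma_new, vma);        /* nothing to merge with: a new VMA */
    ASSERT_EQ(vma_new->vm_start, 0);
    ASSERT_EQ(vma_new->vm_end, 0x2000);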
1528 struct mm_struct mm = {}; in test_expand_only_mode() local
1529 VMA_ITERATOR(vmi, &mm, 0); in test_expand_only_mode()
1531 VMG_STATE(vmg, &mm, &vmi, 0x5000, 0x9000, flags, 5); in test_expand_only_mode()
1539 alloc_and_link_vma(&mm, 0, 0x2000, 0, flags); in test_expand_only_mode()
1546 vma_prev = alloc_and_link_vma(&mm, 0x3000, 0x5000, 3, flags); in test_expand_only_mode()
1560 cleanup_mm(&mm, &vmi); in test_expand_only_mode()
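test_expand_only_mode() uses VMG_STATE() to declare the descriptor inline and sets an expand-only mode: with vmg.prev supplied by the caller, the merge logic may only grow vma_prev forward over [0x5000, 0x9000) and must not walk backwards looking for other candidates, which is why an unrelated VMA is parked at [0, 0x2000) as a tripwire. A hedged sketch; the mode-flag name varies across kernel versions:

    vmg.prev = vma_prev;
    vmg.merge_flags = VMG_FLAG_JUST_EXPAND;   /* assumed expand-only mode flag */

    vma = vma_merge_new_range(&vmg);
    ASSERT_NE(vma, NULL);
    ASSERT_EQ(vma, vma_prev);                 /* prev itself was expanded... */
    ASSERT_EQ(vma->vm_start, 0x3000);
    ASSERT_EQ(vma->vm_end, 0x9000);           /* ...to cover the requested range */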