Lines matching refs: dev_dax
71 struct dev_dax *dev_dax = to_dev_dax(dev); in dax_match_type() local
73 if (dev_dax->region->res.flags & IORESOURCE_DAX_KMEM) in dax_match_type()
181 bool static_dev_dax(struct dev_dax *dev_dax) in static_dev_dax() argument
183 return is_static(dev_dax->region); in static_dev_dax()
187 static u64 dev_dax_size(struct dev_dax *dev_dax) in dev_dax_size() argument
194 for (i = 0; i < dev_dax->nr_range; i++) in dev_dax_size()
195 size += range_len(&dev_dax->ranges[i].range); in dev_dax_size()
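
Lines 187-195 give essentially the whole body of dev_dax_size(): a device's size is just the sum of its allocated ranges. A minimal reconstruction of that shape; range_len() is the generic helper from include/linux/range.h (end - start + 1):

static u64 dev_dax_size(struct dev_dax *dev_dax)
{
	u64 size = 0;
	int i;

	/* sum the lengths of every range currently backing this device */
	for (i = 0; i < dev_dax->nr_range; i++)
		size += range_len(&dev_dax->ranges[i].range);

	return size;
}
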
203 struct dev_dax *dev_dax = to_dev_dax(dev); in dax_bus_probe() local
204 struct dax_region *dax_region = dev_dax->region; in dax_bus_probe()
211 size = dev_dax_size(dev_dax); in dax_bus_probe()
214 if (size == 0 || dev_dax->id < 0) in dax_bus_probe()
217 rc = dax_drv->probe(dev_dax); in dax_bus_probe()
235 struct dev_dax *dev_dax = to_dev_dax(dev); in dax_bus_remove() local
238 dax_drv->remove(dev_dax); in dax_bus_remove()
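
The probe hook at 203-217 gates driver binding on the device actually having capacity and an assigned id; dax_bus_remove() at 235-238 simply forwards to the driver's remove hook. A hedged sketch of the probe gate; the -ENXIO return value, the to_dax_drv() conversion, and the region seed bookkeeping that the full function does after a successful probe (note the dax_region local at 204) are assumptions or omissions here:

static int dax_bus_probe(struct device *dev)
{
	struct dax_device_driver *dax_drv = to_dax_drv(dev->driver);
	struct dev_dax *dev_dax = to_dev_dax(dev);
	u64 size = dev_dax_size(dev_dax);

	/* no capacity, or a seed device with no id yet: nothing to bind */
	if (size == 0 || dev_dax->id < 0)
		return -ENXIO;

	return dax_drv->probe(dev_dax);
}
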
367 static struct dev_dax *__devm_create_dev_dax(struct dev_dax_data *data);
399 struct dev_dax *dev_dax = __devm_create_dev_dax(&data); in create_store() local
401 if (IS_ERR(dev_dax)) in create_store()
402 rc = PTR_ERR(dev_dax); in create_store()
412 dax_region->seed = &dev_dax->dev; in create_store()
413 dax_region->youngest = &dev_dax->dev; in create_store()
423 void kill_dev_dax(struct dev_dax *dev_dax) in kill_dev_dax() argument
425 struct dax_device *dax_dev = dev_dax->dax_dev; in kill_dev_dax()
436 if (!static_dev_dax(dev_dax)) in kill_dev_dax()
437 dev_dax->pgmap = NULL; in kill_dev_dax()
441 static void trim_dev_dax_range(struct dev_dax *dev_dax) in trim_dev_dax_range() argument
443 int i = dev_dax->nr_range - 1; in trim_dev_dax_range()
444 struct range *range = &dev_dax->ranges[i].range; in trim_dev_dax_range()
445 struct dax_region *dax_region = dev_dax->region; in trim_dev_dax_range()
448 dev_dbg(&dev_dax->dev, "delete range[%d]: %#llx:%#llx\n", i, in trim_dev_dax_range()
453 if (--dev_dax->nr_range == 0) { in trim_dev_dax_range()
454 kfree(dev_dax->ranges); in trim_dev_dax_range()
455 dev_dax->ranges = NULL; in trim_dev_dax_range()
459 static void free_dev_dax_ranges(struct dev_dax *dev_dax) in free_dev_dax_ranges() argument
461 while (dev_dax->nr_range) in free_dev_dax_ranges()
462 trim_dev_dax_range(dev_dax); in free_dev_dax_ranges()
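
Lines 441-462 show that ranges are only ever released from the tail: trim_dev_dax_range() drops the last entry and frees the array once it is empty, and free_dev_dax_ranges() just loops that until nr_range reaches zero. A sketch assembled from those fragments; the __release_region() call that hands the span back to the region's resource tree is an assumption, it is not visible in the matches:

static void trim_dev_dax_range(struct dev_dax *dev_dax)
{
	int i = dev_dax->nr_range - 1;
	struct range *range = &dev_dax->ranges[i].range;
	struct dax_region *dax_region = dev_dax->region;

	dev_dbg(&dev_dax->dev, "delete range[%d]: %#llx:%#llx\n", i,
		(unsigned long long)range->start,
		(unsigned long long)range->end);

	/* assumed: return the span to the parent dax_region resource */
	__release_region(&dax_region->res, range->start, range_len(range));
	if (--dev_dax->nr_range == 0) {
		kfree(dev_dax->ranges);
		dev_dax->ranges = NULL;
	}
}

static void free_dev_dax_ranges(struct dev_dax *dev_dax)
{
	while (dev_dax->nr_range)
		trim_dev_dax_range(dev_dax);
}
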
467 struct dev_dax *dev_dax = to_dev_dax(dev); in unregister_dev_dax() local
472 kill_dev_dax(dev_dax); in unregister_dev_dax()
474 free_dev_dax_ranges(dev_dax); in unregister_dev_dax()
493 static int __free_dev_dax_id(struct dev_dax *dev_dax) in __free_dev_dax_id() argument
496 int rc = dev_dax->id; in __free_dev_dax_id()
500 if (!dev_dax->dyn_id || dev_dax->id < 0) in __free_dev_dax_id()
502 dax_region = dev_dax->region; in __free_dev_dax_id()
503 ida_free(&dax_region->ida, dev_dax->id); in __free_dev_dax_id()
505 dev_dax->id = -1; in __free_dev_dax_id()
509 static int free_dev_dax_id(struct dev_dax *dev_dax) in free_dev_dax_id() argument
516 rc = __free_dev_dax_id(dev_dax); in free_dev_dax_id()
521 static int alloc_dev_dax_id(struct dev_dax *dev_dax) in alloc_dev_dax_id() argument
523 struct dax_region *dax_region = dev_dax->region; in alloc_dev_dax_id()
530 dev_dax->dyn_id = true; in alloc_dev_dax_id()
531 dev_dax->id = id; in alloc_dev_dax_id()
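
Lines 493-531 pair the id helpers: alloc_dev_dax_id() pulls an instance id from the region's IDA and marks it dyn_id, while __free_dev_dax_id() only returns ids that were allocated that way, so statically numbered devices (ids handed in by the bus driver) are never pushed into the IDA. free_dev_dax_id() at 509 is a wrapper around __free_dev_dax_id(). A condensed sketch; the exact return conventions are assumptions:

static int __free_dev_dax_id(struct dev_dax *dev_dax)
{
	struct dax_region *dax_region;
	int rc = dev_dax->id;

	/* only dynamically allocated ids go back to the region's ida */
	if (!dev_dax->dyn_id || dev_dax->id < 0)
		return -1;
	dax_region = dev_dax->region;
	ida_free(&dax_region->ida, dev_dax->id);
	dev_dax->id = -1;
	return rc;
}

static int alloc_dev_dax_id(struct dev_dax *dev_dax)
{
	struct dax_region *dax_region = dev_dax->region;
	int id;

	id = ida_alloc(&dax_region->ida, GFP_KERNEL);
	if (id < 0)
		return id;
	dev_dax->dyn_id = true;
	dev_dax->id = id;
	return id;
}
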
539 struct dev_dax *dev_dax; in delete_store() local
553 dev_dax = to_dev_dax(victim); in delete_store()
555 if (victim->driver || dev_dax_size(dev_dax)) in delete_store()
564 if (dev_dax->id > 0) { in delete_store()
565 do_del = __free_dev_dax_id(dev_dax) >= 0; in delete_store()
685 struct dev_dax *dev_dax = to_dev_dax(parent); in dax_mapping_release() local
687 ida_free(&dev_dax->ida, mapping->id); in dax_mapping_release()
696 struct dev_dax *dev_dax = to_dev_dax(dev->parent); in unregister_dax_mapping() local
700 dev_dax->ranges[mapping->range_id].mapping = NULL; in unregister_dax_mapping()
709 struct dev_dax *dev_dax = to_dev_dax(dev->parent); in get_dax_range() local
720 return &dev_dax->ranges[mapping->range_id]; in get_dax_range()
797 static int devm_register_dax_mapping(struct dev_dax *dev_dax, int range_id) in devm_register_dax_mapping() argument
799 struct dax_region *dax_region = dev_dax->region; in devm_register_dax_mapping()
806 if (dev_WARN_ONCE(&dev_dax->dev, !dax_region->dev->driver, in devm_register_dax_mapping()
814 mapping->id = ida_alloc(&dev_dax->ida, GFP_KERNEL); in devm_register_dax_mapping()
819 dev_dax->ranges[range_id].mapping = mapping; in devm_register_dax_mapping()
822 dev->parent = &dev_dax->dev; in devm_register_dax_mapping()
839 static int alloc_dev_dax_range(struct dev_dax *dev_dax, u64 start, in alloc_dev_dax_range() argument
842 struct dax_region *dax_region = dev_dax->region; in alloc_dev_dax_range()
844 struct device *dev = &dev_dax->dev; in alloc_dev_dax_range()
854 if (dev_WARN_ONCE(dev, dev_dax->nr_range, in alloc_dev_dax_range()
865 ranges = krealloc(dev_dax->ranges, sizeof(*ranges) in alloc_dev_dax_range()
866 * (dev_dax->nr_range + 1), GFP_KERNEL); in alloc_dev_dax_range()
872 for (i = 0; i < dev_dax->nr_range; i++) in alloc_dev_dax_range()
874 dev_dax->ranges = ranges; in alloc_dev_dax_range()
875 ranges[dev_dax->nr_range++] = (struct dev_dax_range) { in alloc_dev_dax_range()
883 dev_dbg(dev, "alloc range[%d]: %pa:%pa\n", dev_dax->nr_range - 1, in alloc_dev_dax_range()
890 if (!device_is_registered(&dev_dax->dev)) in alloc_dev_dax_range()
893 rc = devm_register_dax_mapping(dev_dax, dev_dax->nr_range - 1); in alloc_dev_dax_range()
895 trim_dev_dax_range(dev_dax); in alloc_dev_dax_range()
900 static int adjust_dev_dax_range(struct dev_dax *dev_dax, struct resource *res, resource_size_t size) in adjust_dev_dax_range() argument
902 int last_range = dev_dax->nr_range - 1; in adjust_dev_dax_range()
903 struct dev_dax_range *dax_range = &dev_dax->ranges[last_range]; in adjust_dev_dax_range()
906 struct device *dev = &dev_dax->dev; in adjust_dev_dax_range()
933 struct dev_dax *dev_dax = to_dev_dax(dev); in size_show() local
940 size = dev_dax_size(dev_dax); in size_show()
946 static bool alloc_is_aligned(struct dev_dax *dev_dax, resource_size_t size) in alloc_is_aligned() argument
952 return IS_ALIGNED(size, max_t(unsigned long, dev_dax->align, memremap_compat_align())); in alloc_is_aligned()
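
The single statement at 952 encodes the allocation granularity rule: a size is acceptable only if it is aligned to the larger of the device's own align and memremap_compat_align(), the minimum mapping unit memremap_pages() can handle. Spelled out as a complete function:

static bool alloc_is_aligned(struct dev_dax *dev_dax, resource_size_t size)
{
	/* never allow an allocation below what memremap_pages() can map */
	return IS_ALIGNED(size, max_t(unsigned long, dev_dax->align,
				      memremap_compat_align()));
}
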
955 static int dev_dax_shrink(struct dev_dax *dev_dax, resource_size_t size) in dev_dax_shrink() argument
957 resource_size_t to_shrink = dev_dax_size(dev_dax) - size; in dev_dax_shrink()
958 struct dax_region *dax_region = dev_dax->region; in dev_dax_shrink()
959 struct device *dev = &dev_dax->dev; in dev_dax_shrink()
962 for (i = dev_dax->nr_range - 1; i >= 0; i--) { in dev_dax_shrink()
963 struct range *range = &dev_dax->ranges[i].range; in dev_dax_shrink()
964 struct dax_mapping *mapping = dev_dax->ranges[i].mapping; in dev_dax_shrink()
972 trim_dev_dax_range(dev_dax); in dev_dax_shrink()
986 if (dev_WARN_ONCE(dev, !adjust || i != dev_dax->nr_range - 1, in dev_dax_shrink()
989 return adjust_dev_dax_range(dev_dax, adjust, range_len(range) in dev_dax_shrink()
999 static bool adjust_ok(struct dev_dax *dev_dax, struct resource *res) in adjust_ok() argument
1004 if (dev_dax->nr_range == 0) in adjust_ok()
1006 if (strcmp(res->name, dev_name(&dev_dax->dev)) != 0) in adjust_ok()
1008 last = &dev_dax->ranges[dev_dax->nr_range - 1]; in adjust_ok()
1011 for (i = 0; i < dev_dax->nr_range - 1; i++) { in adjust_ok()
1012 struct dev_dax_range *dax_range = &dev_dax->ranges[i]; in adjust_ok()
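
adjust_ok() at 999-1012 decides whether a grow can simply extend an existing resource in place instead of appending a new range: there must already be at least one range and the candidate resource must carry this device's name. A hedged sketch; the comparison against the device's last range and the page-offset ordering check in the loop body are assumptions, only the loop head is visible in the matches:

static bool adjust_ok(struct dev_dax *dev_dax, struct resource *res)
{
	struct dev_dax_range *last;
	int i;

	if (dev_dax->nr_range == 0)
		return false;
	if (strcmp(res->name, dev_name(&dev_dax->dev)) != 0)
		return false;
	last = &dev_dax->ranges[dev_dax->nr_range - 1];
	/* assumed: only the device's own tail range may be extended */
	if (last->range.start != res->start || last->range.end != res->end)
		return false;
	/* assumed: extending it must not leapfrog earlier page offsets */
	for (i = 0; i < dev_dax->nr_range - 1; i++) {
		struct dev_dax_range *dax_range = &dev_dax->ranges[i];

		if (dax_range->pgoff > last->pgoff)
			return false;
	}

	return true;
}
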
1022 struct dev_dax *dev_dax, resource_size_t size) in dev_dax_resize() argument
1025 resource_size_t dev_size = dev_dax_size(dev_dax); in dev_dax_resize()
1027 struct device *dev = &dev_dax->dev; in dev_dax_resize()
1039 return dev_dax_shrink(dev_dax, size); in dev_dax_resize()
1042 if (dev_WARN_ONCE(dev, !alloc_is_aligned(dev_dax, to_alloc), in dev_dax_resize()
1054 return alloc_dev_dax_range(dev_dax, dax_region->res.start, to_alloc); in dev_dax_resize()
1063 rc = alloc_dev_dax_range(dev_dax, dax_region->res.start, alloc); in dev_dax_resize()
1079 if (adjust_ok(dev_dax, res)) { in dev_dax_resize()
1080 rc = adjust_dev_dax_range(dev_dax, res, resource_size(res) + alloc); in dev_dax_resize()
1083 rc = alloc_dev_dax_range(dev_dax, res->end + 1, alloc); in dev_dax_resize()
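
The resize path at 1022-1083 lays out the policy: the same size is a no-op, a smaller size goes to dev_dax_shrink(), a misaligned grow is rejected, an empty region starts a new range at the region base (1054), and otherwise the region's resource tree is walked for free space, preferring an in-place extension of the device's own tail resource (adjust_ok() at 1079-1080) and falling back to a fresh range right after an existing resource (1083). A heavily condensed, hedged sketch of that decision order; the available-space accounting, the gap before the first child resource (1063), the retry loop for partially satisfied requests, and the exact error codes are simplified or omitted:

static ssize_t dev_dax_resize(struct dax_region *dax_region,
		struct dev_dax *dev_dax, resource_size_t size)
{
	resource_size_t dev_size = dev_dax_size(dev_dax);
	resource_size_t to_alloc;
	struct resource *res;

	if (size == dev_size)
		return 0;
	if (size < dev_size)
		return dev_dax_shrink(dev_dax, size);

	to_alloc = size - dev_size;
	if (!alloc_is_aligned(dev_dax, to_alloc))
		return -ENXIO;

	/* nothing allocated in the region yet: start at the region base */
	if (!dax_region->res.child)
		return alloc_dev_dax_range(dev_dax, dax_region->res.start,
					   to_alloc);

	/* walk the region's resources looking for a gap after each of them */
	for (res = dax_region->res.child; res; res = res->sibling) {
		struct resource *next = res->sibling;
		resource_size_t gap_start = res->end + 1;
		resource_size_t gap_end = next ? next->start - 1
					       : dax_region->res.end;
		resource_size_t alloc;

		if (gap_end < gap_start)
			continue;
		alloc = min(gap_end - gap_start + 1, to_alloc);

		/* prefer growing this device's own tail resource in place */
		if (adjust_ok(dev_dax, res))
			return adjust_dev_dax_range(dev_dax, res,
					resource_size(res) + alloc);
		return alloc_dev_dax_range(dev_dax, res->end + 1, alloc);
	}

	return -ENOSPC;
}
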
1099 struct dev_dax *dev_dax = to_dev_dax(dev); in size_store() local
1100 struct dax_region *dax_region = dev_dax->region; in size_store()
1106 if (!alloc_is_aligned(dev_dax, val)) { in size_store()
1122 rc = dev_dax_resize(dax_region, dev_dax, val); in size_store()
1168 struct dev_dax *dev_dax = to_dev_dax(dev); in mapping_store() local
1169 struct dax_region *dax_region = dev_dax->region; in mapping_store()
1192 if (alloc_is_aligned(dev_dax, to_alloc)) in mapping_store()
1193 rc = alloc_dev_dax_range(dev_dax, r.start, to_alloc); in mapping_store()
1204 struct dev_dax *dev_dax = to_dev_dax(dev); in align_show() local
1206 return sysfs_emit(buf, "%d\n", dev_dax->align); in align_show()
1209 static ssize_t dev_dax_validate_align(struct dev_dax *dev_dax) in dev_dax_validate_align() argument
1211 struct device *dev = &dev_dax->dev; in dev_dax_validate_align()
1214 for (i = 0; i < dev_dax->nr_range; i++) { in dev_dax_validate_align()
1215 size_t len = range_len(&dev_dax->ranges[i].range); in dev_dax_validate_align()
1217 if (!alloc_is_aligned(dev_dax, len)) { in dev_dax_validate_align()
1219 __func__, dev_dax->align, i); in dev_dax_validate_align()
1230 struct dev_dax *dev_dax = to_dev_dax(dev); in align_store() local
1231 struct dax_region *dax_region = dev_dax->region; in align_store()
1260 align_save = dev_dax->align; in align_store()
1261 dev_dax->align = val; in align_store()
1262 rc = dev_dax_validate_align(dev_dax); in align_store()
1264 dev_dax->align = align_save; in align_store()
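
Lines 1209-1219 and 1260-1264 together show how an align change is validated: align_store() writes the candidate value, runs dev_dax_validate_align() over every existing range, and restores the saved value if any range no longer satisfies the new granularity. A sketch of the validator plus that save/restore step; the dev_dbg() wording and the -EINVAL return are assumptions beyond what the fragments show:

static ssize_t dev_dax_validate_align(struct dev_dax *dev_dax)
{
	struct device *dev = &dev_dax->dev;
	int i;

	for (i = 0; i < dev_dax->nr_range; i++) {
		size_t len = range_len(&dev_dax->ranges[i].range);

		if (!alloc_is_aligned(dev_dax, len)) {
			dev_dbg(dev, "%s: align %u invalid for range %d\n",
				__func__, dev_dax->align, i);
			return -EINVAL;
		}
	}

	return 0;
}

Used as a try-then-rollback inside align_store(), as the fragments at 1260-1264 show:

	align_save = dev_dax->align;
	dev_dax->align = val;
	rc = dev_dax_validate_align(dev_dax);
	if (rc)
		dev_dax->align = align_save;
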
1272 static int dev_dax_target_node(struct dev_dax *dev_dax) in dev_dax_target_node() argument
1274 struct dax_region *dax_region = dev_dax->region; in dev_dax_target_node()
1282 struct dev_dax *dev_dax = to_dev_dax(dev); in target_node_show() local
1284 return sysfs_emit(buf, "%d\n", dev_dax_target_node(dev_dax)); in target_node_show()
1291 struct dev_dax *dev_dax = to_dev_dax(dev); in resource_show() local
1292 struct dax_region *dax_region = dev_dax->region; in resource_show()
1295 if (dev_dax->nr_range < 1) in resource_show()
1298 start = dev_dax->ranges[0].range.start; in resource_show()
1325 struct dev_dax *dev_dax = to_dev_dax(dev); in memmap_on_memory_show() local
1327 return sysfs_emit(buf, "%d\n", dev_dax->memmap_on_memory); in memmap_on_memory_show()
1334 struct dev_dax *dev_dax = to_dev_dax(dev); in memmap_on_memory_store() local
1351 if (dev_dax->memmap_on_memory != val && dev->driver && in memmap_on_memory_store()
1357 dev_dax->memmap_on_memory = val; in memmap_on_memory_store()
1367 struct dev_dax *dev_dax = to_dev_dax(dev); in dev_dax_visible() local
1368 struct dax_region *dax_region = dev_dax->region; in dev_dax_visible()
1370 if (a == &dev_attr_target_node.attr && dev_dax_target_node(dev_dax) < 0) in dev_dax_visible()
1406 struct dev_dax *dev_dax = to_dev_dax(dev); in dev_dax_release() local
1407 struct dax_device *dax_dev = dev_dax->dax_dev; in dev_dax_release()
1410 free_dev_dax_id(dev_dax); in dev_dax_release()
1411 kfree(dev_dax->pgmap); in dev_dax_release()
1412 kfree(dev_dax); in dev_dax_release()
1420 static struct dev_dax *__devm_create_dev_dax(struct dev_dax_data *data) in __devm_create_dev_dax()
1425 struct dev_dax *dev_dax; in __devm_create_dev_dax() local
1430 dev_dax = kzalloc(sizeof(*dev_dax), GFP_KERNEL); in __devm_create_dev_dax()
1431 if (!dev_dax) in __devm_create_dev_dax()
1434 dev_dax->region = dax_region; in __devm_create_dev_dax()
1442 dev_dax->id = data->id; in __devm_create_dev_dax()
1450 rc = alloc_dev_dax_id(dev_dax); in __devm_create_dev_dax()
1455 dev = &dev_dax->dev; in __devm_create_dev_dax()
1457 dev_set_name(dev, "dax%d.%d", dax_region->id, dev_dax->id); in __devm_create_dev_dax()
1459 rc = alloc_dev_dax_range(dev_dax, dax_region->res.start, data->size); in __devm_create_dev_dax()
1467 dev_dax->pgmap = kmemdup(data->pgmap, in __devm_create_dev_dax()
1469 if (!dev_dax->pgmap) { in __devm_create_dev_dax()
1479 dax_dev = alloc_dax(dev_dax, NULL); in __devm_create_dev_dax()
1491 dev_dax->dax_dev = dax_dev; in __devm_create_dev_dax()
1492 dev_dax->target_node = dax_region->target_node; in __devm_create_dev_dax()
1493 dev_dax->align = dax_region->align; in __devm_create_dev_dax()
1494 ida_init(&dev_dax->ida); in __devm_create_dev_dax()
1496 dev_dax->memmap_on_memory = data->memmap_on_memory; in __devm_create_dev_dax()
1506 kill_dev_dax(dev_dax); in __devm_create_dev_dax()
1516 if (dev_dax->nr_range && range_len(&dev_dax->ranges[0].range)) { in __devm_create_dev_dax()
1517 rc = devm_register_dax_mapping(dev_dax, 0); in __devm_create_dev_dax()
1522 return dev_dax; in __devm_create_dev_dax()
1525 kfree(dev_dax->pgmap); in __devm_create_dev_dax()
1527 free_dev_dax_ranges(dev_dax); in __devm_create_dev_dax()
1529 free_dev_dax_id(dev_dax); in __devm_create_dev_dax()
1531 kfree(dev_dax); in __devm_create_dev_dax()
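
The matches from 1420 to 1531 trace the construction and unwind order in __devm_create_dev_dax(): allocate and attach the dev_dax to its region, settle on an id (the caller's static id at 1442 or a dynamic one at 1450), name the device dax<region>.<id> (1457), carve the initial range (1459), duplicate the caller's pgmap (1467), create the dax_device (1479), fill in the per-device fields (1491-1496), then register, with the error labels at 1525-1531 unwinding in exactly the reverse order. A heavily condensed, hedged sketch of that shape; the static-vs-dynamic region checks, the devm-managed unregister action, the initial mapping registration at 1516-1517, and the device bus/type setup are simplified or omitted, dev_dax_data field names not visible in the matches are assumed, and the exact error handling is an assumption:

static struct dev_dax *__devm_create_dev_dax(struct dev_dax_data *data)
{
	struct dax_region *dax_region = data->dax_region;
	struct dax_device *dax_dev;
	struct dev_dax *dev_dax;
	struct device *dev;
	int rc;

	dev_dax = kzalloc(sizeof(*dev_dax), GFP_KERNEL);
	if (!dev_dax)
		return ERR_PTR(-ENOMEM);

	dev_dax->region = dax_region;
	if (data->id >= 0) {
		/* simplified: a static region dictates the instance id */
		dev_dax->id = data->id;
	} else {
		/* dynamic region: pull an id from the region's ida */
		rc = alloc_dev_dax_id(dev_dax);
		if (rc < 0)
			goto err_id;
	}

	dev = &dev_dax->dev;
	device_initialize(dev);
	dev_set_name(dev, "dax%d.%d", dax_region->id, dev_dax->id);

	rc = alloc_dev_dax_range(dev_dax, dax_region->res.start, data->size);
	if (rc)
		goto err_range;

	if (data->pgmap) {
		dev_dax->pgmap = kmemdup(data->pgmap,
				sizeof(struct dev_pagemap), GFP_KERNEL);
		if (!dev_dax->pgmap) {
			rc = -ENOMEM;
			goto err_pgmap;
		}
	}

	dax_dev = alloc_dax(dev_dax, NULL);
	if (IS_ERR(dax_dev)) {
		rc = PTR_ERR(dax_dev);
		goto err_alloc_dax;
	}

	dev_dax->dax_dev = dax_dev;
	dev_dax->target_node = dax_region->target_node;
	dev_dax->align = dax_region->align;
	ida_init(&dev_dax->ida);
	dev_dax->memmap_on_memory = data->memmap_on_memory;

	rc = device_add(dev);
	if (rc) {
		/* registration failed: kill the dax_device, drop the ref */
		kill_dev_dax(dev_dax);
		put_device(dev);
		return ERR_PTR(rc);
	}

	return dev_dax;

err_alloc_dax:
	kfree(dev_dax->pgmap);
err_pgmap:
	free_dev_dax_ranges(dev_dax);
err_range:
	free_dev_dax_id(dev_dax);
err_id:
	kfree(dev_dax);
	return ERR_PTR(rc);
}
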
1536 struct dev_dax *devm_create_dev_dax(struct dev_dax_data *data) in devm_create_dev_dax()
1538 struct dev_dax *dev_dax; in devm_create_dev_dax() local
1541 dev_dax = __devm_create_dev_dax(data); in devm_create_dev_dax()
1544 return dev_dax; in devm_create_dev_dax()