Lines matching refs: spinand — references to the spinand device handle in the Linux SPI NAND core driver (drivers/mtd/nand/spi/core.c). Each entry gives the source line number, the matching line, and the enclosing function.
23 static int spinand_read_reg_op(struct spinand_device *spinand, u8 reg, u8 *val) in spinand_read_reg_op() argument
26 spinand->scratchbuf); in spinand_read_reg_op()
29 ret = spi_mem_exec_op(spinand->spimem, &op); in spinand_read_reg_op()
33 *val = *spinand->scratchbuf; in spinand_read_reg_op()
37 int spinand_write_reg_op(struct spinand_device *spinand, u8 reg, u8 val) in spinand_write_reg_op() argument
40 spinand->scratchbuf); in spinand_write_reg_op()
42 *spinand->scratchbuf = val; in spinand_write_reg_op()
43 return spi_mem_exec_op(spinand->spimem, &op); in spinand_write_reg_op()
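The fragments above come from the two register helpers that wrap the GET FEATURE (0Fh) and SET FEATURE (1Fh) commands. A minimal sketch follows, reconstructed from those fragments; the op macro names and the error paths are assumptions based on include/linux/mtd/spinand.h, and only the spi_mem_exec_op() calls through spinand->scratchbuf are confirmed by the listing.

static int spinand_read_reg_op(struct spinand_device *spinand, u8 reg, u8 *val)
{
        /* The op template points at the DMA-able scratch buffer, not at *val. */
        struct spi_mem_op op = SPINAND_GET_FEATURE_OP(reg, spinand->scratchbuf);
        int ret;

        ret = spi_mem_exec_op(spinand->spimem, &op);
        if (ret)
                return ret;

        *val = *spinand->scratchbuf;
        return 0;
}

int spinand_write_reg_op(struct spinand_device *spinand, u8 reg, u8 val)
{
        struct spi_mem_op op = SPINAND_SET_FEATURE_OP(reg, spinand->scratchbuf);

        /* Stage the value in the scratch buffer so it can be DMAed safely. */
        *spinand->scratchbuf = val;
        return spi_mem_exec_op(spinand->spimem, &op);
}
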
46 static int spinand_read_status(struct spinand_device *spinand, u8 *status) in spinand_read_status() argument
48 return spinand_read_reg_op(spinand, REG_STATUS, status); in spinand_read_status()
51 static int spinand_get_cfg(struct spinand_device *spinand, u8 *cfg) in spinand_get_cfg() argument
53 struct nand_device *nand = spinand_to_nand(spinand); in spinand_get_cfg()
55 if (WARN_ON(spinand->cur_target < 0 || in spinand_get_cfg()
56 spinand->cur_target >= nand->memorg.ntargets)) in spinand_get_cfg()
59 *cfg = spinand->cfg_cache[spinand->cur_target]; in spinand_get_cfg()
63 static int spinand_set_cfg(struct spinand_device *spinand, u8 cfg) in spinand_set_cfg() argument
65 struct nand_device *nand = spinand_to_nand(spinand); in spinand_set_cfg()
68 if (WARN_ON(spinand->cur_target < 0 || in spinand_set_cfg()
69 spinand->cur_target >= nand->memorg.ntargets)) in spinand_set_cfg()
72 if (spinand->cfg_cache[spinand->cur_target] == cfg) in spinand_set_cfg()
75 ret = spinand_write_reg_op(spinand, REG_CFG, cfg); in spinand_set_cfg()
79 spinand->cfg_cache[spinand->cur_target] = cfg; in spinand_set_cfg()
93 int spinand_upd_cfg(struct spinand_device *spinand, u8 mask, u8 val) in spinand_upd_cfg() argument
98 ret = spinand_get_cfg(spinand, &cfg); in spinand_upd_cfg()
105 return spinand_set_cfg(spinand, cfg); in spinand_upd_cfg()
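spinand_get_cfg()/spinand_set_cfg() serve the per-target configuration cache, and spinand_upd_cfg() layers a read-modify-write on top of them. A sketch of the update helper; the mask/value combination step does not reference spinand and therefore does not appear in the listing, so it is inferred:

int spinand_upd_cfg(struct spinand_device *spinand, u8 mask, u8 val)
{
        int ret;
        u8 cfg;

        /* Read the cached CFG value for the currently selected target. */
        ret = spinand_get_cfg(spinand, &cfg);
        if (ret)
                return ret;

        /* Update only the requested field (inferred, not shown in the listing). */
        cfg &= ~mask;
        cfg |= val;

        /* spinand_set_cfg() skips the register write when the value is unchanged. */
        return spinand_set_cfg(spinand, cfg);
}
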
117 int spinand_select_target(struct spinand_device *spinand, unsigned int target) in spinand_select_target() argument
119 struct nand_device *nand = spinand_to_nand(spinand); in spinand_select_target()
125 if (spinand->cur_target == target) in spinand_select_target()
129 spinand->cur_target = target; in spinand_select_target()
133 ret = spinand->select_target(spinand, target); in spinand_select_target()
137 spinand->cur_target = target; in spinand_select_target()
141 static int spinand_read_cfg(struct spinand_device *spinand) in spinand_read_cfg() argument
143 struct nand_device *nand = spinand_to_nand(spinand); in spinand_read_cfg()
148 ret = spinand_select_target(spinand, target); in spinand_read_cfg()
156 ret = spinand_read_reg_op(spinand, REG_CFG, in spinand_read_cfg()
157 &spinand->cfg_cache[target]); in spinand_read_cfg()
165 static int spinand_init_cfg_cache(struct spinand_device *spinand) in spinand_init_cfg_cache() argument
167 struct nand_device *nand = spinand_to_nand(spinand); in spinand_init_cfg_cache()
168 struct device *dev = &spinand->spimem->spi->dev; in spinand_init_cfg_cache()
170 spinand->cfg_cache = devm_kcalloc(dev, in spinand_init_cfg_cache()
172 sizeof(*spinand->cfg_cache), in spinand_init_cfg_cache()
174 if (!spinand->cfg_cache) in spinand_init_cfg_cache()
180 static int spinand_init_quad_enable(struct spinand_device *spinand) in spinand_init_quad_enable() argument
184 if (!(spinand->flags & SPINAND_HAS_QE_BIT)) in spinand_init_quad_enable()
187 if (spinand->op_templates.read_cache->data.buswidth == 4 || in spinand_init_quad_enable()
188 spinand->op_templates.write_cache->data.buswidth == 4 || in spinand_init_quad_enable()
189 spinand->op_templates.update_cache->data.buswidth == 4) in spinand_init_quad_enable()
192 return spinand_upd_cfg(spinand, CFG_QUAD_ENABLE, in spinand_init_quad_enable()
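spinand_init_quad_enable() only sets the QE bit when the chip advertises one and at least one of the selected cache op templates transfers data on four lines. A sketch, assuming a local enable flag for the part of the body that does not appear in the listing:

static int spinand_init_quad_enable(struct spinand_device *spinand)
{
        bool enable = false;

        /* Chips without a QE bit need nothing here. */
        if (!(spinand->flags & SPINAND_HAS_QE_BIT))
                return 0;

        /* Only set QE when one of the selected cache ops actually uses x4 data. */
        if (spinand->op_templates.read_cache->data.buswidth == 4 ||
            spinand->op_templates.write_cache->data.buswidth == 4 ||
            spinand->op_templates.update_cache->data.buswidth == 4)
                enable = true;

        return spinand_upd_cfg(spinand, CFG_QUAD_ENABLE,
                               enable ? CFG_QUAD_ENABLE : 0);
}
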
196 static int spinand_ecc_enable(struct spinand_device *spinand, in spinand_ecc_enable() argument
199 return spinand_upd_cfg(spinand, CFG_ECC_ENABLE, in spinand_ecc_enable()
203 static int spinand_cont_read_enable(struct spinand_device *spinand, in spinand_cont_read_enable() argument
206 return spinand->set_cont_read(spinand, enable); in spinand_cont_read_enable()
209 static int spinand_check_ecc_status(struct spinand_device *spinand, u8 status) in spinand_check_ecc_status() argument
211 struct nand_device *nand = spinand_to_nand(spinand); in spinand_check_ecc_status()
213 if (spinand->eccinfo.get_status) in spinand_check_ecc_status()
214 return spinand->eccinfo.get_status(spinand, status); in spinand_check_ecc_status()
264 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_ondie_ecc_init_ctx() local
278 if (spinand->eccinfo.ooblayout) in spinand_ondie_ecc_init_ctx()
279 mtd_set_ooblayout(mtd, spinand->eccinfo.ooblayout); in spinand_ondie_ecc_init_ctx()
294 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_ondie_ecc_prepare_io_req() local
297 memset(spinand->oobbuf, 0xff, nanddev_per_page_oobsize(nand)); in spinand_ondie_ecc_prepare_io_req()
300 return spinand_ecc_enable(spinand, enable); in spinand_ondie_ecc_prepare_io_req()
307 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_ondie_ecc_finish_io_req() local
308 struct mtd_info *mtd = spinand_to_mtd(spinand); in spinand_ondie_ecc_finish_io_req()
319 ret = spinand_check_ecc_status(spinand, engine_conf->status); in spinand_ondie_ecc_finish_io_req()
360 static int spinand_write_enable_op(struct spinand_device *spinand) in spinand_write_enable_op() argument
364 return spi_mem_exec_op(spinand->spimem, &op); in spinand_write_enable_op()
367 static int spinand_load_page_op(struct spinand_device *spinand, in spinand_load_page_op() argument
370 struct nand_device *nand = spinand_to_nand(spinand); in spinand_load_page_op()
374 return spi_mem_exec_op(spinand->spimem, &op); in spinand_load_page_op()
377 static int spinand_read_from_cache_op(struct spinand_device *spinand, in spinand_read_from_cache_op() argument
380 struct nand_device *nand = spinand_to_nand(spinand); in spinand_read_from_cache_op()
381 struct mtd_info *mtd = spinand_to_mtd(spinand); in spinand_read_from_cache_op()
389 buf = spinand->databuf; in spinand_read_from_cache_op()
401 buf = spinand->oobbuf; in spinand_read_from_cache_op()
407 rdesc = spinand->dirmaps[req->pos.plane].rdesc; in spinand_read_from_cache_op()
409 rdesc = spinand->dirmaps[req->pos.plane].rdesc_ecc; in spinand_read_from_cache_op()
411 if (spinand->flags & SPINAND_HAS_READ_PLANE_SELECT_BIT) in spinand_read_from_cache_op()
435 memcpy(req->databuf.in, spinand->databuf + req->dataoffs, in spinand_read_from_cache_op()
441 spinand->oobbuf, in spinand_read_from_cache_op()
445 memcpy(req->oobbuf.in, spinand->oobbuf + req->ooboffs, in spinand_read_from_cache_op()
452 static int spinand_write_to_cache_op(struct spinand_device *spinand, in spinand_write_to_cache_op() argument
455 struct nand_device *nand = spinand_to_nand(spinand); in spinand_write_to_cache_op()
456 struct mtd_info *mtd = spinand_to_mtd(spinand); in spinand_write_to_cache_op()
459 void *buf = spinand->databuf; in spinand_write_to_cache_op()
473 memset(spinand->databuf, 0xff, nanddev_page_size(nand)); in spinand_write_to_cache_op()
476 memcpy(spinand->databuf + req->dataoffs, req->databuf.out, in spinand_write_to_cache_op()
482 spinand->oobbuf, in spinand_write_to_cache_op()
486 memcpy(spinand->oobbuf + req->ooboffs, req->oobbuf.out, in spinand_write_to_cache_op()
491 wdesc = spinand->dirmaps[req->pos.plane].wdesc; in spinand_write_to_cache_op()
493 wdesc = spinand->dirmaps[req->pos.plane].wdesc_ecc; in spinand_write_to_cache_op()
495 if (spinand->flags & SPINAND_HAS_PROG_PLANE_SELECT_BIT) in spinand_write_to_cache_op()
514 static int spinand_program_op(struct spinand_device *spinand, in spinand_program_op() argument
517 struct nand_device *nand = spinand_to_nand(spinand); in spinand_program_op()
521 return spi_mem_exec_op(spinand->spimem, &op); in spinand_program_op()
524 static int spinand_erase_op(struct spinand_device *spinand, in spinand_erase_op() argument
527 struct nand_device *nand = spinand_to_nand(spinand); in spinand_erase_op()
531 return spi_mem_exec_op(spinand->spimem, &op); in spinand_erase_op()
534 static int spinand_wait(struct spinand_device *spinand, in spinand_wait() argument
540 spinand->scratchbuf); in spinand_wait()
544 ret = spi_mem_poll_status(spinand->spimem, &op, STATUS_BUSY, 0, in spinand_wait()
551 status = *spinand->scratchbuf; in spinand_wait()
559 ret = spinand_read_status(spinand, &status); in spinand_wait()
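spinand_wait() asks the controller to poll the status register in hardware via spi_mem_poll_status() and falls back to reading REG_STATUS from the CPU when that is not supported. A simplified sketch; the timeout bookkeeping and the exact fallback loop are assumptions, only the poll call and the status reads are confirmed by the fragments:

static int spinand_wait(struct spinand_device *spinand,
                        unsigned long initial_delay_us,
                        unsigned long poll_delay_us, u8 *s)
{
        struct spi_mem_op op = SPINAND_GET_FEATURE_OP(REG_STATUS,
                                                      spinand->scratchbuf);
        u8 status;
        int ret;

        /* Hardware polling: the controller leaves the final status in the scratch buffer. */
        ret = spi_mem_poll_status(spinand->spimem, &op, STATUS_BUSY, 0,
                                  initial_delay_us, poll_delay_us,
                                  SPINAND_WAITRDY_TIMEOUT_MS);
        if (!ret) {
                status = *spinand->scratchbuf;
                goto out;
        }
        if (ret != -EOPNOTSUPP)
                return ret;

        /* Software fallback (timeout handling elided in this sketch). */
        do {
                ret = spinand_read_status(spinand, &status);
                if (ret)
                        return ret;
        } while (status & STATUS_BUSY);

out:
        if (s)
                *s = status;

        return 0;
}
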
570 static int spinand_read_id_op(struct spinand_device *spinand, u8 naddr, in spinand_read_id_op() argument
574 naddr, ndummy, spinand->scratchbuf, SPINAND_MAX_ID_LEN); in spinand_read_id_op()
577 ret = spi_mem_exec_op(spinand->spimem, &op); in spinand_read_id_op()
579 memcpy(buf, spinand->scratchbuf, SPINAND_MAX_ID_LEN); in spinand_read_id_op()
584 static int spinand_reset_op(struct spinand_device *spinand) in spinand_reset_op() argument
589 ret = spi_mem_exec_op(spinand->spimem, &op); in spinand_reset_op()
593 return spinand_wait(spinand, in spinand_reset_op()
599 static int spinand_lock_block(struct spinand_device *spinand, u8 lock) in spinand_lock_block() argument
601 return spinand_write_reg_op(spinand, REG_BLOCK_LOCK, lock); in spinand_lock_block()
604 static int spinand_read_page(struct spinand_device *spinand, in spinand_read_page() argument
607 struct nand_device *nand = spinand_to_nand(spinand); in spinand_read_page()
615 ret = spinand_load_page_op(spinand, req); in spinand_read_page()
619 ret = spinand_wait(spinand, in spinand_read_page()
628 ret = spinand_read_from_cache_op(spinand, req); in spinand_read_page()
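The read path first loads the page into the on-chip cache, waits for the array read to complete (capturing the status byte for ECC reporting), and only then transfers the cache content over the bus. A sketch of spinand_read_page(); the ECC-engine prepare/finish hooks and the status hand-off are simplified assumptions:

static int spinand_read_page(struct spinand_device *spinand,
                             const struct nand_page_io_req *req)
{
        struct nand_device *nand = spinand_to_nand(spinand);
        u8 status;
        int ret;

        ret = nand_ecc_prepare_io_req(nand, (struct nand_page_io_req *)req);
        if (ret)
                return ret;

        /* 13h PAGE READ: move the page from the array into the cache. */
        ret = spinand_load_page_op(spinand, req);
        if (ret)
                return ret;

        /* Wait for the array read and keep the status byte for ECC reporting. */
        ret = spinand_wait(spinand, SPINAND_READ_INITIAL_DELAY_US,
                           SPINAND_READ_POLL_DELAY_US, &status);
        if (ret < 0)
                return ret;

        /* Stream the cache content (data and/or OOB) to the host buffers. */
        ret = spinand_read_from_cache_op(spinand, req);
        if (ret)
                return ret;

        return nand_ecc_finish_io_req(nand, (struct nand_page_io_req *)req);
}
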
635 static int spinand_write_page(struct spinand_device *spinand, in spinand_write_page() argument
638 struct nand_device *nand = spinand_to_nand(spinand); in spinand_write_page()
646 ret = spinand_write_enable_op(spinand); in spinand_write_page()
650 ret = spinand_write_to_cache_op(spinand, req); in spinand_write_page()
654 ret = spinand_program_op(spinand, req); in spinand_write_page()
658 ret = spinand_wait(spinand, in spinand_write_page()
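Writes mirror the read path in reverse: WRITE ENABLE, fill the cache, PROGRAM EXECUTE, then wait and check the program-fail bit. A sketch; the ECC hooks and the failure check are assumptions consistent with the status flags in include/linux/mtd/spinand.h:

static int spinand_write_page(struct spinand_device *spinand,
                              const struct nand_page_io_req *req)
{
        struct nand_device *nand = spinand_to_nand(spinand);
        u8 status;
        int ret;

        ret = nand_ecc_prepare_io_req(nand, (struct nand_page_io_req *)req);
        if (ret)
                return ret;

        /* 06h WRITE ENABLE must precede any program or erase. */
        ret = spinand_write_enable_op(spinand);
        if (ret)
                return ret;

        /* PROGRAM LOAD: fill the on-chip cache with data and OOB. */
        ret = spinand_write_to_cache_op(spinand, req);
        if (ret)
                return ret;

        /* 10h PROGRAM EXECUTE: commit the cache to the array. */
        ret = spinand_program_op(spinand, req);
        if (ret)
                return ret;

        ret = spinand_wait(spinand, SPINAND_WRITE_INITIAL_DELAY_US,
                           SPINAND_WRITE_POLL_DELAY_US, &status);
        if (!ret && (status & STATUS_PROG_FAILED))
                return -EIO;

        return nand_ecc_finish_io_req(nand, (struct nand_page_io_req *)req);
}
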
672 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_regular_page_read() local
686 ret = spinand_select_target(spinand, iter.req.pos.target); in spinand_mtd_regular_page_read()
690 ret = spinand_read_page(spinand, &iter.req); in spinand_mtd_regular_page_read()
714 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_continuous_page_read() local
720 ret = spinand_cont_read_enable(spinand, true); in spinand_mtd_continuous_page_read()
733 ret = spinand_select_target(spinand, iter.req.pos.target); in spinand_mtd_continuous_page_read()
741 ret = spinand_load_page_op(spinand, &iter.req); in spinand_mtd_continuous_page_read()
745 ret = spinand_wait(spinand, SPINAND_READ_INITIAL_DELAY_US, in spinand_mtd_continuous_page_read()
750 ret = spinand_read_from_cache_op(spinand, &iter.req); in spinand_mtd_continuous_page_read()
756 ret = spinand_read_status(spinand, &status); in spinand_mtd_continuous_page_read()
778 spinand_cont_read_enable(spinand, false); in spinand_mtd_continuous_page_read()
783 static void spinand_cont_read_init(struct spinand_device *spinand) in spinand_cont_read_init() argument
785 struct nand_device *nand = spinand_to_nand(spinand); in spinand_cont_read_init()
789 if (spinand->set_cont_read && in spinand_cont_read_init()
792 spinand->cont_read_possible = true; in spinand_cont_read_init()
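Continuous (sequential cache) reads are only enabled when the chip provides a set_cont_read() hook and the ECC configuration can live without per-page OOB access. A sketch; the exact engine-type check is an assumption based on the on-die/none ECC engine types:

static void spinand_cont_read_init(struct spinand_device *spinand)
{
        struct nand_device *nand = spinand_to_nand(spinand);
        enum nand_ecc_engine_type engine_type = nand->ecc.ctx.conf.engine_type;

        /*
         * OOB data cannot be fetched in continuous mode, so only on-die ECC
         * (status-register reporting) or no ECC at all is compatible.
         */
        if (spinand->set_cont_read &&
            (engine_type == NAND_ECC_ENGINE_TYPE_ON_DIE ||
             engine_type == NAND_ECC_ENGINE_TYPE_NONE))
                spinand->cont_read_possible = true;
}
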
800 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_use_cont_read() local
803 if (!spinand->cont_read_possible) in spinand_use_cont_read()
832 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_read() local
837 mutex_lock(&spinand->lock); in spinand_mtd_read()
853 mutex_unlock(&spinand->lock); in spinand_mtd_read()
861 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_write() local
870 mutex_lock(&spinand->lock); in spinand_mtd_write()
876 ret = spinand_select_target(spinand, iter.req.pos.target); in spinand_mtd_write()
880 ret = spinand_write_page(spinand, &iter.req); in spinand_mtd_write()
888 mutex_unlock(&spinand->lock); in spinand_mtd_write()
895 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_isbad() local
905 spinand_select_target(spinand, pos->target); in spinand_isbad()
906 spinand_read_page(spinand, &req); in spinand_isbad()
916 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_mtd_block_isbad() local
921 mutex_lock(&spinand->lock); in spinand_mtd_block_isbad()
923 mutex_unlock(&spinand->lock); in spinand_mtd_block_isbad()
930 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_markbad() local
941 ret = spinand_select_target(spinand, pos->target); in spinand_markbad()
945 ret = spinand_write_enable_op(spinand); in spinand_markbad()
949 return spinand_write_page(spinand, &req); in spinand_markbad()
955 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_mtd_block_markbad() local
960 mutex_lock(&spinand->lock); in spinand_mtd_block_markbad()
962 mutex_unlock(&spinand->lock); in spinand_mtd_block_markbad()
969 struct spinand_device *spinand = nand_to_spinand(nand); in spinand_erase() local
973 ret = spinand_select_target(spinand, pos->target); in spinand_erase()
977 ret = spinand_write_enable_op(spinand); in spinand_erase()
981 ret = spinand_erase_op(spinand, pos); in spinand_erase()
985 ret = spinand_wait(spinand, in spinand_erase()
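Erase follows the same select / write-enable / execute / wait pattern as programming, with the erase-fail status bit checked at the end. A sketch; the erase-fail check itself does not reference spinand and is therefore inferred:

static int spinand_erase(struct nand_device *nand, const struct nand_pos *pos)
{
        struct spinand_device *spinand = nand_to_spinand(nand);
        u8 status;
        int ret;

        ret = spinand_select_target(spinand, pos->target);
        if (ret)
                return ret;

        ret = spinand_write_enable_op(spinand);
        if (ret)
                return ret;

        /* D8h BLOCK ERASE on the row address derived from pos. */
        ret = spinand_erase_op(spinand, pos);
        if (ret)
                return ret;

        ret = spinand_wait(spinand, SPINAND_ERASE_INITIAL_DELAY_US,
                           SPINAND_ERASE_POLL_DELAY_US, &status);
        if (!ret && (status & STATUS_ERASE_FAILED))
                return -EIO;

        return ret;
}
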
999 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_erase() local
1002 mutex_lock(&spinand->lock); in spinand_mtd_erase()
1004 mutex_unlock(&spinand->lock); in spinand_mtd_erase()
1011 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_block_isreserved() local
1017 mutex_lock(&spinand->lock); in spinand_mtd_block_isreserved()
1019 mutex_unlock(&spinand->lock); in spinand_mtd_block_isreserved()
1024 static int spinand_create_dirmap(struct spinand_device *spinand, in spinand_create_dirmap() argument
1027 struct nand_device *nand = spinand_to_nand(spinand); in spinand_create_dirmap()
1034 if (spinand->cont_read_possible) in spinand_create_dirmap()
1040 info.op_tmpl = *spinand->op_templates.update_cache; in spinand_create_dirmap()
1041 desc = devm_spi_mem_dirmap_create(&spinand->spimem->spi->dev, in spinand_create_dirmap()
1042 spinand->spimem, &info); in spinand_create_dirmap()
1046 spinand->dirmaps[plane].wdesc = desc; in spinand_create_dirmap()
1048 info.op_tmpl = *spinand->op_templates.read_cache; in spinand_create_dirmap()
1049 desc = devm_spi_mem_dirmap_create(&spinand->spimem->spi->dev, in spinand_create_dirmap()
1050 spinand->spimem, &info); in spinand_create_dirmap()
1054 spinand->dirmaps[plane].rdesc = desc; in spinand_create_dirmap()
1057 spinand->dirmaps[plane].wdesc_ecc = spinand->dirmaps[plane].wdesc; in spinand_create_dirmap()
1058 spinand->dirmaps[plane].rdesc_ecc = spinand->dirmaps[plane].rdesc; in spinand_create_dirmap()
1063 info.op_tmpl = *spinand->op_templates.update_cache; in spinand_create_dirmap()
1065 desc = devm_spi_mem_dirmap_create(&spinand->spimem->spi->dev, in spinand_create_dirmap()
1066 spinand->spimem, &info); in spinand_create_dirmap()
1070 spinand->dirmaps[plane].wdesc_ecc = desc; in spinand_create_dirmap()
1072 info.op_tmpl = *spinand->op_templates.read_cache; in spinand_create_dirmap()
1074 desc = devm_spi_mem_dirmap_create(&spinand->spimem->spi->dev, in spinand_create_dirmap()
1075 spinand->spimem, &info); in spinand_create_dirmap()
1079 spinand->dirmaps[plane].rdesc_ecc = desc; in spinand_create_dirmap()
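Each plane gets direct-mapping descriptors for the read-from-cache and write-to-cache templates, plus a second pair used when on-die ECC is disabled (raw accesses). A trimmed sketch of the non-raw half; the length choice follows the fragments, while the plane-offset encoding and the rest are assumptions:

static int spinand_create_dirmap(struct spinand_device *spinand,
                                 unsigned int plane)
{
        struct nand_device *nand = spinand_to_nand(spinand);
        struct spi_mem_dirmap_info info = {
                .length = nanddev_page_size(nand) +
                          nanddev_per_page_oobsize(nand),
        };
        struct spi_mem_dirmap_desc *desc;

        /* Continuous reads may stream a whole erase block through the map. */
        if (spinand->cont_read_possible)
                info.length = nanddev_eraseblock_size(nand);

        /* Plane select encoded above the column address (assumption). */
        info.offset = plane << fls(nand->memorg.pagesize);

        info.op_tmpl = *spinand->op_templates.update_cache;
        desc = devm_spi_mem_dirmap_create(&spinand->spimem->spi->dev,
                                          spinand->spimem, &info);
        if (IS_ERR(desc))
                return PTR_ERR(desc);
        spinand->dirmaps[plane].wdesc = desc;

        info.op_tmpl = *spinand->op_templates.read_cache;
        desc = devm_spi_mem_dirmap_create(&spinand->spimem->spi->dev,
                                          spinand->spimem, &info);
        if (IS_ERR(desc))
                return PTR_ERR(desc);
        spinand->dirmaps[plane].rdesc = desc;

        /* The *_ecc variants repeat the pattern with ECC disabled in the template. */
        return 0;
}
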
1084 static int spinand_create_dirmaps(struct spinand_device *spinand) in spinand_create_dirmaps() argument
1086 struct nand_device *nand = spinand_to_nand(spinand); in spinand_create_dirmaps()
1089 spinand->dirmaps = devm_kzalloc(&spinand->spimem->spi->dev, in spinand_create_dirmaps()
1090 sizeof(*spinand->dirmaps) * in spinand_create_dirmaps()
1093 if (!spinand->dirmaps) in spinand_create_dirmaps()
1097 ret = spinand_create_dirmap(spinand, i); in spinand_create_dirmaps()
1125 static int spinand_manufacturer_match(struct spinand_device *spinand, in spinand_manufacturer_match() argument
1128 u8 *id = spinand->id.data; in spinand_manufacturer_match()
1139 ret = spinand_match_and_init(spinand, in spinand_manufacturer_match()
1146 spinand->manufacturer = manufacturer; in spinand_manufacturer_match()
1152 static int spinand_id_detect(struct spinand_device *spinand) in spinand_id_detect() argument
1154 u8 *id = spinand->id.data; in spinand_id_detect()
1157 ret = spinand_read_id_op(spinand, 0, 0, id); in spinand_id_detect()
1160 ret = spinand_manufacturer_match(spinand, SPINAND_READID_METHOD_OPCODE); in spinand_id_detect()
1164 ret = spinand_read_id_op(spinand, 1, 0, id); in spinand_id_detect()
1167 ret = spinand_manufacturer_match(spinand, in spinand_id_detect()
1172 ret = spinand_read_id_op(spinand, 0, 1, id); in spinand_id_detect()
1175 ret = spinand_manufacturer_match(spinand, in spinand_id_detect()
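Identification tries the three READ ID layouts in turn (opcode only, opcode plus one address byte, opcode plus one dummy byte) and stops at the first one a manufacturer table matches. A sketch mirroring the three (naddr, ndummy) pairs visible in the listing:

static int spinand_id_detect(struct spinand_device *spinand)
{
        u8 *id = spinand->id.data;
        int ret;

        /* Plain READ ID: opcode followed directly by the ID bytes. */
        ret = spinand_read_id_op(spinand, 0, 0, id);
        if (ret)
                return ret;
        ret = spinand_manufacturer_match(spinand, SPINAND_READID_METHOD_OPCODE);
        if (!ret)
                return 0;

        /* READ ID with one address byte between opcode and ID. */
        ret = spinand_read_id_op(spinand, 1, 0, id);
        if (ret)
                return ret;
        ret = spinand_manufacturer_match(spinand,
                                         SPINAND_READID_METHOD_OPCODE_ADDR);
        if (!ret)
                return 0;

        /* READ ID with one dummy byte between opcode and ID. */
        ret = spinand_read_id_op(spinand, 0, 1, id);
        if (ret)
                return ret;
        return spinand_manufacturer_match(spinand,
                                          SPINAND_READID_METHOD_OPCODE_DUMMY);
}
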
1181 static int spinand_manufacturer_init(struct spinand_device *spinand) in spinand_manufacturer_init() argument
1183 if (spinand->manufacturer->ops->init) in spinand_manufacturer_init()
1184 return spinand->manufacturer->ops->init(spinand); in spinand_manufacturer_init()
1189 static void spinand_manufacturer_cleanup(struct spinand_device *spinand) in spinand_manufacturer_cleanup() argument
1192 if (spinand->manufacturer->ops->cleanup) in spinand_manufacturer_cleanup()
1193 return spinand->manufacturer->ops->cleanup(spinand); in spinand_manufacturer_cleanup()
1197 spinand_select_op_variant(struct spinand_device *spinand, in spinand_select_op_variant() argument
1200 struct nand_device *nand = spinand_to_nand(spinand); in spinand_select_op_variant()
1213 ret = spi_mem_adjust_op_size(spinand->spimem, &op); in spinand_select_op_variant()
1217 if (!spi_mem_supports_op(spinand->spimem, &op)) in spinand_select_op_variant()
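spinand_select_op_variant() walks the per-chip list of candidate cache-op variants and keeps the first one the controller can execute for a full page plus OOB, letting spi_mem_adjust_op_size() split the transfer when needed. A sketch; the struct spinand_op_variants layout (ops/nops) is assumed from include/linux/mtd/spinand.h:

static const struct spi_mem_op *
spinand_select_op_variant(struct spinand_device *spinand,
                          const struct spinand_op_variants *variants)
{
        struct nand_device *nand = spinand_to_nand(spinand);
        unsigned int i;

        for (i = 0; i < variants->nops; i++) {
                struct spi_mem_op op = variants->ops[i];
                unsigned int nbytes;
                int ret;

                nbytes = nanddev_page_size(nand) +
                         nanddev_per_page_oobsize(nand);

                /* Check that the whole page+OOB can be covered, possibly in chunks. */
                while (nbytes) {
                        op.data.nbytes = nbytes;
                        ret = spi_mem_adjust_op_size(spinand->spimem, &op);
                        if (ret)
                                break;

                        if (!spi_mem_supports_op(spinand->spimem, &op))
                                break;

                        nbytes -= op.data.nbytes;
                }

                if (!nbytes)
                        return &variants->ops[i];
        }

        return NULL;
}
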
1245 int spinand_match_and_init(struct spinand_device *spinand, in spinand_match_and_init() argument
1250 u8 *id = spinand->id.data; in spinand_match_and_init()
1251 struct nand_device *nand = spinand_to_nand(spinand); in spinand_match_and_init()
1266 spinand->eccinfo = table[i].eccinfo; in spinand_match_and_init()
1267 spinand->flags = table[i].flags; in spinand_match_and_init()
1268 spinand->id.len = 1 + table[i].devid.len; in spinand_match_and_init()
1269 spinand->select_target = table[i].select_target; in spinand_match_and_init()
1270 spinand->set_cont_read = table[i].set_cont_read; in spinand_match_and_init()
1272 op = spinand_select_op_variant(spinand, in spinand_match_and_init()
1277 spinand->op_templates.read_cache = op; in spinand_match_and_init()
1279 op = spinand_select_op_variant(spinand, in spinand_match_and_init()
1284 spinand->op_templates.write_cache = op; in spinand_match_and_init()
1286 op = spinand_select_op_variant(spinand, in spinand_match_and_init()
1288 spinand->op_templates.update_cache = op; in spinand_match_and_init()
1296 static int spinand_detect(struct spinand_device *spinand) in spinand_detect() argument
1298 struct device *dev = &spinand->spimem->spi->dev; in spinand_detect()
1299 struct nand_device *nand = spinand_to_nand(spinand); in spinand_detect()
1302 ret = spinand_reset_op(spinand); in spinand_detect()
1306 ret = spinand_id_detect(spinand); in spinand_detect()
1309 spinand->id.data); in spinand_detect()
1313 if (nand->memorg.ntargets > 1 && !spinand->select_target) { in spinand_detect()
1319 dev_info(&spinand->spimem->spi->dev, in spinand_detect()
1320 "%s SPI NAND was found.\n", spinand->manufacturer->name); in spinand_detect()
1321 dev_info(&spinand->spimem->spi->dev, in spinand_detect()
1329 static int spinand_init_flash(struct spinand_device *spinand) in spinand_init_flash() argument
1331 struct device *dev = &spinand->spimem->spi->dev; in spinand_init_flash()
1332 struct nand_device *nand = spinand_to_nand(spinand); in spinand_init_flash()
1335 ret = spinand_read_cfg(spinand); in spinand_init_flash()
1339 ret = spinand_init_quad_enable(spinand); in spinand_init_flash()
1343 ret = spinand_upd_cfg(spinand, CFG_OTP_ENABLE, 0); in spinand_init_flash()
1347 ret = spinand_manufacturer_init(spinand); in spinand_init_flash()
1357 ret = spinand_select_target(spinand, i); in spinand_init_flash()
1361 ret = spinand_lock_block(spinand, BL_ALL_UNLOCKED); in spinand_init_flash()
1367 spinand_manufacturer_cleanup(spinand); in spinand_init_flash()
1374 struct spinand_device *spinand = mtd_to_spinand(mtd); in spinand_mtd_resume() local
1377 ret = spinand_reset_op(spinand); in spinand_mtd_resume()
1381 ret = spinand_init_flash(spinand); in spinand_mtd_resume()
1385 spinand_ecc_enable(spinand, false); in spinand_mtd_resume()
1388 static int spinand_init(struct spinand_device *spinand) in spinand_init() argument
1390 struct device *dev = &spinand->spimem->spi->dev; in spinand_init()
1391 struct mtd_info *mtd = spinand_to_mtd(spinand); in spinand_init()
1399 spinand->scratchbuf = kzalloc(SPINAND_MAX_ID_LEN, GFP_KERNEL); in spinand_init()
1400 if (!spinand->scratchbuf) in spinand_init()
1403 ret = spinand_detect(spinand); in spinand_init()
1412 spinand->databuf = kzalloc(nanddev_eraseblock_size(nand), in spinand_init()
1414 if (!spinand->databuf) { in spinand_init()
1419 spinand->oobbuf = spinand->databuf + nanddev_page_size(nand); in spinand_init()
1421 ret = spinand_init_cfg_cache(spinand); in spinand_init()
1425 ret = spinand_init_flash(spinand); in spinand_init()
1437 spinand_ecc_enable(spinand, false); in spinand_init()
1446 spinand_cont_read_init(spinand); in spinand_init()
1470 ret = spinand_create_dirmaps(spinand); in spinand_init()
1487 spinand_manufacturer_cleanup(spinand); in spinand_init()
1490 kfree(spinand->databuf); in spinand_init()
1491 kfree(spinand->scratchbuf); in spinand_init()
1495 static void spinand_cleanup(struct spinand_device *spinand) in spinand_cleanup() argument
1497 struct nand_device *nand = spinand_to_nand(spinand); in spinand_cleanup()
1500 spinand_manufacturer_cleanup(spinand); in spinand_cleanup()
1501 kfree(spinand->databuf); in spinand_cleanup()
1502 kfree(spinand->scratchbuf); in spinand_cleanup()
1507 struct spinand_device *spinand; in spinand_probe() local
1511 spinand = devm_kzalloc(&mem->spi->dev, sizeof(*spinand), in spinand_probe()
1513 if (!spinand) in spinand_probe()
1516 spinand->spimem = mem; in spinand_probe()
1517 spi_mem_set_drvdata(mem, spinand); in spinand_probe()
1518 spinand_set_of_node(spinand, mem->spi->dev.of_node); in spinand_probe()
1519 mutex_init(&spinand->lock); in spinand_probe()
1520 mtd = spinand_to_mtd(spinand); in spinand_probe()
1523 ret = spinand_init(spinand); in spinand_probe()
1534 spinand_cleanup(spinand); in spinand_probe()
1541 struct spinand_device *spinand; in spinand_remove() local
1545 spinand = spi_mem_get_drvdata(mem); in spinand_remove()
1546 mtd = spinand_to_mtd(spinand); in spinand_remove()
1552 spinand_cleanup(spinand); in spinand_remove()