Lines Matching +full:exynos5433 +full:- +full:spi

1 // SPDX-License-Identifier: GPL-2.0+
10 #include <linux/dma-mapping.h>
17 #include <linux/platform_data/spi-s3c64xx.h>
20 #include <linux/spi/spi.h>
27 /* Registers and bit-fields */
112 #define FIFO_LVL_MASK(i) ((i)->port_conf->fifo_lvl_mask[i->port_id])
114 (1 << (i)->port_conf->tx_st_done)) ? 1 : 0)
115 #define TX_FIFO_LVL(v, sdd) (((v) & (sdd)->tx_fifomask) >> \
116 __ffs((sdd)->tx_fifomask))
117 #define RX_FIFO_LVL(v, sdd) (((v) & (sdd)->rx_fifomask) >> \
118 __ffs((sdd)->rx_fifomask))
127 #define is_polling(x) (x->cntrlr_info->polling)
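
The TX_FIFO_LVL()/RX_FIFO_LVL() macros above pull the FIFO level field out of the status word by masking it and shifting right by the index of the mask's lowest set bit. A minimal stand-alone sketch of that extraction, with a hypothetical 9-bit level field starting at bit 15 (ffs() is used as a user-space stand-in for the kernel's 0-based __ffs(), hence the -1; the mask and status values are illustrative, not taken from a real port config):

#include <stdio.h>
#include <strings.h>                            /* ffs() */

int main(void)
{
        unsigned int status      = 0x00258000;  /* hypothetical SPI_STATUS value     */
        unsigned int rx_fifomask = 0x00ff8000;  /* hypothetical 9-bit field at bit 15 */
        unsigned int level = (status & rx_fifomask) >> (ffs(rx_fifomask) - 1);

        printf("RX FIFO level: %u\n", level);   /* prints 75 */
        return 0;
}
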
139 * struct s3c64xx_spi_port_config - SPI Controller hardware info
155 * @use_32bit_io: True if the SoC allows only 32-bit register accesses.
157 * The Samsung s3c64xx SPI controllers are used on various Samsung SoCs but
158 * differ in some aspects such as the size of the FIFO and the SPI bus clock
179 * struct s3c64xx_spi_driver_data - Runtime info holder for SPI driver.
180 * @clk: Pointer to the spi clock.
181 * @src_clk: Pointer to the clock used to generate SPI signals.
184 * @host: Pointer to the SPI Protocol host.
188 * @sfr_start: BUS address of SPI controller regs.
196 * @port_conf: Local SPI port configuration data
229 void __iomem *regs = sdd->regs; in s3c64xx_flush_fifo()
248 } while (TX_FIFO_LVL(val, sdd) && --loops); in s3c64xx_flush_fifo()
251 dev_warn(&sdd->pdev->dev, "Timed out flushing TX FIFO\n"); in s3c64xx_flush_fifo()
261 } while (--loops); in s3c64xx_flush_fifo()
264 dev_warn(&sdd->pdev->dev, "Timed out flushing RX FIFO\n"); in s3c64xx_flush_fifo()
281 if (dma->direction == DMA_DEV_TO_MEM) in s3c64xx_spi_dmacb()
288 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_dmacb()
290 if (dma->direction == DMA_DEV_TO_MEM) { in s3c64xx_spi_dmacb()
291 sdd->state &= ~RXBUSY; in s3c64xx_spi_dmacb()
292 if (!(sdd->state & TXBUSY)) in s3c64xx_spi_dmacb()
293 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
295 sdd->state &= ~TXBUSY; in s3c64xx_spi_dmacb()
296 if (!(sdd->state & RXBUSY)) in s3c64xx_spi_dmacb()
297 complete(&sdd->xfer_completion); in s3c64xx_spi_dmacb()
300 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_dmacb()
313 if (dma->direction == DMA_DEV_TO_MEM) { in s3c64xx_prepare_dma()
316 config.src_addr = sdd->sfr_start + S3C64XX_SPI_RX_DATA; in s3c64xx_prepare_dma()
317 config.src_addr_width = sdd->cur_bpw / 8; in s3c64xx_prepare_dma()
322 config.dst_addr = sdd->sfr_start + S3C64XX_SPI_TX_DATA; in s3c64xx_prepare_dma()
323 config.dst_addr_width = sdd->cur_bpw / 8; in s3c64xx_prepare_dma()
326 config.direction = dma->direction; in s3c64xx_prepare_dma()
327 ret = dmaengine_slave_config(dma->ch, &config); in s3c64xx_prepare_dma()
331 desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents, in s3c64xx_prepare_dma()
332 dma->direction, DMA_PREP_INTERRUPT); in s3c64xx_prepare_dma()
334 dev_err(&sdd->pdev->dev, "unable to prepare %s scatterlist", in s3c64xx_prepare_dma()
335 dma->direction == DMA_DEV_TO_MEM ? "rx" : "tx"); in s3c64xx_prepare_dma()
336 return -ENOMEM; in s3c64xx_prepare_dma()
339 desc->callback = s3c64xx_spi_dmacb; in s3c64xx_prepare_dma()
340 desc->callback_param = dma; in s3c64xx_prepare_dma()
342 dma->cookie = dmaengine_submit(desc); in s3c64xx_prepare_dma()
343 ret = dma_submit_error(dma->cookie); in s3c64xx_prepare_dma()
345 dev_err(&sdd->pdev->dev, "DMA submission failed"); in s3c64xx_prepare_dma()
349 dma_async_issue_pending(dma->ch); in s3c64xx_prepare_dma()
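
s3c64xx_prepare_dma() above follows the standard dmaengine slave sequence: configure the channel for the FIFO register address and width, prepare a scatter-gather descriptor, attach the completion callback, submit, then kick the engine with dma_async_issue_pending(). A condensed sketch of that sequence for the TX direction, assuming a channel already obtained with dma_request_chan(); queue_tx() and the 1-byte bus width are illustrative, not the driver's exact code:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Sketch: queue one mem-to-device transfer on @ch towards @fifo_addr. */
static int queue_tx(struct dma_chan *ch, struct sg_table *sgt,
                    dma_addr_t fifo_addr, void (*done)(void *), void *arg)
{
        struct dma_slave_config cfg = {
                .direction      = DMA_MEM_TO_DEV,
                .dst_addr       = fifo_addr,            /* e.g. the SPI_TX_DATA register */
                .dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE,
        };
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;
        int ret;

        ret = dmaengine_slave_config(ch, &cfg);
        if (ret)
                return ret;

        desc = dmaengine_prep_slave_sg(ch, sgt->sgl, sgt->nents,
                                       DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
        if (!desc)
                return -ENOMEM;

        desc->callback = done;                  /* e.g. s3c64xx_spi_dmacb() */
        desc->callback_param = arg;

        cookie = dmaengine_submit(desc);
        ret = dma_submit_error(cookie);
        if (ret)
                return ret;

        dma_async_issue_pending(ch);            /* actually start the transfer */
        return 0;
}

In the driver itself the callback is s3c64xx_spi_dmacb(), which clears TXBUSY/RXBUSY and completes xfer_completion as shown earlier.
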
353 static void s3c64xx_spi_set_cs(struct spi_device *spi, bool enable) in s3c64xx_spi_set_cs() argument
356 spi_controller_get_devdata(spi->controller); in s3c64xx_spi_set_cs()
358 if (sdd->cntrlr_info->no_cs) in s3c64xx_spi_set_cs()
362 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) { in s3c64xx_spi_set_cs()
363 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
365 u32 ssel = readl(sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
369 writel(ssel, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
372 if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_set_cs()
374 sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_set_cs()
378 static int s3c64xx_spi_prepare_transfer(struct spi_controller *spi) in s3c64xx_spi_prepare_transfer() argument
380 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(spi); in s3c64xx_spi_prepare_transfer()
386 sdd->rx_dma.ch = dma_request_chan(&sdd->pdev->dev, "rx"); in s3c64xx_spi_prepare_transfer()
387 if (IS_ERR(sdd->rx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
388 dev_err(&sdd->pdev->dev, "Failed to get RX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
389 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
393 sdd->tx_dma.ch = dma_request_chan(&sdd->pdev->dev, "tx"); in s3c64xx_spi_prepare_transfer()
394 if (IS_ERR(sdd->tx_dma.ch)) { in s3c64xx_spi_prepare_transfer()
395 dev_err(&sdd->pdev->dev, "Failed to get TX DMA channel\n"); in s3c64xx_spi_prepare_transfer()
396 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_prepare_transfer()
397 sdd->tx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
398 sdd->rx_dma.ch = NULL; in s3c64xx_spi_prepare_transfer()
402 spi->dma_rx = sdd->rx_dma.ch; in s3c64xx_spi_prepare_transfer()
403 spi->dma_tx = sdd->tx_dma.ch; in s3c64xx_spi_prepare_transfer()
408 static int s3c64xx_spi_unprepare_transfer(struct spi_controller *spi) in s3c64xx_spi_unprepare_transfer() argument
410 struct s3c64xx_spi_driver_data *sdd = spi_controller_get_devdata(spi); in s3c64xx_spi_unprepare_transfer()
416 if (sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_unprepare_transfer()
417 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_unprepare_transfer()
418 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_unprepare_transfer()
419 sdd->rx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
420 sdd->tx_dma.ch = NULL; in s3c64xx_spi_unprepare_transfer()
427 struct spi_device *spi, in s3c64xx_spi_can_dma() argument
432 if (sdd->rx_dma.ch && sdd->tx_dma.ch) in s3c64xx_spi_can_dma()
433 return xfer->len >= sdd->fifo_depth; in s3c64xx_spi_can_dma()
446 } while (--count); in s3c64xx_iowrite8_32_rep()
458 } while (--count); in s3c64xx_iowrite16_32_rep()
465 void __iomem *addr = sdd->regs + S3C64XX_SPI_TX_DATA; in s3c64xx_iowrite_rep()
466 const void *buf = xfer->tx_buf; in s3c64xx_iowrite_rep()
467 unsigned int len = xfer->len; in s3c64xx_iowrite_rep()
469 switch (sdd->cur_bpw) { in s3c64xx_iowrite_rep()
474 if (sdd->port_conf->use_32bit_io) in s3c64xx_iowrite_rep()
480 if (sdd->port_conf->use_32bit_io) in s3c64xx_iowrite_rep()
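
When port_conf->use_32bit_io is set, the data register only tolerates 32-bit accesses, so the s3c64xx_iowrite8_32_rep()/s3c64xx_iowrite16_32_rep() loops ending at the --count lines above issue one 32-bit write per 8- or 16-bit word instead of calling iowrite8_rep()/iowrite16_rep(). A hedged sketch of the 8-bit variant (generic name, not necessarily the driver's exact body):

#include <linux/io.h>
#include <linux/types.h>

/* Sketch: push @count 8-bit words through a register that only takes 32-bit writes. */
static void iowrite8_as_32_rep(void __iomem *addr, const void *buffer,
                               unsigned int count)
{
        const u8 *buf = buffer;

        while (count--)
                writel(*buf++, addr);   /* each byte becomes its own 32-bit access */
}
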
491 void __iomem *regs = sdd->regs; in s3c64xx_enable_datapath()
509 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
514 if (xfer->tx_buf != NULL) { in s3c64xx_enable_datapath()
515 sdd->state |= TXBUSY; in s3c64xx_enable_datapath()
519 ret = s3c64xx_prepare_dma(&sdd->tx_dma, &xfer->tx_sg); in s3c64xx_enable_datapath()
525 if (xfer->rx_buf != NULL) { in s3c64xx_enable_datapath()
526 sdd->state |= RXBUSY; in s3c64xx_enable_datapath()
528 if (sdd->port_conf->high_speed && sdd->cur_speed >= 30000000UL in s3c64xx_enable_datapath()
529 && !(sdd->cur_mode & SPI_CPHA)) in s3c64xx_enable_datapath()
535 writel(((xfer->len * 8 / sdd->cur_bpw) & 0xffff) in s3c64xx_enable_datapath()
538 ret = s3c64xx_prepare_dma(&sdd->rx_dma, &xfer->rx_sg); in s3c64xx_enable_datapath()
554 void __iomem *regs = sdd->regs; in s3c64xx_spi_wait_for_timeout()
557 u32 max_fifo = sdd->fifo_depth; in s3c64xx_spi_wait_for_timeout()
564 } while (RX_FIFO_LVL(status, sdd) < max_fifo && --val); in s3c64xx_spi_wait_for_timeout()
573 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_dma()
579 ms = xfer->len * 8 * 1000 / sdd->cur_speed; in s3c64xx_wait_for_dma()
584 val = wait_for_completion_timeout(&sdd->xfer_completion, val); in s3c64xx_wait_for_dma()
588 * proceed further else return -ETIMEDOUT. in s3c64xx_wait_for_dma()
595 if (val && !xfer->rx_buf) { in s3c64xx_wait_for_dma()
600 && --val) { in s3c64xx_wait_for_dma()
609 return -ETIMEDOUT; in s3c64xx_wait_for_dma()
617 void __iomem *regs = sdd->regs; in s3c64xx_wait_for_pio()
627 time_us = (xfer->len * 8 * 1000 * 1000) / sdd->cur_speed; in s3c64xx_wait_for_pio()
633 if (RX_FIFO_LVL(status, sdd) < xfer->len) in s3c64xx_wait_for_pio()
638 if (!wait_for_completion_timeout(&sdd->xfer_completion, val)) in s3c64xx_wait_for_pio()
639 return -ETIMEDOUT; in s3c64xx_wait_for_pio()
645 } while (RX_FIFO_LVL(status, sdd) < xfer->len && --val); in s3c64xx_wait_for_pio()
648 return -EIO; in s3c64xx_wait_for_pio()
651 if (!xfer->rx_buf) { in s3c64xx_wait_for_pio()
652 sdd->state &= ~TXBUSY; in s3c64xx_wait_for_pio()
664 loops = xfer->len / sdd->fifo_depth; in s3c64xx_wait_for_pio()
665 buf = xfer->rx_buf; in s3c64xx_wait_for_pio()
671 switch (sdd->cur_bpw) { in s3c64xx_wait_for_pio()
687 } while (loops--); in s3c64xx_wait_for_pio()
688 sdd->state &= ~RXBUSY; in s3c64xx_wait_for_pio()
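
The receive path in s3c64xx_wait_for_pio() drains the RX FIFO in chunks and picks the MMIO access width from the current bits-per-word setting. A condensed sketch of that per-width readout, assuming the usual ioreadN_rep() accessors (drain_rx_fifo() is an illustrative name, not the driver's):

#include <linux/io.h>

/* Sketch: read @count words of @bpw bits each from the RX data register. */
static void drain_rx_fifo(void __iomem *rx_data, void *buf,
                          unsigned int count, unsigned int bpw)
{
        switch (bpw) {
        case 32:
                ioread32_rep(rx_data, buf, count);
                break;
        case 16:
                ioread16_rep(rx_data, buf, count);
                break;
        default:
                ioread8_rep(rx_data, buf, count);
                break;
        }
}
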
695 void __iomem *regs = sdd->regs; in s3c64xx_spi_config()
698 int div = sdd->port_conf->clk_div; in s3c64xx_spi_config()
701 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
713 if (sdd->cur_mode & SPI_CPOL) in s3c64xx_spi_config()
716 if (sdd->cur_mode & SPI_CPHA) in s3c64xx_spi_config()
726 switch (sdd->cur_bpw) { in s3c64xx_spi_config()
741 if ((sdd->cur_mode & SPI_LOOP) && sdd->port_conf->has_loopback) in s3c64xx_spi_config()
748 if (sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_config()
749 ret = clk_set_rate(sdd->src_clk, sdd->cur_speed * div); in s3c64xx_spi_config()
752 sdd->cur_speed = clk_get_rate(sdd->src_clk) / div; in s3c64xx_spi_config()
757 val |= ((clk_get_rate(sdd->src_clk) / sdd->cur_speed / div - 1) in s3c64xx_spi_config()
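
For controllers that do not take the bus clock from the CMU, the effective SPI clock is src_clk / clk_div / (prescaler + 1), and the value folded into the register above is that prescaler. A quick worked example with hypothetical numbers:

#include <stdio.h>

int main(void)
{
        unsigned long src_clk = 100000000UL;    /* hypothetical 100 MHz parent   */
        unsigned long req     = 10000000UL;     /* requested 10 MHz SPI clock    */
        unsigned int  div     = 2;              /* port_conf->clk_div            */

        unsigned int  psr  = src_clk / div / req - 1;   /* -> 4                  */
        unsigned long real = src_clk / div / (psr + 1); /* -> 10000000 Hz exact  */

        printf("prescaler=%u, actual=%lu Hz\n", psr, real);
        return 0;
}

s3c64xx_spi_setup() performs the same arithmetic the other way around to clamp spi->max_speed_hz to a rate the prescaler can actually reach.
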
776 struct spi_device *spi = msg->spi; in s3c64xx_spi_prepare_message() local
777 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_prepare_message()
782 writel(0, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
784 writel(cs->fb_delay & 0x3, sdd->regs + S3C64XX_SPI_FB_CLK); in s3c64xx_spi_prepare_message()
789 static size_t s3c64xx_spi_max_transfer_size(struct spi_device *spi) in s3c64xx_spi_max_transfer_size() argument
791 struct spi_controller *ctlr = spi->controller; in s3c64xx_spi_max_transfer_size()
793 return ctlr->can_dma ? S3C64XX_SPI_PACKET_CNT_MASK : SIZE_MAX; in s3c64xx_spi_max_transfer_size()
797 struct spi_device *spi, in s3c64xx_spi_transfer_one() argument
801 const unsigned int fifo_len = sdd->fifo_depth; in s3c64xx_spi_transfer_one()
814 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
817 bpw = xfer->bits_per_word; in s3c64xx_spi_transfer_one()
818 speed = xfer->speed_hz; in s3c64xx_spi_transfer_one()
820 if (bpw != sdd->cur_bpw || speed != sdd->cur_speed) { in s3c64xx_spi_transfer_one()
821 sdd->cur_bpw = bpw; in s3c64xx_spi_transfer_one()
822 sdd->cur_speed = speed; in s3c64xx_spi_transfer_one()
823 sdd->cur_mode = spi->mode; in s3c64xx_spi_transfer_one()
829 if (!is_polling(sdd) && xfer->len >= fifo_len && in s3c64xx_spi_transfer_one()
830 sdd->rx_dma.ch && sdd->tx_dma.ch) { in s3c64xx_spi_transfer_one()
832 } else if (xfer->len >= fifo_len) { in s3c64xx_spi_transfer_one()
833 tx_buf = xfer->tx_buf; in s3c64xx_spi_transfer_one()
834 rx_buf = xfer->rx_buf; in s3c64xx_spi_transfer_one()
835 origin_len = xfer->len; in s3c64xx_spi_transfer_one()
836 target_len = xfer->len; in s3c64xx_spi_transfer_one()
837 xfer->len = fifo_len - 1; in s3c64xx_spi_transfer_one()
842 if (!use_dma && xfer->len > S3C64XX_SPI_POLLING_SIZE) in s3c64xx_spi_transfer_one()
846 reinit_completion(&sdd->xfer_completion); in s3c64xx_spi_transfer_one()
848 rdy_lv = xfer->len; in s3c64xx_spi_transfer_one()
851 * fifo_lvl up to 64 byte -> N bytes in s3c64xx_spi_transfer_one()
852 * 128 byte -> RDY_LVL * 2 bytes in s3c64xx_spi_transfer_one()
853 * 256 byte -> RDY_LVL * 4 bytes in s3c64xx_spi_transfer_one()
860 val = readl(sdd->regs + S3C64XX_SPI_MODE_CFG); in s3c64xx_spi_transfer_one()
863 writel(val, sdd->regs + S3C64XX_SPI_MODE_CFG); in s3c64xx_spi_transfer_one()
866 val = readl(sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_transfer_one()
868 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_transfer_one()
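
Per the comment above, the RX ready level programmed for polled transfers is expressed in FIFO-size-dependent units: plain bytes for FIFOs up to 64 bytes, half the byte count for a 128-byte FIFO, and a quarter for a 256-byte FIFO. A sketch of that scaling (bytes_to_rdy_lvl() is a hypothetical helper derived from the comment, not code taken from the driver):

/* Sketch: convert a byte count into the RDY_LVL units described above. */
static unsigned int bytes_to_rdy_lvl(unsigned int bytes, unsigned int fifo_depth)
{
        if (fifo_depth == 256)
                return bytes / 4;       /* 256-byte FIFO: RDY_LVL * 4 bytes */
        if (fifo_depth == 128)
                return bytes / 2;       /* 128-byte FIFO: RDY_LVL * 2 bytes */
        return bytes;                   /* <= 64-byte FIFO: plain bytes     */
}
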
872 spin_lock_irqsave(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
875 sdd->state &= ~RXBUSY; in s3c64xx_spi_transfer_one()
876 sdd->state &= ~TXBUSY; in s3c64xx_spi_transfer_one()
879 s3c64xx_spi_set_cs(spi, true); in s3c64xx_spi_transfer_one()
883 spin_unlock_irqrestore(&sdd->lock, flags); in s3c64xx_spi_transfer_one()
886 dev_err(&spi->dev, "failed to enable data path for transfer: %d\n", status); in s3c64xx_spi_transfer_one()
896 dev_err(&spi->dev, in s3c64xx_spi_transfer_one()
897 "I/O Error: rx-%d tx-%d rx-%c tx-%c len-%d dma-%d res-(%d)\n", in s3c64xx_spi_transfer_one()
898 xfer->rx_buf ? 1 : 0, xfer->tx_buf ? 1 : 0, in s3c64xx_spi_transfer_one()
899 (sdd->state & RXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
900 (sdd->state & TXBUSY) ? 'f' : 'p', in s3c64xx_spi_transfer_one()
901 xfer->len, use_dma ? 1 : 0, status); in s3c64xx_spi_transfer_one()
906 if (xfer->tx_buf && (sdd->state & TXBUSY)) { in s3c64xx_spi_transfer_one()
907 dmaengine_pause(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
908 dmaengine_tx_status(sdd->tx_dma.ch, sdd->tx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
909 dmaengine_terminate_all(sdd->tx_dma.ch); in s3c64xx_spi_transfer_one()
910 dev_err(&spi->dev, "TX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
913 if (xfer->rx_buf && (sdd->state & RXBUSY)) { in s3c64xx_spi_transfer_one()
914 dmaengine_pause(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
915 dmaengine_tx_status(sdd->rx_dma.ch, sdd->rx_dma.cookie, &s); in s3c64xx_spi_transfer_one()
916 dmaengine_terminate_all(sdd->rx_dma.ch); in s3c64xx_spi_transfer_one()
917 dev_err(&spi->dev, "RX residue: %d\n", s.residue); in s3c64xx_spi_transfer_one()
924 target_len -= xfer->len; in s3c64xx_spi_transfer_one()
926 if (xfer->tx_buf) in s3c64xx_spi_transfer_one()
927 xfer->tx_buf += xfer->len; in s3c64xx_spi_transfer_one()
929 if (xfer->rx_buf) in s3c64xx_spi_transfer_one()
930 xfer->rx_buf += xfer->len; in s3c64xx_spi_transfer_one()
933 xfer->len = fifo_len - 1; in s3c64xx_spi_transfer_one()
935 xfer->len = target_len; in s3c64xx_spi_transfer_one()
941 xfer->tx_buf = tx_buf; in s3c64xx_spi_transfer_one()
942 xfer->rx_buf = rx_buf; in s3c64xx_spi_transfer_one()
943 xfer->len = origin_len; in s3c64xx_spi_transfer_one()
950 struct spi_device *spi) in s3c64xx_get_target_ctrldata() argument
956 target_np = spi->dev.of_node; in s3c64xx_get_target_ctrldata()
958 dev_err(&spi->dev, "device node not found\n"); in s3c64xx_get_target_ctrldata()
959 return ERR_PTR(-EINVAL); in s3c64xx_get_target_ctrldata()
964 return ERR_PTR(-ENOMEM); in s3c64xx_get_target_ctrldata()
967 of_get_child_by_name(target_np, "controller-data"); in s3c64xx_get_target_ctrldata()
969 dev_info(&spi->dev, "feedback delay set to default (0)\n"); in s3c64xx_get_target_ctrldata()
973 of_property_read_u32(data_np, "samsung,spi-feedback-delay", &fb_delay); in s3c64xx_get_target_ctrldata()
974 cs->fb_delay = fb_delay; in s3c64xx_get_target_ctrldata()
980 * and save the configuration in a local data-structure.
984 static int s3c64xx_spi_setup(struct spi_device *spi) in s3c64xx_spi_setup() argument
986 struct s3c64xx_spi_csinfo *cs = spi->controller_data; in s3c64xx_spi_setup()
991 sdd = spi_controller_get_devdata(spi->controller); in s3c64xx_spi_setup()
992 if (spi->dev.of_node) { in s3c64xx_spi_setup()
993 cs = s3c64xx_get_target_ctrldata(spi); in s3c64xx_spi_setup()
994 spi->controller_data = cs; in s3c64xx_spi_setup()
999 dev_err(&spi->dev, "No CS for SPI(%d)\n", spi_get_chipselect(spi, 0)); in s3c64xx_spi_setup()
1000 return -ENODEV; in s3c64xx_spi_setup()
1003 if (!spi_get_ctldata(spi)) in s3c64xx_spi_setup()
1004 spi_set_ctldata(spi, cs); in s3c64xx_spi_setup()
1006 pm_runtime_get_sync(&sdd->pdev->dev); in s3c64xx_spi_setup()
1008 div = sdd->port_conf->clk_div; in s3c64xx_spi_setup()
1011 if (!sdd->port_conf->clk_from_cmu) { in s3c64xx_spi_setup()
1015 speed = clk_get_rate(sdd->src_clk) / div / (0 + 1); in s3c64xx_spi_setup()
1017 if (spi->max_speed_hz > speed) in s3c64xx_spi_setup()
1018 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
1020 psr = clk_get_rate(sdd->src_clk) / div / spi->max_speed_hz - 1; in s3c64xx_spi_setup()
1023 psr--; in s3c64xx_spi_setup()
1025 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
1026 if (spi->max_speed_hz < speed) { in s3c64xx_spi_setup()
1030 err = -EINVAL; in s3c64xx_spi_setup()
1035 speed = clk_get_rate(sdd->src_clk) / div / (psr + 1); in s3c64xx_spi_setup()
1036 if (spi->max_speed_hz >= speed) { in s3c64xx_spi_setup()
1037 spi->max_speed_hz = speed; in s3c64xx_spi_setup()
1039 dev_err(&spi->dev, "Can't set %dHz transfer speed\n", in s3c64xx_spi_setup()
1040 spi->max_speed_hz); in s3c64xx_spi_setup()
1041 err = -EINVAL; in s3c64xx_spi_setup()
1046 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
1047 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
1048 s3c64xx_spi_set_cs(spi, false); in s3c64xx_spi_setup()
1053 pm_runtime_mark_last_busy(&sdd->pdev->dev); in s3c64xx_spi_setup()
1054 pm_runtime_put_autosuspend(&sdd->pdev->dev); in s3c64xx_spi_setup()
1055 /* setup() returns with device de-selected */ in s3c64xx_spi_setup()
1056 s3c64xx_spi_set_cs(spi, false); in s3c64xx_spi_setup()
1058 spi_set_ctldata(spi, NULL); in s3c64xx_spi_setup()
1061 if (spi->dev.of_node) in s3c64xx_spi_setup()
1067 static void s3c64xx_spi_cleanup(struct spi_device *spi) in s3c64xx_spi_cleanup() argument
1069 struct s3c64xx_spi_csinfo *cs = spi_get_ctldata(spi); in s3c64xx_spi_cleanup()
1072 if (spi->dev.of_node) in s3c64xx_spi_cleanup()
1075 spi_set_ctldata(spi, NULL); in s3c64xx_spi_cleanup()
1081 struct spi_controller *spi = sdd->host; in s3c64xx_spi_irq() local
1084 val = readl(sdd->regs + S3C64XX_SPI_STATUS); in s3c64xx_spi_irq()
1088 dev_err(&spi->dev, "RX overrun\n"); in s3c64xx_spi_irq()
1092 dev_err(&spi->dev, "RX underrun\n"); in s3c64xx_spi_irq()
1096 dev_err(&spi->dev, "TX overrun\n"); in s3c64xx_spi_irq()
1100 dev_err(&spi->dev, "TX underrun\n"); in s3c64xx_spi_irq()
1104 complete(&sdd->xfer_completion); in s3c64xx_spi_irq()
1105 /* No pending clear irq, turn-off INT_EN_RX_FIFO_RDY */ in s3c64xx_spi_irq()
1106 val = readl(sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_irq()
1108 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_irq()
1112 writel(clr, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1113 writel(0, sdd->regs + S3C64XX_SPI_PENDING_CLR); in s3c64xx_spi_irq()
1120 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_hwinit()
1121 void __iomem *regs = sdd->regs; in s3c64xx_spi_hwinit()
1124 sdd->cur_speed = 0; in s3c64xx_spi_hwinit()
1126 if (sci->no_cs) in s3c64xx_spi_hwinit()
1127 writel(0, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1128 else if (!(sdd->port_conf->quirks & S3C64XX_SPI_QUIRK_CS_AUTO)) in s3c64xx_spi_hwinit()
1129 writel(S3C64XX_SPI_CS_SIG_INACT, sdd->regs + S3C64XX_SPI_CS_REG); in s3c64xx_spi_hwinit()
1131 /* Disable Interrupts - we use Polling if not DMA mode */ in s3c64xx_spi_hwinit()
1134 if (!sdd->port_conf->clk_from_cmu) in s3c64xx_spi_hwinit()
1135 writel(sci->src_clk_nr << S3C64XX_SPI_CLKSEL_SRCSHFT, in s3c64xx_spi_hwinit()
1166 return ERR_PTR(-ENOMEM); in s3c64xx_spi_parse_dt()
1168 if (of_property_read_u32(dev->of_node, "samsung,spi-src-clk", &temp)) { in s3c64xx_spi_parse_dt()
1169 dev_dbg(dev, "spi bus clock parent not specified, using clock at index 0 as parent\n"); in s3c64xx_spi_parse_dt()
1170 sci->src_clk_nr = 0; in s3c64xx_spi_parse_dt()
1172 sci->src_clk_nr = temp; in s3c64xx_spi_parse_dt()
1175 if (of_property_read_u32(dev->of_node, "num-cs", &temp)) { in s3c64xx_spi_parse_dt()
1177 sci->num_cs = 1; in s3c64xx_spi_parse_dt()
1179 sci->num_cs = temp; in s3c64xx_spi_parse_dt()
1182 sci->no_cs = of_property_read_bool(dev->of_node, "no-cs-readback"); in s3c64xx_spi_parse_dt()
1183 sci->polling = !of_property_present(dev->of_node, "dmas"); in s3c64xx_spi_parse_dt()
1198 if (pdev->dev.of_node) in s3c64xx_spi_get_port_config()
1199 return of_device_get_match_data(&pdev->dev); in s3c64xx_spi_get_port_config()
1201 return (const struct s3c64xx_spi_port_config *)platform_get_device_id(pdev)->driver_data; in s3c64xx_spi_get_port_config()
1207 const struct s3c64xx_spi_port_config *port_conf = sdd->port_conf; in s3c64xx_spi_set_port_id()
1210 if (port_conf->rx_fifomask && port_conf->tx_fifomask) in s3c64xx_spi_set_port_id()
1213 if (pdev->dev.of_node) { in s3c64xx_spi_set_port_id()
1214 ret = of_alias_get_id(pdev->dev.of_node, "spi"); in s3c64xx_spi_set_port_id()
1216 return dev_err_probe(&pdev->dev, ret, in s3c64xx_spi_set_port_id()
1218 sdd->port_id = ret; in s3c64xx_spi_set_port_id()
1220 if (pdev->id < 0) in s3c64xx_spi_set_port_id()
1221 return dev_err_probe(&pdev->dev, -EINVAL, in s3c64xx_spi_set_port_id()
1223 sdd->port_id = pdev->id; in s3c64xx_spi_set_port_id()
1231 const struct s3c64xx_spi_port_config *port_conf = sdd->port_conf; in s3c64xx_spi_set_fifomask()
1233 if (port_conf->rx_fifomask) in s3c64xx_spi_set_fifomask()
1234 sdd->rx_fifomask = port_conf->rx_fifomask; in s3c64xx_spi_set_fifomask()
1236 sdd->rx_fifomask = FIFO_LVL_MASK(sdd) << in s3c64xx_spi_set_fifomask()
1237 port_conf->rx_lvl_offset; in s3c64xx_spi_set_fifomask()
1239 if (port_conf->tx_fifomask) in s3c64xx_spi_set_fifomask()
1240 sdd->tx_fifomask = port_conf->tx_fifomask; in s3c64xx_spi_set_fifomask()
1242 sdd->tx_fifomask = FIFO_LVL_MASK(sdd) << in s3c64xx_spi_set_fifomask()
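
When a port config carries no explicit rx/tx fifomask, s3c64xx_spi_set_fifomask() synthesizes one from the legacy per-port level mask and offset, as above. With hypothetical values (a 9-bit level field shifted to bit 15) the result matches the mask used in the earlier FIFO-level example:

/* Hypothetical values, for illustration only. */
#define EXAMPLE_FIFO_LVL_MASK   0x1ffU          /* 9-bit level field      */
#define EXAMPLE_RX_LVL_OFFSET   15              /* field starts at bit 15 */
#define EXAMPLE_RX_FIFOMASK     (EXAMPLE_FIFO_LVL_MASK << EXAMPLE_RX_LVL_OFFSET) /* 0x00ff8000 */
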
1250 struct s3c64xx_spi_info *sci = dev_get_platdata(&pdev->dev); in s3c64xx_spi_probe()
1255 if (!sci && pdev->dev.of_node) { in s3c64xx_spi_probe()
1256 sci = s3c64xx_spi_parse_dt(&pdev->dev); in s3c64xx_spi_probe()
1262 return dev_err_probe(&pdev->dev, -ENODEV, in s3c64xx_spi_probe()
1269 host = devm_spi_alloc_host(&pdev->dev, sizeof(*sdd)); in s3c64xx_spi_probe()
1271 return dev_err_probe(&pdev->dev, -ENOMEM, in s3c64xx_spi_probe()
1272 "Unable to allocate SPI Host\n"); in s3c64xx_spi_probe()
1277 sdd->port_conf = s3c64xx_spi_get_port_config(pdev); in s3c64xx_spi_probe()
1278 sdd->host = host; in s3c64xx_spi_probe()
1279 sdd->cntrlr_info = sci; in s3c64xx_spi_probe()
1280 sdd->pdev = pdev; in s3c64xx_spi_probe()
1286 if (sdd->port_conf->fifo_depth) in s3c64xx_spi_probe()
1287 sdd->fifo_depth = sdd->port_conf->fifo_depth; in s3c64xx_spi_probe()
1288 else if (of_property_read_u32(pdev->dev.of_node, "fifo-depth", in s3c64xx_spi_probe()
1289 &sdd->fifo_depth)) in s3c64xx_spi_probe()
1290 sdd->fifo_depth = FIFO_DEPTH(sdd); in s3c64xx_spi_probe()
1294 sdd->cur_bpw = 8; in s3c64xx_spi_probe()
1296 sdd->tx_dma.direction = DMA_MEM_TO_DEV; in s3c64xx_spi_probe()
1297 sdd->rx_dma.direction = DMA_DEV_TO_MEM; in s3c64xx_spi_probe()
1299 host->dev.of_node = pdev->dev.of_node; in s3c64xx_spi_probe()
1300 host->bus_num = -1; in s3c64xx_spi_probe()
1301 host->setup = s3c64xx_spi_setup; in s3c64xx_spi_probe()
1302 host->cleanup = s3c64xx_spi_cleanup; in s3c64xx_spi_probe()
1303 host->prepare_transfer_hardware = s3c64xx_spi_prepare_transfer; in s3c64xx_spi_probe()
1304 host->unprepare_transfer_hardware = s3c64xx_spi_unprepare_transfer; in s3c64xx_spi_probe()
1305 host->prepare_message = s3c64xx_spi_prepare_message; in s3c64xx_spi_probe()
1306 host->transfer_one = s3c64xx_spi_transfer_one; in s3c64xx_spi_probe()
1307 host->max_transfer_size = s3c64xx_spi_max_transfer_size; in s3c64xx_spi_probe()
1308 host->num_chipselect = sci->num_cs; in s3c64xx_spi_probe()
1309 host->use_gpio_descriptors = true; in s3c64xx_spi_probe()
1310 host->dma_alignment = 8; in s3c64xx_spi_probe()
1311 host->bits_per_word_mask = SPI_BPW_MASK(32) | SPI_BPW_MASK(16) | in s3c64xx_spi_probe()
1313 /* the spi->mode bits understood by this driver: */ in s3c64xx_spi_probe()
1314 host->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH; in s3c64xx_spi_probe()
1315 if (sdd->port_conf->has_loopback) in s3c64xx_spi_probe()
1316 host->mode_bits |= SPI_LOOP; in s3c64xx_spi_probe()
1317 host->auto_runtime_pm = true; in s3c64xx_spi_probe()
1319 host->can_dma = s3c64xx_spi_can_dma; in s3c64xx_spi_probe()
1321 sdd->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &mem_res); in s3c64xx_spi_probe()
1322 if (IS_ERR(sdd->regs)) in s3c64xx_spi_probe()
1323 return PTR_ERR(sdd->regs); in s3c64xx_spi_probe()
1324 sdd->sfr_start = mem_res->start; in s3c64xx_spi_probe()
1326 if (sci->cfg_gpio && sci->cfg_gpio()) in s3c64xx_spi_probe()
1327 return dev_err_probe(&pdev->dev, -EBUSY, in s3c64xx_spi_probe()
1331 sdd->clk = devm_clk_get_enabled(&pdev->dev, "spi"); in s3c64xx_spi_probe()
1332 if (IS_ERR(sdd->clk)) in s3c64xx_spi_probe()
1333 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->clk), in s3c64xx_spi_probe()
1334 "Unable to acquire clock 'spi'\n"); in s3c64xx_spi_probe()
1336 sprintf(clk_name, "spi_busclk%d", sci->src_clk_nr); in s3c64xx_spi_probe()
1337 sdd->src_clk = devm_clk_get_enabled(&pdev->dev, clk_name); in s3c64xx_spi_probe()
1338 if (IS_ERR(sdd->src_clk)) in s3c64xx_spi_probe()
1339 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->src_clk), in s3c64xx_spi_probe()
1343 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_probe()
1344 sdd->ioclk = devm_clk_get_enabled(&pdev->dev, "spi_ioclk"); in s3c64xx_spi_probe()
1345 if (IS_ERR(sdd->ioclk)) in s3c64xx_spi_probe()
1346 return dev_err_probe(&pdev->dev, PTR_ERR(sdd->ioclk), in s3c64xx_spi_probe()
1350 pm_runtime_set_autosuspend_delay(&pdev->dev, AUTOSUSPEND_TIMEOUT); in s3c64xx_spi_probe()
1351 pm_runtime_use_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1352 pm_runtime_set_active(&pdev->dev); in s3c64xx_spi_probe()
1353 pm_runtime_enable(&pdev->dev); in s3c64xx_spi_probe()
1354 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_probe()
1359 spin_lock_init(&sdd->lock); in s3c64xx_spi_probe()
1360 init_completion(&sdd->xfer_completion); in s3c64xx_spi_probe()
1362 ret = devm_request_irq(&pdev->dev, irq, s3c64xx_spi_irq, 0, in s3c64xx_spi_probe()
1363 "spi-s3c64xx", sdd); in s3c64xx_spi_probe()
1365 dev_err(&pdev->dev, "Failed to request IRQ %d: %d\n", in s3c64xx_spi_probe()
1372 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_probe()
1374 ret = devm_spi_register_controller(&pdev->dev, host); in s3c64xx_spi_probe()
1376 dev_err(&pdev->dev, "cannot register SPI host: %d\n", ret); in s3c64xx_spi_probe()
1380 dev_dbg(&pdev->dev, "Samsung SoC SPI Driver loaded for Bus SPI-%d with %d Targets attached\n", in s3c64xx_spi_probe()
1381 host->bus_num, host->num_chipselect); in s3c64xx_spi_probe()
1382 dev_dbg(&pdev->dev, "\tIOmem=[%pR]\tFIFO %dbytes\n", in s3c64xx_spi_probe()
1383 mem_res, sdd->fifo_depth); in s3c64xx_spi_probe()
1385 pm_runtime_mark_last_busy(&pdev->dev); in s3c64xx_spi_probe()
1386 pm_runtime_put_autosuspend(&pdev->dev); in s3c64xx_spi_probe()
1391 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_probe()
1392 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_probe()
1393 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_probe()
1403 pm_runtime_get_sync(&pdev->dev); in s3c64xx_spi_remove()
1405 writel(0, sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_remove()
1408 dma_release_channel(sdd->rx_dma.ch); in s3c64xx_spi_remove()
1409 dma_release_channel(sdd->tx_dma.ch); in s3c64xx_spi_remove()
1412 pm_runtime_put_noidle(&pdev->dev); in s3c64xx_spi_remove()
1413 pm_runtime_disable(&pdev->dev); in s3c64xx_spi_remove()
1414 pm_runtime_set_suspended(&pdev->dev); in s3c64xx_spi_remove()
1432 sdd->cur_speed = 0; /* Output Clock is stopped */ in s3c64xx_spi_suspend()
1441 struct s3c64xx_spi_info *sci = sdd->cntrlr_info; in s3c64xx_spi_resume()
1444 if (sci->cfg_gpio) in s3c64xx_spi_resume()
1445 sci->cfg_gpio(); in s3c64xx_spi_resume()
1461 clk_disable_unprepare(sdd->clk); in s3c64xx_spi_runtime_suspend()
1462 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_suspend()
1463 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_suspend()
1474 if (sdd->port_conf->clk_ioclk) { in s3c64xx_spi_runtime_resume()
1475 ret = clk_prepare_enable(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1480 ret = clk_prepare_enable(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1484 ret = clk_prepare_enable(sdd->clk); in s3c64xx_spi_runtime_resume()
1492 sdd->regs + S3C64XX_SPI_INT_EN); in s3c64xx_spi_runtime_resume()
1497 clk_disable_unprepare(sdd->src_clk); in s3c64xx_spi_runtime_resume()
1499 clk_disable_unprepare(sdd->ioclk); in s3c64xx_spi_runtime_resume()
1632 .name = "s3c2443-spi",
1635 .name = "s3c6410-spi",
1643 { .compatible = "google,gs101-spi",
1646 { .compatible = "samsung,s3c2443-spi",
1649 { .compatible = "samsung,s3c6410-spi",
1652 { .compatible = "samsung,s5pv210-spi",
1655 { .compatible = "samsung,exynos4210-spi",
1658 { .compatible = "samsung,exynos7-spi",
1661 { .compatible = "samsung,exynos5433-spi",
1664 { .compatible = "samsung,exynos850-spi",
1667 { .compatible = "samsung,exynosautov9-spi",
1670 { .compatible = "tesla,fsd-spi",
1679 .name = "s3c64xx-spi",
1687 MODULE_ALIAS("platform:s3c64xx-spi");
1692 MODULE_DESCRIPTION("S3C64XX SPI Controller Driver");