// SPDX-License-Identifier: GPL-2.0
/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014-19 Wolfram Sang <wsa@sang-engineering.com>
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>

/* Transmit operation: */
/* */
/* 0 byte transmit */
/* BUS:     S     A8     ACK   P(*) */
/* IRQ:       DTE        WAIT */
/* ICIC: */
/* ICCR:    0x94         0x90 */
/* ICDR:      A8 */
/* */
/* 1 byte transmit */
/* BUS:     S     A8     ACK   D8(1)   ACK   P(*) */
/* IRQ:       DTE        WAIT          WAIT */
/* ICIC:      -DTE */
/* ICCR:    0x94                       0x90 */
/* ICDR:      A8         D8(1) */
/* */
/* 2 byte transmit */
/* BUS:     S     A8     ACK   D8(1)   ACK   D8(2)   ACK   P(*) */
/* IRQ:       DTE        WAIT          WAIT          WAIT */
/* ICIC:      -DTE */
/* ICCR:    0x94                                     0x90 */
/* ICDR:      A8         D8(1)         D8(2) */
/* */
/* 3 bytes or more, +---------+ gets repeated */
/* */
/* */
/* Receive operation: */
/* */
/* 0 byte receive - not supported since slave may hold SDA low */
/* */
/* 1 byte receive       [TX] | [RX] */
/* BUS:     S     A8     ACK | D8(1)   ACK   P(*) */
/* IRQ:       DTE       WAIT |         WAIT  DTE */
/* ICIC:      -DTE           |         +DTE */
/* ICCR:    0x94        0x81 |         0xc0 */
/* ICDR:      A8             |               D8(1) */
/* */
/* 2 byte receive       [TX] | [RX] */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   P(*) */
/* IRQ:       DTE       WAIT |         WAIT          WAIT  DTE */
/* ICIC:      -DTE           |         +DTE */
/* ICCR:    0x94        0x81 |         0xc0 */
/* ICDR:      A8             |                       D8(1) D8(2) */
/* */
/* 3 byte receive       [TX] | [RX]                                  (*) */
/* BUS:     S     A8     ACK | D8(1)   ACK   D8(2)   ACK   D8(3)   ACK   P */
/* IRQ:       DTE       WAIT |         WAIT          WAIT          WAIT  DTE */
/* ICIC:      -DTE           |         +DTE */
/* ICCR:    0x94        0x81 |         0xc0 */
/* ICDR:      A8             |                       D8(1)         D8(2) D8(3) */
/* */
/* 4 bytes or more, this part is repeated   +---------+ */
/* */
/* */
/* Interrupt order and BUSY flag */
/*     ___                                                    _ */
/* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/ */
/* SCL       \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/ */
/* */
/*        S   D7  D6  D5  D4  D3  D2  D1  D0              P(*) */
/*                                           ___ */
/* WAIT IRQ ________________________________/   \___________ */
/* TACK IRQ ____________________________________/   \_______ */
/* DTE  IRQ __________________________________________/   \_ */
/* AL   IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX */
/*          _______________________________________________ */
/* BUSY  __/                                               \_ */
/* */
/* (*) The STOP condition is only sent by the master at the end of the last */
/* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is */
/* only cleared after the STOP condition, so, between messages we have to */
/* poll for the DTE bit. */
/* */
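/* The ICCR values used in the diagrams above decode, per the bit */
/* definitions further down, as: 0x94 = ICE | TRS | BBSY (start or */
/* repeated start in transmit mode), 0x90 = ICE | TRS (stop after */
/* transmit), 0x81 = ICE | SCP (switch to receive mode) and */
/* 0xc0 = ICE | RACK (terminate the receive and issue the stop). */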

enum sh_mobile_i2c_op {
	OP_START = 0,
	OP_TX_FIRST,
	OP_TX,
	OP_TX_STOP,
	OP_TX_TO_RX,
	OP_RX,
	OP_RX_STOP,
	OP_RX_STOP_DATA,
};

struct sh_mobile_i2c_data {
	struct device *dev;
	void __iomem *reg;
	struct i2c_adapter adap;
	unsigned long bus_speed;
	unsigned int clks_per_count;
	struct clk *clk;
	u_int8_t icic;
	u_int8_t flags;
	u_int16_t iccl;
	u_int16_t icch;

	spinlock_t lock;
	wait_queue_head_t wait;
	struct i2c_msg *msg;
	int pos;
	int sr;
	bool send_stop;
	bool stop_after_dma;
	bool atomic_xfer;

	struct resource *res;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	struct scatterlist sg;
	enum dma_data_direction dma_direction;
	u8 *dma_buf;
};

struct sh_mobile_dt_config {
	int clks_per_count;
	int (*setup)(struct sh_mobile_i2c_data *pd);
};

#define IIC_FLAG_HAS_ICIC67	(1 << 0)

/* Register offsets */
#define ICDR		0x00
#define ICCR		0x04
#define ICSR		0x08
#define ICIC		0x0c
#define ICCL		0x10
#define ICCH		0x14
#define ICSTART		0x70

/* Register bits */
#define ICCR_ICE	0x80
#define ICCR_RACK	0x40
#define ICCR_TRS	0x10
#define ICCR_BBSY	0x04
#define ICCR_SCP	0x01

#define ICSR_SCLM	0x80
#define ICSR_SDAM	0x40
#define SW_DONE		0x20
#define ICSR_BUSY	0x10
#define ICSR_AL		0x08
#define ICSR_TACK	0x04
#define ICSR_WAIT	0x02
#define ICSR_DTE	0x01

#define ICIC_ICCLB8	0x80
#define ICIC_ICCHB8	0x40
#define ICIC_TDMAE	0x20
#define ICIC_RDMAE	0x10
#define ICIC_ALE	0x08
#define ICIC_TACKE	0x04
#define ICIC_WAITE	0x02
#define ICIC_DTEE	0x01

#define ICSTART_ICSTART	0x10

static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
{
	if (offs == ICIC)
		data |= pd->icic;

	iowrite8(data, pd->reg + offs);
}

static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
{
	return ioread8(pd->reg + offs);
}

static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}

static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCL >= COUNT_CLK * (tLOW + tf)
	 *
	 * SH-Mobile IIC hardware starts counting the LOW period of
	 * the SCL signal (tLOW) as soon as it pulls the SCL line.
	 * In order to meet the tLOW timing spec, we need to take into
	 * account the fall time of SCL signal (tf). Default tf value
	 * should be 0.3 us, for safety.
	 */
	return (((count_khz * (tLOW + tf)) + 5000) / 10000);
}

static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
{
	/*
	 * Conditional expression:
	 *   ICCH >= COUNT_CLK * (tHIGH + tf)
	 *
	 * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
	 * and can ignore it. SH-Mobile IIC controller starts counting
	 * the HIGH period of the SCL signal (tHIGH) after the SCL input
	 * voltage increases at VIH.
	 *
	 * Afterward it turned out calculating ICCH using only tHIGH spec
	 * will result in violation of the tHD;STA timing spec. We need
	 * to take into account the fall time of SDA signal (tf) at START
	 * condition, in order to meet both tHIGH and tHD;STA specs.
	 */
	return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
}
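
/*
 * Worked example for sh_mobile_i2c_iccl()/sh_mobile_i2c_icch() above
 * (illustrative numbers, not taken from a specific SoC): with an internal
 * count clock of 10 MHz (count_khz = 10000) and standard-mode timings
 * tLOW = 4.7 us, tHIGH = 4.0 us, tf = 0.3 us (passed as 47, 40 and 3,
 * i.e. in units of 0.1 us), the formulas give
 * ICCL = (10000 * 50 + 5000) / 10000 = 50 counts and
 * ICCH = (10000 * 43 + 5000) / 10000 = 43 counts.
 */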

static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
{
	u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;

	if (pd->iccl > max_val || pd->icch > max_val) {
		dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
			pd->iccl, pd->icch);
		return -EINVAL;
	}

	/* one more bit of ICCL in ICIC */
	if (pd->iccl & 0x100)
		pd->icic |= ICIC_ICCLB8;
	else
		pd->icic &= ~ICIC_ICCLB8;

	/* one more bit of ICCH in ICIC */
	if (pd->icch & 0x100)
		pd->icic |= ICIC_ICCHB8;
	else
		pd->icic &= ~ICIC_ICCHB8;

	dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
	return 0;
}

static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long i2c_clk_khz;
	u32 tHIGH, tLOW, tf;

	i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;

	if (pd->bus_speed == I2C_MAX_STANDARD_MODE_FREQ) {
		tLOW = 47;	/* tLOW = 4.7 us */
		tHIGH = 40;	/* tHD;STA = tHIGH = 4.0 us */
		tf = 3;		/* tf = 0.3 us */
	} else if (pd->bus_speed == I2C_MAX_FAST_MODE_FREQ) {
		tLOW = 13;	/* tLOW = 1.3 us */
		tHIGH = 6;	/* tHD;STA = tHIGH = 0.6 us */
		tf = 3;		/* tf = 0.3 us */
	} else {
		dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
			pd->bus_speed);
		return -EINVAL;
	}

	pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
	pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);

	return sh_mobile_i2c_check_timing(pd);
}

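/*
 * The setup below splits each bus cycle into 5 parts LOW and 4 parts HIGH
 * (L + H = 9). Illustrative example with hypothetical numbers: for a
 * 100 MHz IIC clock, a 400 kHz bus and clks_per_count = 2,
 * clks_per_cycle = 250, giving iccl = DIV_ROUND_UP(250 * 5 / 9 - 1, 2) = 69
 * and icch = DIV_ROUND_UP(250 * 4 / 9 - 5, 2) = 53.
 */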
static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
{
	unsigned long clks_per_cycle;

	/* L = 5, H = 4, L + H = 9 */
	clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
	pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
	pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);

	return sh_mobile_i2c_check_timing(pd);
}

static unsigned char i2c_op(struct sh_mobile_i2c_data *pd, enum sh_mobile_i2c_op op)
{
	unsigned char ret = 0;
	unsigned long flags;

	dev_dbg(pd->dev, "op %d\n", op);

	spin_lock_irqsave(&pd->lock, flags);

	switch (op) {
	case OP_START: /* issue start and trigger DTE interrupt */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_FIRST: /* disable DTE interrupt and write client address */
		iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICDR, i2c_8bit_addr_from_msg(pd->msg));
		break;
	case OP_TX: /* write data */
		iic_wr(pd, ICDR, pd->msg->buf[pd->pos]);
		break;
	case OP_TX_STOP: /* issue a stop (or rep_start) */
		iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
					       : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
		break;
	case OP_TX_TO_RX: /* select read mode */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
		break;
	case OP_RX: /* just read data */
		ret = iic_rd(pd, ICDR);
		break;
	case OP_RX_STOP: /* enable DTE interrupt, issue stop */
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
		if (!pd->atomic_xfer)
			iic_wr(pd, ICIC,
			       ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
		ret = iic_rd(pd, ICDR);
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
		break;
	}

	spin_unlock_irqrestore(&pd->lock, flags);

	dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);
	return ret;
}

static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
{
	if (pd->pos == pd->msg->len) {
		i2c_op(pd, OP_TX_STOP);
		return 1;
	}

	if (pd->pos == -1)
		i2c_op(pd, OP_TX_FIRST);
	else
		i2c_op(pd, OP_TX);

	pd->pos++;
	return 0;
}

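/*
 * Example walk-through of the position bookkeeping below for a 1-byte
 * read (msg->len == 1): pos goes -1 (send address), 0 (switch to RX),
 * 1 (request the stop, no data available yet), 2 (read D8(1)); the
 * transfer is reported done once pos reaches msg->len + 2 == 3.
 */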
static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
{
	int real_pos;

	/* switch from TX (address) to RX (data) adds two interrupts */
	real_pos = pd->pos - 2;

	if (pd->pos == -1) {
		i2c_op(pd, OP_TX_FIRST);
	} else if (pd->pos == 0) {
		i2c_op(pd, OP_TX_TO_RX);
	} else if (pd->pos == pd->msg->len) {
		if (pd->stop_after_dma) {
			/* Simulate PIO end condition after DMA transfer */
			i2c_op(pd, OP_RX_STOP);
			pd->pos++;
			goto done;
		}

		if (real_pos < 0)
			i2c_op(pd, OP_RX_STOP);
		else
			pd->msg->buf[real_pos] = i2c_op(pd, OP_RX_STOP_DATA);
	} else if (real_pos >= 0) {
		pd->msg->buf[real_pos] = i2c_op(pd, OP_RX);
	}

done:
	pd->pos++;
	return pd->pos == (pd->msg->len + 2);
}

static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
{
	struct sh_mobile_i2c_data *pd = dev_id;
	unsigned char sr;
	int wakeup = 0;

	sr = iic_rd(pd, ICSR);
	pd->sr |= sr; /* remember state */

	dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
		(pd->msg->flags & I2C_M_RD) ? "read" : "write",
		pd->pos, pd->msg->len);

	/* Kick off TxDMA after preface was done */
	if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
		iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
	else if (sr & (ICSR_AL | ICSR_TACK))
		/* don't interrupt transaction - continue to issue stop */
		iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
	else if (pd->msg->flags & I2C_M_RD)
		wakeup = sh_mobile_i2c_isr_rx(pd);
	else
		wakeup = sh_mobile_i2c_isr_tx(pd);

	/* Kick off RxDMA after preface was done */
	if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
		iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);

	if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
		iic_wr(pd, ICSR, sr & ~ICSR_WAIT);

	if (wakeup) {
		pd->sr |= SW_DONE;
		if (!pd->atomic_xfer)
			wake_up(&pd->wait);
	}

	/* defeat write posting to avoid spurious WAIT interrupts */
	iic_rd(pd, ICSR);

	return IRQ_HANDLED;
}

static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd, bool terminate)
{
	struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
				? pd->dma_rx : pd->dma_tx;

	/* only allowed from thread context! */
	if (terminate)
		dmaengine_terminate_sync(chan);

	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
			 pd->msg->len, pd->dma_direction);

	pd->dma_direction = DMA_NONE;
}

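/*
 * Runs from the dmaengine completion callback, which is typically
 * tasklet/softirq context, so the channel must not be terminated here
 * (see the "thread context" note in sh_mobile_i2c_cleanup_dma());
 * termination only happens from the timeout path in sh_mobile_xfer().
 */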
static void sh_mobile_i2c_dma_callback(void *data)
{
	struct sh_mobile_i2c_data *pd = data;

	sh_mobile_i2c_cleanup_dma(pd, false);
	pd->pos = pd->msg->len;
	pd->stop_after_dma = true;

	iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
}

static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
				enum dma_transfer_direction dir, dma_addr_t port_addr)
{
	struct dma_chan *chan;
	struct dma_slave_config cfg;
	char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
	int ret;

	chan = dma_request_chan(dev, chan_name);
	if (IS_ERR(chan)) {
		dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
			PTR_ERR(chan));
		return chan;
	}

	memset(&cfg, 0, sizeof(cfg));
	cfg.direction = dir;
	if (dir == DMA_MEM_TO_DEV) {
		cfg.dst_addr = port_addr;
		cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else {
		cfg.src_addr = port_addr;
		cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	}

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret) {
		dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
		dma_release_channel(chan);
		return ERR_PTR(ret);
	}

	dev_dbg(dev, "got DMA channel for %s\n", chan_name);
	return chan;
}

static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
{
	bool read = pd->msg->flags & I2C_M_RD;
	enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
	struct dma_async_tx_descriptor *txdesc;
	dma_addr_t dma_addr;
	dma_cookie_t cookie;

	if (PTR_ERR(chan) == -EPROBE_DEFER) {
		if (read)
			chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
									   pd->res->start + ICDR);
		else
			chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
									   pd->res->start + ICDR);
	}

	if (IS_ERR(chan))
		return;

	dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
	if (dma_mapping_error(chan->device->dev, dma_addr)) {
		dev_dbg(pd->dev, "dma map failed, using PIO\n");
		return;
	}

	sg_dma_len(&pd->sg) = pd->msg->len;
	sg_dma_address(&pd->sg) = dma_addr;

	pd->dma_direction = dir;

	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
					 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txdesc) {
		dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	txdesc->callback = sh_mobile_i2c_dma_callback;
	txdesc->callback_param = pd;

	cookie = dmaengine_submit(txdesc);
	if (dma_submit_error(cookie)) {
		dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
		sh_mobile_i2c_cleanup_dma(pd, false);
		return;
	}

	dma_async_issue_pending(chan);
}

static void start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
		     bool do_init)
{
	if (do_init) {
		/* Initialize channel registers */
		iic_wr(pd, ICCR, ICCR_SCP);

		/* Enable channel and configure rx ack */
		iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);

		/* Set the clock */
		iic_wr(pd, ICCL, pd->iccl & 0xff);
		iic_wr(pd, ICCH, pd->icch & 0xff);
	}

	pd->msg = usr_msg;
	pd->pos = -1;
	pd->sr = 0;

	if (pd->atomic_xfer)
		return;

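	/*
	 * i2c_get_dma_safe_msg_buf() returns NULL for messages shorter
	 * than the 8-byte threshold, so short transfers stay on PIO and
	 * only longer ones are handed to the DMA path.
	 */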
	pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
	if (pd->dma_buf)
		sh_mobile_i2c_xfer_dma(pd);

	/* Enable all interrupts to begin with */
	iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
}

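/* Poll DTE for at most 1000 * 10 us (~10 ms); a NACK (TACK) aborts early. */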
static int poll_dte(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		if (val & ICSR_DTE)
			break;

		if (val & ICSR_TACK)
			return -ENXIO;

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

static int poll_busy(struct sh_mobile_i2c_data *pd)
{
	int i;

	for (i = 1000; i; i--) {
		u_int8_t val = iic_rd(pd, ICSR);

		dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);

		/* the interrupt handler may wake us up before the
		 * transfer is finished, so poll the hardware
		 * until we're done.
		 */
		if (!(val & ICSR_BUSY)) {
			/* handle missing acknowledge and arbitration lost */
			val |= pd->sr;
			if (val & ICSR_TACK)
				return -ENXIO;
			if (val & ICSR_AL)
				return -EAGAIN;
			break;
		}

		udelay(10);
	}

	return i ? 0 : -ETIMEDOUT;
}

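/*
 * Process the messages one by one. A stop is only sent after the last
 * message or when I2C_M_STOP is set; otherwise the next message continues
 * with a repeated start. As noted in the header comment, BUSY is polled
 * after a stop and DTE is polled between messages.
 */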
static int sh_mobile_xfer(struct sh_mobile_i2c_data *pd,
			  struct i2c_msg *msgs, int num)
{
	struct i2c_msg *msg;
	int err = 0;
	int i;
	long time_left;

	/* Wake up device and enable clock */
	pm_runtime_get_sync(pd->dev);

	/* Process all messages */
	for (i = 0; i < num; i++) {
		bool do_start = pd->send_stop || !i;
		msg = &msgs[i];
		pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
		pd->stop_after_dma = false;

		start_ch(pd, msg, do_start);

		if (do_start)
			i2c_op(pd, OP_START);

		if (pd->atomic_xfer) {
			unsigned long j = jiffies + pd->adap.timeout;

			time_left = time_before_eq(jiffies, j);
			while (time_left &&
			       !(pd->sr & (ICSR_TACK | SW_DONE))) {
				unsigned char sr = iic_rd(pd, ICSR);

				if (sr & (ICSR_AL | ICSR_TACK |
					  ICSR_WAIT | ICSR_DTE)) {
					sh_mobile_i2c_isr(0, pd);
					udelay(150);
				} else {
					cpu_relax();
				}
				time_left = time_before_eq(jiffies, j);
			}
		} else {
			/* The interrupt handler takes care of the rest... */
			time_left = wait_event_timeout(pd->wait,
					pd->sr & (ICSR_TACK | SW_DONE),
					pd->adap.timeout);

			/* 'stop_after_dma' tells if DMA xfer was complete */
			i2c_put_dma_safe_msg_buf(pd->dma_buf, pd->msg,
						 pd->stop_after_dma);
		}

		if (!time_left) {
			if (pd->dma_direction != DMA_NONE)
				sh_mobile_i2c_cleanup_dma(pd, true);

			err = -ETIMEDOUT;
			break;
		}

		if (pd->send_stop)
			err = poll_busy(pd);
		else
			err = poll_dte(pd);
		if (err < 0)
			break;
	}

	/* Disable channel */
	iic_wr(pd, ICCR, ICCR_SCP);

	/* Disable clock and mark device as idle */
	pm_runtime_put_sync(pd->dev);

	return err ?: num;
}

static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
			      struct i2c_msg *msgs,
			      int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = false;
	return sh_mobile_xfer(pd, msgs, num);
}

static int sh_mobile_i2c_xfer_atomic(struct i2c_adapter *adapter,
				     struct i2c_msg *msgs,
				     int num)
{
	struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);

	pd->atomic_xfer = true;
	return sh_mobile_xfer(pd, msgs, num);
}

static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
{
	return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
}

static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
	.functionality = sh_mobile_i2c_func,
	.master_xfer = sh_mobile_i2c_xfer,
	.master_xfer_atomic = sh_mobile_i2c_xfer_atomic,
};

static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
	.flags = I2C_AQ_NO_ZERO_LEN_READ,
};

/*
 * r8a7740 has an erratum regarding I2C I/O pad reset that requires this
 * workaround.
 */
static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
{
	iic_set_clr(pd, ICCR, ICCR_ICE, 0);
	iic_rd(pd, ICCR); /* dummy read */

	iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
	iic_rd(pd, ICSTART); /* dummy read */

	udelay(10);

	iic_wr(pd, ICCR, ICCR_SCP);
	iic_wr(pd, ICSTART, 0);

	udelay(10);

	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);
	iic_wr(pd, ICCR, 0);
	udelay(10);
	iic_wr(pd, ICCR, ICCR_TRS);
	udelay(10);

	return sh_mobile_i2c_v2_init(pd);
}

static const struct sh_mobile_dt_config default_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_init,
};

static const struct sh_mobile_dt_config fast_clock_dt_config = {
	.clks_per_count = 2,
	.setup = sh_mobile_i2c_v2_init,
};

static const struct sh_mobile_dt_config r8a7740_dt_config = {
	.clks_per_count = 1,
	.setup = sh_mobile_i2c_r8a7740_workaround,
};

static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
	{ .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
	{ .compatible = "renesas,iic-r8a774c0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7790", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7791", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7792", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7793", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7794", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a7795", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-r8a77990", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen2-iic", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rcar-gen3-iic", .data = &fast_clock_dt_config },
	{ .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
	{},
};
MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);

static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
{
	if (!IS_ERR(pd->dma_tx)) {
		dma_release_channel(pd->dma_tx);
		pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
	}

	if (!IS_ERR(pd->dma_rx)) {
		dma_release_channel(pd->dma_rx);
		pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
	}
}

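/*
 * DT platforms describe one IRQ per interrupt source, while legacy board
 * files may pass IRQ resources spanning a range, hence the two loops below.
 */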
static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
{
	struct device_node *np = dev_of_node(&dev->dev);
	int k = 0, ret;

	if (np) {
		int irq;

		while ((irq = platform_get_irq_optional(dev, k)) != -ENXIO) {
			if (irq < 0)
				return irq;
			ret = devm_request_irq(&dev->dev, irq, sh_mobile_i2c_isr,
					       0, dev_name(&dev->dev), pd);
			if (ret) {
				dev_err(&dev->dev, "cannot request IRQ %d\n", irq);
				return ret;
			}
			k++;
		}
	} else {
		struct resource *res;
		resource_size_t n;

		while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
			for (n = res->start; n <= res->end; n++) {
				ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
						       0, dev_name(&dev->dev), pd);
				if (ret) {
					dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
					return ret;
				}
			}
			k++;
		}
	}

	return k > 0 ? 0 : -ENOENT;
}

static int sh_mobile_i2c_probe(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd;
	struct i2c_adapter *adap;
	const struct sh_mobile_dt_config *config;
	int ret;
	u32 bus_speed;

	pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
	if (!pd)
		return -ENOMEM;

	pd->clk = devm_clk_get(&dev->dev, NULL);
	if (IS_ERR(pd->clk)) {
		dev_err(&dev->dev, "cannot get clock\n");
		return PTR_ERR(pd->clk);
	}

	ret = sh_mobile_i2c_hook_irqs(dev, pd);
	if (ret)
		return ret;

	pd->dev = &dev->dev;
	platform_set_drvdata(dev, pd);

	pd->reg = devm_platform_get_and_ioremap_resource(dev, 0, &pd->res);
	if (IS_ERR(pd->reg))
		return PTR_ERR(pd->reg);

	ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
	pd->bus_speed = (ret || !bus_speed) ? I2C_MAX_STANDARD_MODE_FREQ : bus_speed;
	pd->clks_per_count = 1;

	/* Newer variants come with two new bits in ICIC */
	if (resource_size(pd->res) > 0x17)
		pd->flags |= IIC_FLAG_HAS_ICIC67;

	pm_runtime_enable(&dev->dev);
	pm_runtime_get_sync(&dev->dev);

	config = of_device_get_match_data(&dev->dev);
	if (config) {
		pd->clks_per_count = config->clks_per_count;
		ret = config->setup(pd);
	} else {
		ret = sh_mobile_i2c_init(pd);
	}

	pm_runtime_put_sync(&dev->dev);
	if (ret)
		return ret;

	/* Init DMA */
	sg_init_table(&pd->sg, 1);
	pd->dma_direction = DMA_NONE;
	pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);

	/* setup the private data */
	adap = &pd->adap;
	i2c_set_adapdata(adap, pd);

	adap->owner = THIS_MODULE;
	adap->algo = &sh_mobile_i2c_algorithm;
	adap->quirks = &sh_mobile_i2c_quirks;
	adap->dev.parent = &dev->dev;
	adap->retries = 5;
	adap->nr = dev->id;
	adap->dev.of_node = dev->dev.of_node;

	strscpy(adap->name, dev->name, sizeof(adap->name));

	spin_lock_init(&pd->lock);
	init_waitqueue_head(&pd->wait);

	ret = i2c_add_numbered_adapter(adap);
	if (ret < 0) {
		sh_mobile_i2c_release_dma(pd);
		return ret;
	}

	dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);

	return 0;
}

static void sh_mobile_i2c_remove(struct platform_device *dev)
{
	struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);

	i2c_del_adapter(&pd->adap);
	sh_mobile_i2c_release_dma(pd);
	pm_runtime_disable(&dev->dev);
}

static int sh_mobile_i2c_suspend(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_suspended(&pd->adap);
	return 0;
}

static int sh_mobile_i2c_resume(struct device *dev)
{
	struct sh_mobile_i2c_data *pd = dev_get_drvdata(dev);

	i2c_mark_adapter_resumed(&pd->adap);
	return 0;
}

static const struct dev_pm_ops sh_mobile_i2c_pm_ops = {
	NOIRQ_SYSTEM_SLEEP_PM_OPS(sh_mobile_i2c_suspend,
				  sh_mobile_i2c_resume)
};

static struct platform_driver sh_mobile_i2c_driver = {
	.driver	= {
		.name = "i2c-sh_mobile",
		.of_match_table = sh_mobile_i2c_dt_ids,
		.pm = pm_sleep_ptr(&sh_mobile_i2c_pm_ops),
	},
	.probe = sh_mobile_i2c_probe,
	.remove_new = sh_mobile_i2c_remove,
};

static int __init sh_mobile_i2c_adap_init(void)
{
	return platform_driver_register(&sh_mobile_i2c_driver);
}
subsys_initcall(sh_mobile_i2c_adap_init);

static void __exit sh_mobile_i2c_adap_exit(void)
{
	platform_driver_unregister(&sh_mobile_i2c_driver);
}
module_exit(sh_mobile_i2c_adap_exit);

MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
MODULE_AUTHOR("Magnus Damm");
MODULE_AUTHOR("Wolfram Sang");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS("platform:i2c-sh_mobile");