/*
 * SuperH Mobile I2C Controller
 *
 * Copyright (C) 2014 Wolfram Sang <wsa@sang-engineering.com>
 *
 * Copyright (C) 2008 Magnus Damm
 *
 * Portions of the code based on out-of-tree driver i2c-sh7343.c
 * Copyright (c) 2006 Carlos Munoz <carlos@kenati.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 */
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/i2c.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/slab.h>
/* Transmit operation: */
/* BUS: S A8 ACK P(*) */
/* BUS: S A8 ACK D8(1) ACK P(*) */
/* IRQ: DTE WAIT WAIT */
/* BUS: S A8 ACK D8(1) ACK D8(2) ACK P(*) */
/* IRQ: DTE WAIT WAIT WAIT */
/* ICDR: A8 D8(1) D8(2) */
/* 3 bytes or more, +---------+ gets repeated */
/* Receive operation: */
/* 0 byte receive - not supported since slave may hold SDA low */
/* 1 byte receive [TX] | [RX] */
/* BUS: S A8 ACK | D8(1) ACK P(*) */
/* IRQ: DTE WAIT | WAIT DTE */
/* ICIC: -DTE | +DTE */
/* ICCR: 0x94 0x81 | 0xc0 */
/* ICDR: A8 | D8(1) */
/* 2 byte receive [TX]| [RX] */
/* BUS: S A8 ACK | D8(1) ACK D8(2) ACK P(*) */
/* IRQ: DTE WAIT | WAIT WAIT DTE */
/* ICIC: -DTE | +DTE */
/* ICCR: 0x94 0x81 | 0xc0 */
/* ICDR: A8 | D8(1) D8(2) */
/* 3 byte receive [TX] | [RX] (*) */
/* BUS: S A8 ACK | D8(1) ACK D8(2) ACK D8(3) ACK P */
/* IRQ: DTE WAIT | WAIT WAIT WAIT DTE */
/* ICIC: -DTE | +DTE */
/* ICCR: 0x94 0x81 | 0xc0 */
/* ICDR: A8 | D8(1) D8(2) D8(3) */
/* 4 bytes or more, this part is repeated +---------+ */
/* Interrupt order and BUSY flag */
/* SDA ___\___XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXAAAAAAAAA___/ */
/* SCL \_/1\_/2\_/3\_/4\_/5\_/6\_/7\_/8\___/9\_____/ */
/* S D7 D6 D5 D4 D3 D2 D1 D0 P(*) */
/* WAIT IRQ ________________________________/ \___________ */
/* TACK IRQ ____________________________________/ \_______ */
/* DTE IRQ __________________________________________/ \_ */
/* AL IRQ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX */
/* _______________________________________________ */
/* (*) The STOP condition is only sent by the master at the end of the last */
/* I2C message or if the I2C_M_STOP flag is set. Similarly, the BUSY bit is */
/* only cleared after the STOP condition, so, between messages we have to */
/* poll for the DTE bit. */
/* Per-byte hardware operations dispatched by i2c_op(); members are
 * reconstructed from the case labels visible in i2c_op()'s switch. */
enum sh_mobile_i2c_op {
	OP_START = 0,	/* issue start condition */
	OP_TX_FIRST,	/* first TX byte: disable DTE IRQ, write data */
	OP_TX,		/* write a data byte */
	OP_TX_STOP,	/* issue stop (or repeated start) after TX */
	OP_TX_TO_RX,	/* switch controller from write to read mode */
	OP_RX,		/* read a data byte */
	OP_RX_STOP,	/* enable DTE IRQ and issue stop */
	OP_RX_STOP_DATA,/* enable DTE IRQ, read data, issue stop */
};
122 struct sh_mobile_i2c_data {
125 struct i2c_adapter adap;
126 unsigned long bus_speed;
127 unsigned int clks_per_count;
135 wait_queue_head_t wait;
142 struct resource *res;
143 struct dma_chan *dma_tx;
144 struct dma_chan *dma_rx;
145 struct scatterlist sg;
146 enum dma_data_direction dma_direction;
/* Per-compatible configuration: clock divider and timing-setup hook.
 * clks_per_count is grounded in probe()'s config->clks_per_count access. */
struct sh_mobile_dt_config {
	int clks_per_count;
	int (*setup)(struct sh_mobile_i2c_data *pd);
};
#define IIC_FLAG_HAS_ICIC67	(1 << 0)

#define STANDARD_MODE		100000
#define FAST_MODE		400000

/* Register offsets */
#define ICDR			0x00
#define ICCR			0x04
#define ICSR			0x08
#define ICIC			0x0c
#define ICCL			0x10
#define ICCH			0x14
#define ICSTART			0x70 /* r8a7740 only */

#define ICCR_ICE		0x80
#define ICCR_RACK		0x40
#define ICCR_TRS		0x10
#define ICCR_BBSY		0x04
#define ICCR_SCP		0x01

#define ICSR_SCLM		0x80
#define ICSR_SDAM		0x40
#define SW_DONE			0x20 /* software flag, never set by hardware */
#define ICSR_BUSY		0x10
#define ICSR_AL			0x08
#define ICSR_TACK		0x04
#define ICSR_WAIT		0x02
#define ICSR_DTE		0x01

#define ICIC_ICCLB8		0x80
#define ICIC_ICCHB8		0x40
#define ICIC_TDMAE		0x20
#define ICIC_RDMAE		0x10
#define ICIC_ALE		0x08
#define ICIC_TACKE		0x04
#define ICIC_WAITE		0x02
#define ICIC_DTEE		0x01

#define ICSTART_ICSTART		0x10
196 static void iic_wr(struct sh_mobile_i2c_data *pd, int offs, unsigned char data)
201 iowrite8(data, pd->reg + offs);
204 static unsigned char iic_rd(struct sh_mobile_i2c_data *pd, int offs)
206 return ioread8(pd->reg + offs);
/* Read-modify-write helper: set the 'set' bits and clear the 'clr' bits. */
static void iic_set_clr(struct sh_mobile_i2c_data *pd, int offs,
			unsigned char set, unsigned char clr)
{
	iic_wr(pd, offs, (iic_rd(pd, offs) | set) & ~clr);
}
215 static u32 sh_mobile_i2c_iccl(unsigned long count_khz, u32 tLOW, u32 tf)
218 * Conditional expression:
219 * ICCL >= COUNT_CLK * (tLOW + tf)
221 * SH-Mobile IIC hardware starts counting the LOW period of
222 * the SCL signal (tLOW) as soon as it pulls the SCL line.
223 * In order to meet the tLOW timing spec, we need to take into
224 * account the fall time of SCL signal (tf). Default tf value
225 * should be 0.3 us, for safety.
227 return (((count_khz * (tLOW + tf)) + 5000) / 10000);
230 static u32 sh_mobile_i2c_icch(unsigned long count_khz, u32 tHIGH, u32 tf)
233 * Conditional expression:
234 * ICCH >= COUNT_CLK * (tHIGH + tf)
236 * SH-Mobile IIC hardware is aware of SCL transition period 'tr',
237 * and can ignore it. SH-Mobile IIC controller starts counting
238 * the HIGH period of the SCL signal (tHIGH) after the SCL input
239 * voltage increases at VIH.
241 * Afterward it turned out calculating ICCH using only tHIGH spec
242 * will result in violation of the tHD;STA timing spec. We need
243 * to take into account the fall time of SDA signal (tf) at START
244 * condition, in order to meet both tHIGH and tHD;STA specs.
246 return (((count_khz * (tHIGH + tf)) + 5000) / 10000);
249 static int sh_mobile_i2c_check_timing(struct sh_mobile_i2c_data *pd)
251 u16 max_val = pd->flags & IIC_FLAG_HAS_ICIC67 ? 0x1ff : 0xff;
253 if (pd->iccl > max_val || pd->icch > max_val) {
254 dev_err(pd->dev, "timing values out of range: L/H=0x%x/0x%x\n",
259 /* one more bit of ICCL in ICIC */
260 if (pd->iccl & 0x100)
261 pd->icic |= ICIC_ICCLB8;
263 pd->icic &= ~ICIC_ICCLB8;
265 /* one more bit of ICCH in ICIC */
266 if (pd->icch & 0x100)
267 pd->icic |= ICIC_ICCHB8;
269 pd->icic &= ~ICIC_ICCHB8;
271 dev_dbg(pd->dev, "timing values: L/H=0x%x/0x%x\n", pd->iccl, pd->icch);
275 static int sh_mobile_i2c_init(struct sh_mobile_i2c_data *pd)
277 unsigned long i2c_clk_khz;
280 i2c_clk_khz = clk_get_rate(pd->clk) / 1000 / pd->clks_per_count;
282 if (pd->bus_speed == STANDARD_MODE) {
283 tLOW = 47; /* tLOW = 4.7 us */
284 tHIGH = 40; /* tHD;STA = tHIGH = 4.0 us */
285 tf = 3; /* tf = 0.3 us */
286 } else if (pd->bus_speed == FAST_MODE) {
287 tLOW = 13; /* tLOW = 1.3 us */
288 tHIGH = 6; /* tHD;STA = tHIGH = 0.6 us */
289 tf = 3; /* tf = 0.3 us */
291 dev_err(pd->dev, "unrecognized bus speed %lu Hz\n",
296 pd->iccl = sh_mobile_i2c_iccl(i2c_clk_khz, tLOW, tf);
297 pd->icch = sh_mobile_i2c_icch(i2c_clk_khz, tHIGH, tf);
299 return sh_mobile_i2c_check_timing(pd);
302 static int sh_mobile_i2c_v2_init(struct sh_mobile_i2c_data *pd)
304 unsigned long clks_per_cycle;
306 /* L = 5, H = 4, L + H = 9 */
307 clks_per_cycle = clk_get_rate(pd->clk) / pd->bus_speed;
308 pd->iccl = DIV_ROUND_UP(clks_per_cycle * 5 / 9 - 1, pd->clks_per_count);
309 pd->icch = DIV_ROUND_UP(clks_per_cycle * 4 / 9 - 5, pd->clks_per_count);
311 return sh_mobile_i2c_check_timing(pd);
314 static unsigned char i2c_op(struct sh_mobile_i2c_data *pd,
315 enum sh_mobile_i2c_op op, unsigned char data)
317 unsigned char ret = 0;
320 dev_dbg(pd->dev, "op %d, data in 0x%02x\n", op, data);
322 spin_lock_irqsave(&pd->lock, flags);
325 case OP_START: /* issue start and trigger DTE interrupt */
326 iic_wr(pd, ICCR, ICCR_ICE | ICCR_TRS | ICCR_BBSY);
328 case OP_TX_FIRST: /* disable DTE interrupt and write data */
329 iic_wr(pd, ICIC, ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
330 iic_wr(pd, ICDR, data);
332 case OP_TX: /* write data */
333 iic_wr(pd, ICDR, data);
335 case OP_TX_STOP: /* issue a stop (or rep_start) */
336 iic_wr(pd, ICCR, pd->send_stop ? ICCR_ICE | ICCR_TRS
337 : ICCR_ICE | ICCR_TRS | ICCR_BBSY);
339 case OP_TX_TO_RX: /* select read mode */
340 iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
342 case OP_RX: /* just read data */
343 ret = iic_rd(pd, ICDR);
345 case OP_RX_STOP: /* enable DTE interrupt, issue stop */
347 ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
348 iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
350 case OP_RX_STOP_DATA: /* enable DTE interrupt, read data, issue stop */
352 ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
353 ret = iic_rd(pd, ICDR);
354 iic_wr(pd, ICCR, ICCR_ICE | ICCR_RACK);
358 spin_unlock_irqrestore(&pd->lock, flags);
360 dev_dbg(pd->dev, "op %d, data out 0x%02x\n", op, ret);
364 static bool sh_mobile_i2c_is_first_byte(struct sh_mobile_i2c_data *pd)
366 return pd->pos == -1;
369 static void sh_mobile_i2c_get_data(struct sh_mobile_i2c_data *pd,
374 *buf = i2c_8bit_addr_from_msg(pd->msg);
377 *buf = pd->msg->buf[pd->pos];
381 static int sh_mobile_i2c_isr_tx(struct sh_mobile_i2c_data *pd)
385 if (pd->pos == pd->msg->len) {
386 i2c_op(pd, OP_TX_STOP, 0);
390 sh_mobile_i2c_get_data(pd, &data);
391 i2c_op(pd, sh_mobile_i2c_is_first_byte(pd) ? OP_TX_FIRST : OP_TX, data);
397 static int sh_mobile_i2c_isr_rx(struct sh_mobile_i2c_data *pd)
404 sh_mobile_i2c_get_data(pd, &data);
406 if (sh_mobile_i2c_is_first_byte(pd))
407 i2c_op(pd, OP_TX_FIRST, data);
409 i2c_op(pd, OP_TX, data);
414 i2c_op(pd, OP_TX_TO_RX, 0);
418 real_pos = pd->pos - 2;
420 if (pd->pos == pd->msg->len) {
421 if (pd->stop_after_dma) {
422 /* Simulate PIO end condition after DMA transfer */
423 i2c_op(pd, OP_RX_STOP, 0);
429 i2c_op(pd, OP_RX_STOP, 0);
432 data = i2c_op(pd, OP_RX_STOP_DATA, 0);
433 } else if (real_pos >= 0) {
434 data = i2c_op(pd, OP_RX, 0);
438 pd->msg->buf[real_pos] = data;
442 return pd->pos == (pd->msg->len + 2);
445 static irqreturn_t sh_mobile_i2c_isr(int irq, void *dev_id)
447 struct sh_mobile_i2c_data *pd = dev_id;
451 sr = iic_rd(pd, ICSR);
452 pd->sr |= sr; /* remember state */
454 dev_dbg(pd->dev, "i2c_isr 0x%02x 0x%02x %s %d %d!\n", sr, pd->sr,
455 (pd->msg->flags & I2C_M_RD) ? "read" : "write",
456 pd->pos, pd->msg->len);
458 /* Kick off TxDMA after preface was done */
459 if (pd->dma_direction == DMA_TO_DEVICE && pd->pos == 0)
460 iic_set_clr(pd, ICIC, ICIC_TDMAE, 0);
461 else if (sr & (ICSR_AL | ICSR_TACK))
462 /* don't interrupt transaction - continue to issue stop */
463 iic_wr(pd, ICSR, sr & ~(ICSR_AL | ICSR_TACK));
464 else if (pd->msg->flags & I2C_M_RD)
465 wakeup = sh_mobile_i2c_isr_rx(pd);
467 wakeup = sh_mobile_i2c_isr_tx(pd);
469 /* Kick off RxDMA after preface was done */
470 if (pd->dma_direction == DMA_FROM_DEVICE && pd->pos == 1)
471 iic_set_clr(pd, ICIC, ICIC_RDMAE, 0);
473 if (sr & ICSR_WAIT) /* TODO: add delay here to support slow acks */
474 iic_wr(pd, ICSR, sr & ~ICSR_WAIT);
481 /* defeat write posting to avoid spurious WAIT interrupts */
487 static void sh_mobile_i2c_dma_unmap(struct sh_mobile_i2c_data *pd)
489 struct dma_chan *chan = pd->dma_direction == DMA_FROM_DEVICE
490 ? pd->dma_rx : pd->dma_tx;
492 dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
493 pd->msg->len, pd->dma_direction);
495 pd->dma_direction = DMA_NONE;
498 static void sh_mobile_i2c_cleanup_dma(struct sh_mobile_i2c_data *pd)
500 if (pd->dma_direction == DMA_NONE)
502 else if (pd->dma_direction == DMA_FROM_DEVICE)
503 dmaengine_terminate_all(pd->dma_rx);
504 else if (pd->dma_direction == DMA_TO_DEVICE)
505 dmaengine_terminate_all(pd->dma_tx);
507 sh_mobile_i2c_dma_unmap(pd);
510 static void sh_mobile_i2c_dma_callback(void *data)
512 struct sh_mobile_i2c_data *pd = data;
514 sh_mobile_i2c_dma_unmap(pd);
515 pd->pos = pd->msg->len;
516 pd->stop_after_dma = true;
518 i2c_release_dma_safe_msg_buf(pd->msg, pd->dma_buf);
520 iic_set_clr(pd, ICIC, 0, ICIC_TDMAE | ICIC_RDMAE);
523 static struct dma_chan *sh_mobile_i2c_request_dma_chan(struct device *dev,
524 enum dma_transfer_direction dir, dma_addr_t port_addr)
526 struct dma_chan *chan;
527 struct dma_slave_config cfg;
528 char *chan_name = dir == DMA_MEM_TO_DEV ? "tx" : "rx";
531 chan = dma_request_slave_channel_reason(dev, chan_name);
533 dev_dbg(dev, "request_channel failed for %s (%ld)\n", chan_name,
538 memset(&cfg, 0, sizeof(cfg));
540 if (dir == DMA_MEM_TO_DEV) {
541 cfg.dst_addr = port_addr;
542 cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
544 cfg.src_addr = port_addr;
545 cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
548 ret = dmaengine_slave_config(chan, &cfg);
550 dev_dbg(dev, "slave_config failed for %s (%d)\n", chan_name, ret);
551 dma_release_channel(chan);
555 dev_dbg(dev, "got DMA channel for %s\n", chan_name);
559 static void sh_mobile_i2c_xfer_dma(struct sh_mobile_i2c_data *pd)
561 bool read = pd->msg->flags & I2C_M_RD;
562 enum dma_data_direction dir = read ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
563 struct dma_chan *chan = read ? pd->dma_rx : pd->dma_tx;
564 struct dma_async_tx_descriptor *txdesc;
568 if (PTR_ERR(chan) == -EPROBE_DEFER) {
570 chan = pd->dma_rx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_DEV_TO_MEM,
571 pd->res->start + ICDR);
573 chan = pd->dma_tx = sh_mobile_i2c_request_dma_chan(pd->dev, DMA_MEM_TO_DEV,
574 pd->res->start + ICDR);
580 dma_addr = dma_map_single(chan->device->dev, pd->dma_buf, pd->msg->len, dir);
581 if (dma_mapping_error(chan->device->dev, dma_addr)) {
582 dev_dbg(pd->dev, "dma map failed, using PIO\n");
586 sg_dma_len(&pd->sg) = pd->msg->len;
587 sg_dma_address(&pd->sg) = dma_addr;
589 pd->dma_direction = dir;
591 txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
592 read ? DMA_DEV_TO_MEM : DMA_MEM_TO_DEV,
593 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
595 dev_dbg(pd->dev, "dma prep slave sg failed, using PIO\n");
596 sh_mobile_i2c_cleanup_dma(pd);
600 txdesc->callback = sh_mobile_i2c_dma_callback;
601 txdesc->callback_param = pd;
603 cookie = dmaengine_submit(txdesc);
604 if (dma_submit_error(cookie)) {
605 dev_dbg(pd->dev, "submitting dma failed, using PIO\n");
606 sh_mobile_i2c_cleanup_dma(pd);
610 dma_async_issue_pending(chan);
613 static int start_ch(struct sh_mobile_i2c_data *pd, struct i2c_msg *usr_msg,
617 /* Initialize channel registers */
618 iic_wr(pd, ICCR, ICCR_SCP);
620 /* Enable channel and configure rx ack */
621 iic_wr(pd, ICCR, ICCR_ICE | ICCR_SCP);
624 iic_wr(pd, ICCL, pd->iccl & 0xff);
625 iic_wr(pd, ICCH, pd->icch & 0xff);
632 pd->dma_buf = i2c_get_dma_safe_msg_buf(pd->msg, 8);
634 sh_mobile_i2c_xfer_dma(pd);
636 /* Enable all interrupts to begin with */
637 iic_wr(pd, ICIC, ICIC_DTEE | ICIC_WAITE | ICIC_ALE | ICIC_TACKE);
641 static int poll_dte(struct sh_mobile_i2c_data *pd)
645 for (i = 1000; i; i--) {
646 u_int8_t val = iic_rd(pd, ICSR);
657 return i ? 0 : -ETIMEDOUT;
660 static int poll_busy(struct sh_mobile_i2c_data *pd)
664 for (i = 1000; i; i--) {
665 u_int8_t val = iic_rd(pd, ICSR);
667 dev_dbg(pd->dev, "val 0x%02x pd->sr 0x%02x\n", val, pd->sr);
669 /* the interrupt handler may wake us up before the
670 * transfer is finished, so poll the hardware
673 if (!(val & ICSR_BUSY)) {
674 /* handle missing acknowledge and arbitration lost */
686 return i ? 0 : -ETIMEDOUT;
689 static int sh_mobile_i2c_xfer(struct i2c_adapter *adapter,
690 struct i2c_msg *msgs,
693 struct sh_mobile_i2c_data *pd = i2c_get_adapdata(adapter);
699 /* Wake up device and enable clock */
700 pm_runtime_get_sync(pd->dev);
702 /* Process all messages */
703 for (i = 0; i < num; i++) {
704 bool do_start = pd->send_stop || !i;
706 pd->send_stop = i == num - 1 || msg->flags & I2C_M_STOP;
707 pd->stop_after_dma = false;
709 err = start_ch(pd, msg, do_start);
714 i2c_op(pd, OP_START, 0);
716 /* The interrupt handler takes care of the rest... */
717 timeout = wait_event_timeout(pd->wait,
718 pd->sr & (ICSR_TACK | SW_DONE),
721 dev_err(pd->dev, "Transfer request timed out\n");
722 if (pd->dma_direction != DMA_NONE)
723 sh_mobile_i2c_cleanup_dma(pd);
737 /* Disable channel */
738 iic_wr(pd, ICCR, ICCR_SCP);
740 /* Disable clock and mark device as idle */
741 pm_runtime_put_sync(pd->dev);
746 static u32 sh_mobile_i2c_func(struct i2c_adapter *adapter)
748 return I2C_FUNC_I2C | I2C_FUNC_SMBUS_EMUL | I2C_FUNC_PROTOCOL_MANGLING;
751 static const struct i2c_algorithm sh_mobile_i2c_algorithm = {
752 .functionality = sh_mobile_i2c_func,
753 .master_xfer = sh_mobile_i2c_xfer,
756 static const struct i2c_adapter_quirks sh_mobile_i2c_quirks = {
757 .flags = I2C_AQ_NO_ZERO_LEN_READ,
/*
 * r8a7740 chip has lasting errata on I2C I/O pad reset.
 * this is work-around for it.
 */
764 static int sh_mobile_i2c_r8a7740_workaround(struct sh_mobile_i2c_data *pd)
766 iic_set_clr(pd, ICCR, ICCR_ICE, 0);
767 iic_rd(pd, ICCR); /* dummy read */
769 iic_set_clr(pd, ICSTART, ICSTART_ICSTART, 0);
770 iic_rd(pd, ICSTART); /* dummy read */
774 iic_wr(pd, ICCR, ICCR_SCP);
775 iic_wr(pd, ICSTART, 0);
779 iic_wr(pd, ICCR, ICCR_TRS);
783 iic_wr(pd, ICCR, ICCR_TRS);
786 return sh_mobile_i2c_init(pd);
789 static const struct sh_mobile_dt_config default_dt_config = {
791 .setup = sh_mobile_i2c_init,
794 static const struct sh_mobile_dt_config fast_clock_dt_config = {
796 .setup = sh_mobile_i2c_init,
799 static const struct sh_mobile_dt_config v2_freq_calc_dt_config = {
801 .setup = sh_mobile_i2c_v2_init,
804 static const struct sh_mobile_dt_config r8a7740_dt_config = {
806 .setup = sh_mobile_i2c_r8a7740_workaround,
809 static const struct of_device_id sh_mobile_i2c_dt_ids[] = {
810 { .compatible = "renesas,iic-r8a73a4", .data = &fast_clock_dt_config },
811 { .compatible = "renesas,iic-r8a7740", .data = &r8a7740_dt_config },
812 { .compatible = "renesas,iic-r8a7790", .data = &v2_freq_calc_dt_config },
813 { .compatible = "renesas,iic-r8a7791", .data = &fast_clock_dt_config },
814 { .compatible = "renesas,iic-r8a7792", .data = &fast_clock_dt_config },
815 { .compatible = "renesas,iic-r8a7793", .data = &fast_clock_dt_config },
816 { .compatible = "renesas,iic-r8a7794", .data = &fast_clock_dt_config },
817 { .compatible = "renesas,rcar-gen2-iic", .data = &fast_clock_dt_config },
818 { .compatible = "renesas,iic-r8a7795", .data = &fast_clock_dt_config },
819 { .compatible = "renesas,rcar-gen3-iic", .data = &fast_clock_dt_config },
820 { .compatible = "renesas,iic-sh73a0", .data = &fast_clock_dt_config },
821 { .compatible = "renesas,rmobile-iic", .data = &default_dt_config },
824 MODULE_DEVICE_TABLE(of, sh_mobile_i2c_dt_ids);
826 static void sh_mobile_i2c_release_dma(struct sh_mobile_i2c_data *pd)
828 if (!IS_ERR(pd->dma_tx)) {
829 dma_release_channel(pd->dma_tx);
830 pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
833 if (!IS_ERR(pd->dma_rx)) {
834 dma_release_channel(pd->dma_rx);
835 pd->dma_rx = ERR_PTR(-EPROBE_DEFER);
839 static int sh_mobile_i2c_hook_irqs(struct platform_device *dev, struct sh_mobile_i2c_data *pd)
841 struct resource *res;
845 while ((res = platform_get_resource(dev, IORESOURCE_IRQ, k))) {
846 for (n = res->start; n <= res->end; n++) {
847 ret = devm_request_irq(&dev->dev, n, sh_mobile_i2c_isr,
848 0, dev_name(&dev->dev), pd);
850 dev_err(&dev->dev, "cannot request IRQ %pa\n", &n);
857 return k > 0 ? 0 : -ENOENT;
860 static int sh_mobile_i2c_probe(struct platform_device *dev)
862 struct sh_mobile_i2c_data *pd;
863 struct i2c_adapter *adap;
864 struct resource *res;
865 const struct sh_mobile_dt_config *config;
869 pd = devm_kzalloc(&dev->dev, sizeof(struct sh_mobile_i2c_data), GFP_KERNEL);
873 pd->clk = devm_clk_get(&dev->dev, NULL);
874 if (IS_ERR(pd->clk)) {
875 dev_err(&dev->dev, "cannot get clock\n");
876 return PTR_ERR(pd->clk);
879 ret = sh_mobile_i2c_hook_irqs(dev, pd);
884 platform_set_drvdata(dev, pd);
886 res = platform_get_resource(dev, IORESOURCE_MEM, 0);
889 pd->reg = devm_ioremap_resource(&dev->dev, res);
891 return PTR_ERR(pd->reg);
893 ret = of_property_read_u32(dev->dev.of_node, "clock-frequency", &bus_speed);
894 pd->bus_speed = (ret || !bus_speed) ? STANDARD_MODE : bus_speed;
895 pd->clks_per_count = 1;
897 /* Newer variants come with two new bits in ICIC */
898 if (resource_size(res) > 0x17)
899 pd->flags |= IIC_FLAG_HAS_ICIC67;
901 pm_runtime_enable(&dev->dev);
902 pm_runtime_get_sync(&dev->dev);
904 config = of_device_get_match_data(&dev->dev);
906 pd->clks_per_count = config->clks_per_count;
907 ret = config->setup(pd);
909 ret = sh_mobile_i2c_init(pd);
912 pm_runtime_put_sync(&dev->dev);
917 sg_init_table(&pd->sg, 1);
918 pd->dma_direction = DMA_NONE;
919 pd->dma_rx = pd->dma_tx = ERR_PTR(-EPROBE_DEFER);
921 /* setup the private data */
923 i2c_set_adapdata(adap, pd);
925 adap->owner = THIS_MODULE;
926 adap->algo = &sh_mobile_i2c_algorithm;
927 adap->quirks = &sh_mobile_i2c_quirks;
928 adap->dev.parent = &dev->dev;
931 adap->dev.of_node = dev->dev.of_node;
933 strlcpy(adap->name, dev->name, sizeof(adap->name));
935 spin_lock_init(&pd->lock);
936 init_waitqueue_head(&pd->wait);
938 ret = i2c_add_numbered_adapter(adap);
940 sh_mobile_i2c_release_dma(pd);
944 dev_info(&dev->dev, "I2C adapter %d, bus speed %lu Hz\n", adap->nr, pd->bus_speed);
949 static int sh_mobile_i2c_remove(struct platform_device *dev)
951 struct sh_mobile_i2c_data *pd = platform_get_drvdata(dev);
953 i2c_del_adapter(&pd->adap);
954 sh_mobile_i2c_release_dma(pd);
955 pm_runtime_disable(&dev->dev);
static int sh_mobile_i2c_runtime_nop(struct device *dev)
{
	/* Runtime PM callback shared between ->runtime_suspend()
	 * and ->runtime_resume(). Simply returns success.
	 *
	 * This driver re-initializes all registers after
	 * pm_runtime_get_sync() anyway so there is no need
	 * to save and restore registers here.
	 */
	return 0;
}
971 static const struct dev_pm_ops sh_mobile_i2c_dev_pm_ops = {
972 .runtime_suspend = sh_mobile_i2c_runtime_nop,
973 .runtime_resume = sh_mobile_i2c_runtime_nop,
976 static struct platform_driver sh_mobile_i2c_driver = {
978 .name = "i2c-sh_mobile",
979 .pm = &sh_mobile_i2c_dev_pm_ops,
980 .of_match_table = sh_mobile_i2c_dt_ids,
982 .probe = sh_mobile_i2c_probe,
983 .remove = sh_mobile_i2c_remove,
986 static int __init sh_mobile_i2c_adap_init(void)
988 return platform_driver_register(&sh_mobile_i2c_driver);
990 subsys_initcall(sh_mobile_i2c_adap_init);
992 static void __exit sh_mobile_i2c_adap_exit(void)
994 platform_driver_unregister(&sh_mobile_i2c_driver);
996 module_exit(sh_mobile_i2c_adap_exit);
998 MODULE_DESCRIPTION("SuperH Mobile I2C Bus Controller driver");
999 MODULE_AUTHOR("Magnus Damm and Wolfram Sang");
1000 MODULE_LICENSE("GPL v2");
1001 MODULE_ALIAS("platform:i2c-sh_mobile");