David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0 |
| 2 | // |
| 3 | // STMicroelectronics STM32 SPI Controller driver (master mode only) |
| 4 | // |
| 5 | // Copyright (C) 2017, STMicroelectronics - All Rights Reserved |
| 6 | // Author(s): Amelie Delaunay <amelie.delaunay@st.com> for STMicroelectronics. |
| 7 | |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 8 | #include <linux/debugfs.h> |
| 9 | #include <linux/clk.h> |
| 10 | #include <linux/delay.h> |
| 11 | #include <linux/dmaengine.h> |
| 12 | #include <linux/gpio.h> |
| 13 | #include <linux/interrupt.h> |
| 14 | #include <linux/iopoll.h> |
| 15 | #include <linux/module.h> |
| 16 | #include <linux/of_platform.h> |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 17 | #include <linux/pinctrl/consumer.h> |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 18 | #include <linux/pm_runtime.h> |
| 19 | #include <linux/reset.h> |
| 20 | #include <linux/spi/spi.h> |
| 21 | |
| 22 | #define DRIVER_NAME "spi_stm32" |
| 23 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 24 | /* STM32F4 SPI registers */ |
| 25 | #define STM32F4_SPI_CR1 0x00 |
| 26 | #define STM32F4_SPI_CR2 0x04 |
| 27 | #define STM32F4_SPI_SR 0x08 |
| 28 | #define STM32F4_SPI_DR 0x0C |
| 29 | #define STM32F4_SPI_I2SCFGR 0x1C |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 30 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 31 | /* STM32F4_SPI_CR1 bit fields */ |
| 32 | #define STM32F4_SPI_CR1_CPHA BIT(0) |
| 33 | #define STM32F4_SPI_CR1_CPOL BIT(1) |
| 34 | #define STM32F4_SPI_CR1_MSTR BIT(2) |
| 35 | #define STM32F4_SPI_CR1_BR_SHIFT 3 |
| 36 | #define STM32F4_SPI_CR1_BR GENMASK(5, 3) |
| 37 | #define STM32F4_SPI_CR1_SPE BIT(6) |
| 38 | #define STM32F4_SPI_CR1_LSBFRST BIT(7) |
| 39 | #define STM32F4_SPI_CR1_SSI BIT(8) |
| 40 | #define STM32F4_SPI_CR1_SSM BIT(9) |
| 41 | #define STM32F4_SPI_CR1_RXONLY BIT(10) |
| 42 | #define STM32F4_SPI_CR1_DFF BIT(11) |
| 43 | #define STM32F4_SPI_CR1_CRCNEXT BIT(12) |
| 44 | #define STM32F4_SPI_CR1_CRCEN BIT(13) |
| 45 | #define STM32F4_SPI_CR1_BIDIOE BIT(14) |
| 46 | #define STM32F4_SPI_CR1_BIDIMODE BIT(15) |
| 47 | #define STM32F4_SPI_CR1_BR_MIN 0 |
| 48 | #define STM32F4_SPI_CR1_BR_MAX (GENMASK(5, 3) >> 3) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 49 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 50 | /* STM32F4_SPI_CR2 bit fields */ |
| 51 | #define STM32F4_SPI_CR2_RXDMAEN BIT(0) |
| 52 | #define STM32F4_SPI_CR2_TXDMAEN BIT(1) |
| 53 | #define STM32F4_SPI_CR2_SSOE BIT(2) |
| 54 | #define STM32F4_SPI_CR2_FRF BIT(4) |
| 55 | #define STM32F4_SPI_CR2_ERRIE BIT(5) |
| 56 | #define STM32F4_SPI_CR2_RXNEIE BIT(6) |
| 57 | #define STM32F4_SPI_CR2_TXEIE BIT(7) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 58 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 59 | /* STM32F4_SPI_SR bit fields */ |
| 60 | #define STM32F4_SPI_SR_RXNE BIT(0) |
| 61 | #define STM32F4_SPI_SR_TXE BIT(1) |
| 62 | #define STM32F4_SPI_SR_CHSIDE BIT(2) |
| 63 | #define STM32F4_SPI_SR_UDR BIT(3) |
| 64 | #define STM32F4_SPI_SR_CRCERR BIT(4) |
| 65 | #define STM32F4_SPI_SR_MODF BIT(5) |
| 66 | #define STM32F4_SPI_SR_OVR BIT(6) |
| 67 | #define STM32F4_SPI_SR_BSY BIT(7) |
| 68 | #define STM32F4_SPI_SR_FRE BIT(8) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 69 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 70 | /* STM32F4_SPI_I2SCFGR bit fields */ |
| 71 | #define STM32F4_SPI_I2SCFGR_I2SMOD BIT(11) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 72 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 73 | /* STM32F4 SPI Baud Rate min/max divisor */ |
| 74 | #define STM32F4_SPI_BR_DIV_MIN (2 << STM32F4_SPI_CR1_BR_MIN) |
| 75 | #define STM32F4_SPI_BR_DIV_MAX (2 << STM32F4_SPI_CR1_BR_MAX) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 76 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 77 | /* STM32H7 SPI registers */ |
| 78 | #define STM32H7_SPI_CR1 0x00 |
| 79 | #define STM32H7_SPI_CR2 0x04 |
| 80 | #define STM32H7_SPI_CFG1 0x08 |
| 81 | #define STM32H7_SPI_CFG2 0x0C |
| 82 | #define STM32H7_SPI_IER 0x10 |
| 83 | #define STM32H7_SPI_SR 0x14 |
| 84 | #define STM32H7_SPI_IFCR 0x18 |
| 85 | #define STM32H7_SPI_TXDR 0x20 |
| 86 | #define STM32H7_SPI_RXDR 0x30 |
| 87 | #define STM32H7_SPI_I2SCFGR 0x50 |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 88 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 89 | /* STM32H7_SPI_CR1 bit fields */ |
| 90 | #define STM32H7_SPI_CR1_SPE BIT(0) |
| 91 | #define STM32H7_SPI_CR1_MASRX BIT(8) |
| 92 | #define STM32H7_SPI_CR1_CSTART BIT(9) |
| 93 | #define STM32H7_SPI_CR1_CSUSP BIT(10) |
| 94 | #define STM32H7_SPI_CR1_HDDIR BIT(11) |
| 95 | #define STM32H7_SPI_CR1_SSI BIT(12) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 96 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 97 | /* STM32H7_SPI_CR2 bit fields */ |
| 98 | #define STM32H7_SPI_CR2_TSIZE_SHIFT 0 |
| 99 | #define STM32H7_SPI_CR2_TSIZE GENMASK(15, 0) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 100 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 101 | /* STM32H7_SPI_CFG1 bit fields */ |
| 102 | #define STM32H7_SPI_CFG1_DSIZE_SHIFT 0 |
| 103 | #define STM32H7_SPI_CFG1_DSIZE GENMASK(4, 0) |
| 104 | #define STM32H7_SPI_CFG1_FTHLV_SHIFT 5 |
| 105 | #define STM32H7_SPI_CFG1_FTHLV GENMASK(8, 5) |
| 106 | #define STM32H7_SPI_CFG1_RXDMAEN BIT(14) |
| 107 | #define STM32H7_SPI_CFG1_TXDMAEN BIT(15) |
| 108 | #define STM32H7_SPI_CFG1_MBR_SHIFT 28 |
| 109 | #define STM32H7_SPI_CFG1_MBR GENMASK(30, 28) |
| 110 | #define STM32H7_SPI_CFG1_MBR_MIN 0 |
| 111 | #define STM32H7_SPI_CFG1_MBR_MAX (GENMASK(30, 28) >> 28) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 112 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 113 | /* STM32H7_SPI_CFG2 bit fields */ |
| 114 | #define STM32H7_SPI_CFG2_MIDI_SHIFT 4 |
| 115 | #define STM32H7_SPI_CFG2_MIDI GENMASK(7, 4) |
| 116 | #define STM32H7_SPI_CFG2_COMM_SHIFT 17 |
| 117 | #define STM32H7_SPI_CFG2_COMM GENMASK(18, 17) |
| 118 | #define STM32H7_SPI_CFG2_SP_SHIFT 19 |
| 119 | #define STM32H7_SPI_CFG2_SP GENMASK(21, 19) |
| 120 | #define STM32H7_SPI_CFG2_MASTER BIT(22) |
| 121 | #define STM32H7_SPI_CFG2_LSBFRST BIT(23) |
| 122 | #define STM32H7_SPI_CFG2_CPHA BIT(24) |
| 123 | #define STM32H7_SPI_CFG2_CPOL BIT(25) |
| 124 | #define STM32H7_SPI_CFG2_SSM BIT(26) |
| 125 | #define STM32H7_SPI_CFG2_AFCNTR BIT(31) |
| 126 | |
| 127 | /* STM32H7_SPI_IER bit fields */ |
| 128 | #define STM32H7_SPI_IER_RXPIE BIT(0) |
| 129 | #define STM32H7_SPI_IER_TXPIE BIT(1) |
| 130 | #define STM32H7_SPI_IER_DXPIE BIT(2) |
| 131 | #define STM32H7_SPI_IER_EOTIE BIT(3) |
| 132 | #define STM32H7_SPI_IER_TXTFIE BIT(4) |
| 133 | #define STM32H7_SPI_IER_OVRIE BIT(6) |
| 134 | #define STM32H7_SPI_IER_MODFIE BIT(9) |
| 135 | #define STM32H7_SPI_IER_ALL GENMASK(10, 0) |
| 136 | |
| 137 | /* STM32H7_SPI_SR bit fields */ |
| 138 | #define STM32H7_SPI_SR_RXP BIT(0) |
| 139 | #define STM32H7_SPI_SR_TXP BIT(1) |
| 140 | #define STM32H7_SPI_SR_EOT BIT(3) |
| 141 | #define STM32H7_SPI_SR_OVR BIT(6) |
| 142 | #define STM32H7_SPI_SR_MODF BIT(9) |
| 143 | #define STM32H7_SPI_SR_SUSP BIT(11) |
| 144 | #define STM32H7_SPI_SR_RXPLVL_SHIFT 13 |
| 145 | #define STM32H7_SPI_SR_RXPLVL GENMASK(14, 13) |
| 146 | #define STM32H7_SPI_SR_RXWNE BIT(15) |
| 147 | |
| 148 | /* STM32H7_SPI_IFCR bit fields */ |
| 149 | #define STM32H7_SPI_IFCR_ALL GENMASK(11, 3) |
| 150 | |
| 151 | /* STM32H7_SPI_I2SCFGR bit fields */ |
| 152 | #define STM32H7_SPI_I2SCFGR_I2SMOD BIT(0) |
| 153 | |
| 154 | /* STM32H7 SPI Master Baud Rate min/max divisor */ |
| 155 | #define STM32H7_SPI_MBR_DIV_MIN (2 << STM32H7_SPI_CFG1_MBR_MIN) |
| 156 | #define STM32H7_SPI_MBR_DIV_MAX (2 << STM32H7_SPI_CFG1_MBR_MAX) |
| 157 | |
| 158 | /* STM32H7 SPI Communication mode */ |
| 159 | #define STM32H7_SPI_FULL_DUPLEX 0 |
| 160 | #define STM32H7_SPI_SIMPLEX_TX 1 |
| 161 | #define STM32H7_SPI_SIMPLEX_RX 2 |
| 162 | #define STM32H7_SPI_HALF_DUPLEX 3 |
| 163 | |
| 164 | /* SPI Communication type */ |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 165 | #define SPI_FULL_DUPLEX 0 |
| 166 | #define SPI_SIMPLEX_TX 1 |
| 167 | #define SPI_SIMPLEX_RX 2 |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 168 | #define SPI_3WIRE_TX 3 |
| 169 | #define SPI_3WIRE_RX 4 |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 170 | |
| 171 | #define SPI_1HZ_NS 1000000000 |
| 172 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 173 | /* |
| 174 | * use PIO for small transfers, avoiding DMA setup/teardown overhead for drivers |
| 175 | * without fifo buffers. |
| 176 | */ |
| 177 | #define SPI_DMA_MIN_BYTES 16 |
| 178 | |
/**
 * struct stm32_spi_reg - stm32 SPI register & bitfield desc
 * @reg: register offset
 * @mask: bitfield mask
 * @shift: left shift of the bitfield inside the register
 */
struct stm32_spi_reg {
	int reg;
	int mask;
	int shift;
};
| 190 | |
/**
 * struct stm32_spi_regspec - stm32 registers definition, compatible dependent data
 * @en: enable register and SPI enable bit
 * @dma_rx_en: SPI DMA RX enable register and SPI DMA RX enable bit
 * @dma_tx_en: SPI DMA TX enable register and SPI DMA TX enable bit
 * @cpol: clock polarity register and polarity bit
 * @cpha: clock phase register and phase bit
 * @lsb_first: LSB transmitted first register and bit
 * @br: baud rate register and bitfields
 * @rx: SPI RX data register
 * @tx: SPI TX data register
 */
struct stm32_spi_regspec {
	const struct stm32_spi_reg en;
	const struct stm32_spi_reg dma_rx_en;
	const struct stm32_spi_reg dma_tx_en;
	const struct stm32_spi_reg cpol;
	const struct stm32_spi_reg cpha;
	const struct stm32_spi_reg lsb_first;
	const struct stm32_spi_reg br;
	const struct stm32_spi_reg rx;
	const struct stm32_spi_reg tx;
};
| 214 | |
| 215 | struct stm32_spi; |
| 216 | |
/**
 * struct stm32_spi_cfg - stm32 compatible configuration data
 * @regs: registers descriptions
 * @get_fifo_size: routine to get fifo size
 * @get_bpw_mask: routine to get bits per word mask
 * @disable: routine to disable controller
 * @config: routine to configure controller as SPI Master
 * @set_bpw: routine to configure registers to for bits per word
 * @set_mode: routine to configure registers to desired mode
 * @set_data_idleness: optional routine to configure registers to desired idle
 *	time between frames (if driver has this functionality)
 * @set_number_of_data: optional routine to configure registers to desired
 *	number of data (if driver has this functionality)
 * @can_dma: routine to determine if the transfer is eligible for DMA use
 * @transfer_one_dma_start: routine to start transfer a single spi_transfer
 *	using DMA
 * @dma_rx_cb: routine to call after DMA RX channel operation is complete
 * @dma_tx_cb: routine to call after DMA TX channel operation is complete
 * @transfer_one_irq: routine to configure interrupts for driver
 * @irq_handler_event: Interrupt handler for SPI controller events
 * @irq_handler_thread: thread of interrupt handler for SPI controller
 * @baud_rate_div_min: minimum baud rate divisor
 * @baud_rate_div_max: maximum baud rate divisor
 * @has_fifo: boolean to know if fifo is used for driver
 */
struct stm32_spi_cfg {
	const struct stm32_spi_regspec *regs;
	int (*get_fifo_size)(struct stm32_spi *spi);
	int (*get_bpw_mask)(struct stm32_spi *spi);
	void (*disable)(struct stm32_spi *spi);
	int (*config)(struct stm32_spi *spi);
	void (*set_bpw)(struct stm32_spi *spi);
	int (*set_mode)(struct stm32_spi *spi, unsigned int comm_type);
	void (*set_data_idleness)(struct stm32_spi *spi, u32 length);
	int (*set_number_of_data)(struct stm32_spi *spi, u32 length);
	void (*transfer_one_dma_start)(struct stm32_spi *spi);
	void (*dma_rx_cb)(void *data);
	void (*dma_tx_cb)(void *data);
	int (*transfer_one_irq)(struct stm32_spi *spi);
	irqreturn_t (*irq_handler_event)(int irq, void *dev_id);
	irqreturn_t (*irq_handler_thread)(int irq, void *dev_id);
	unsigned int baud_rate_div_min;
	unsigned int baud_rate_div_max;
	bool has_fifo;
};
| 263 | |
/**
 * struct stm32_spi - private data of the SPI controller
 * @dev: driver model representation of the controller
 * @master: controller master interface
 * @cfg: compatible configuration data
 * @base: virtual memory area
 * @clk: hw kernel clock feeding the SPI clock generator
 * @clk_rate: rate of the hw kernel clock feeding the SPI clock generator
 * @rst: SPI controller reset line
 * @lock: prevent I/O concurrent access
 * @irq: SPI controller interrupt line
 * @fifo_size: size of the embedded fifo in bytes
 * @cur_midi: master inter-data idleness in ns
 * @cur_speed: speed configured in Hz
 * @cur_bpw: number of bits in a single SPI data frame
 * @cur_fthlv: fifo threshold level (data frames in a single data packet)
 * @cur_comm: SPI communication mode
 * @cur_xferlen: current transfer length in bytes
 * @cur_usedma: boolean to know if dma is used in current transfer
 * @tx_buf: data to be written, or NULL
 * @rx_buf: data to be read, or NULL
 * @tx_len: number of data to be written in bytes
 * @rx_len: number of data to be read in bytes
 * @dma_tx: dma channel for TX transfer
 * @dma_rx: dma channel for RX transfer
 * @phys_addr: SPI registers physical base address
 */
struct stm32_spi {
	struct device *dev;
	struct spi_master *master;
	const struct stm32_spi_cfg *cfg;
	void __iomem *base;
	struct clk *clk;
	u32 clk_rate;
	struct reset_control *rst;
	spinlock_t lock; /* prevent I/O concurrent access */
	int irq;
	unsigned int fifo_size;

	/* cur_* fields describe the transfer currently in progress */
	unsigned int cur_midi;
	unsigned int cur_speed;
	unsigned int cur_bpw;
	unsigned int cur_fthlv;
	unsigned int cur_comm;
	unsigned int cur_xferlen;
	bool cur_usedma;

	const void *tx_buf;
	void *rx_buf;
	int tx_len;
	int rx_len;
	struct dma_chan *dma_tx;
	struct dma_chan *dma_rx;
	dma_addr_t phys_addr;
};
| 319 | |
/* Register/bitfield description for the STM32F4 SPI instance */
static const struct stm32_spi_regspec stm32f4_spi_regspec = {
	.en = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE },

	.dma_rx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_RXDMAEN },
	.dma_tx_en = { STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN },

	.cpol = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPOL },
	.cpha = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_CPHA },
	.lsb_first = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_LSBFRST },
	.br = { STM32F4_SPI_CR1, STM32F4_SPI_CR1_BR, STM32F4_SPI_CR1_BR_SHIFT },

	/* F4 uses a single data register for both directions */
	.rx = { STM32F4_SPI_DR },
	.tx = { STM32F4_SPI_DR },
};
| 334 | |
/* Register/bitfield description for the STM32H7 SPI instance */
static const struct stm32_spi_regspec stm32h7_spi_regspec = {
	/*
	 * SPI data transfer is enabled but spi_ker_ck is idle.
	 * CFG1 and CFG2 registers are write protected when SPE is enabled.
	 */
	.en = { STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE },

	.dma_rx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_RXDMAEN },
	.dma_tx_en = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN },

	.cpol = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPOL },
	.cpha = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_CPHA },
	.lsb_first = { STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_LSBFRST },
	.br = { STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_MBR,
		STM32H7_SPI_CFG1_MBR_SHIFT },

	/* H7 has distinct TX and RX data registers */
	.rx = { STM32H7_SPI_RXDR },
	.tx = { STM32H7_SPI_TXDR },
};
| 353 | |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 354 | static inline void stm32_spi_set_bits(struct stm32_spi *spi, |
| 355 | u32 offset, u32 bits) |
| 356 | { |
| 357 | writel_relaxed(readl_relaxed(spi->base + offset) | bits, |
| 358 | spi->base + offset); |
| 359 | } |
| 360 | |
| 361 | static inline void stm32_spi_clr_bits(struct stm32_spi *spi, |
| 362 | u32 offset, u32 bits) |
| 363 | { |
| 364 | writel_relaxed(readl_relaxed(spi->base + offset) & ~bits, |
| 365 | spi->base + offset); |
| 366 | } |
| 367 | |
/**
 * stm32h7_spi_get_fifo_size - Return fifo size
 * @spi: pointer to the spi controller data structure
 *
 * Probe the TX fifo depth empirically: with the controller enabled,
 * push bytes into TXDR until the TXP flag deasserts; the number of
 * accepted bytes is the fifo size. The controller is disabled again
 * before returning, so no data is actually transmitted on the bus.
 */
static int stm32h7_spi_get_fifo_size(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 count = 0;

	/* lock out concurrent register access while SPE is toggled */
	spin_lock_irqsave(&spi->lock, flags);

	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);

	/* TXP stays set as long as the fifo can accept another packet */
	while (readl_relaxed(spi->base + STM32H7_SPI_SR) & STM32H7_SPI_SR_TXP)
		writeb_relaxed(++count, spi->base + STM32H7_SPI_TXDR);

	/* disabling the controller also discards the bytes queued above */
	stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE);

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_dbg(spi->dev, "%d x 8-bit fifo size\n", count);

	return count;
}
| 392 | |
| 393 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 394 | * stm32f4_spi_get_bpw_mask - Return bits per word mask |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 395 | * @spi: pointer to the spi controller data structure |
| 396 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 397 | static int stm32f4_spi_get_bpw_mask(struct stm32_spi *spi) |
| 398 | { |
| 399 | dev_dbg(spi->dev, "8-bit or 16-bit data frame supported\n"); |
| 400 | return SPI_BPW_MASK(8) | SPI_BPW_MASK(16); |
| 401 | } |
| 402 | |
/**
 * stm32h7_spi_get_bpw_mask - Return bits per word mask
 * @spi: pointer to the spi controller data structure
 *
 * Discover the maximum supported frame width by writing all-ones to the
 * DSIZE field and reading back what the hardware actually latched:
 * reserved bits read back as zero, so the result encodes (max_bpw - 1).
 */
static int stm32h7_spi_get_bpw_mask(struct stm32_spi *spi)
{
	unsigned long flags;
	u32 cfg1, max_bpw;

	spin_lock_irqsave(&spi->lock, flags);

	/*
	 * The most significant bit at DSIZE bit field is reserved when the
	 * maximum data size of peripheral instances is limited to 16-bit
	 */
	stm32_spi_set_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_DSIZE);

	cfg1 = readl_relaxed(spi->base + STM32H7_SPI_CFG1);
	max_bpw = (cfg1 & STM32H7_SPI_CFG1_DSIZE) >>
		  STM32H7_SPI_CFG1_DSIZE_SHIFT;
	/* DSIZE encodes the frame size minus one */
	max_bpw += 1;

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_dbg(spi->dev, "%d-bit maximum data frame\n", max_bpw);

	/* H7 minimum frame width is 4 bits */
	return SPI_BPW_RANGE_MASK(4, max_bpw);
}
| 431 | |
| 432 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 433 | * stm32_spi_prepare_mbr - Determine baud rate divisor value |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 434 | * @spi: pointer to the spi controller data structure |
| 435 | * @speed_hz: requested speed |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 436 | * @min_div: minimum baud rate divisor |
| 437 | * @max_div: maximum baud rate divisor |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 438 | * |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 439 | * Return baud rate divisor value in case of success or -EINVAL |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 440 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 441 | static int stm32_spi_prepare_mbr(struct stm32_spi *spi, u32 speed_hz, |
| 442 | u32 min_div, u32 max_div) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 443 | { |
| 444 | u32 div, mbrdiv; |
| 445 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 446 | /* Ensure spi->clk_rate is even */ |
| 447 | div = DIV_ROUND_UP(spi->clk_rate & ~0x1, speed_hz); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 448 | |
| 449 | /* |
| 450 | * SPI framework set xfer->speed_hz to master->max_speed_hz if |
| 451 | * xfer->speed_hz is greater than master->max_speed_hz, and it returns |
| 452 | * an error when xfer->speed_hz is lower than master->min_speed_hz, so |
| 453 | * no need to check it there. |
| 454 | * However, we need to ensure the following calculations. |
| 455 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 456 | if ((div < min_div) || (div > max_div)) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 457 | return -EINVAL; |
| 458 | |
| 459 | /* Determine the first power of 2 greater than or equal to div */ |
| 460 | if (div & (div - 1)) |
| 461 | mbrdiv = fls(div); |
| 462 | else |
| 463 | mbrdiv = fls(div) - 1; |
| 464 | |
| 465 | spi->cur_speed = spi->clk_rate / (1 << mbrdiv); |
| 466 | |
| 467 | return mbrdiv - 1; |
| 468 | } |
| 469 | |
| 470 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 471 | * stm32h7_spi_prepare_fthlv - Determine FIFO threshold level |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 472 | * @spi: pointer to the spi controller data structure |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 473 | * @xfer_len: length of the message to be transferred |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 474 | */ |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 475 | static u32 stm32h7_spi_prepare_fthlv(struct stm32_spi *spi, u32 xfer_len) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 476 | { |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 477 | u32 fthlv, half_fifo, packet; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 478 | |
| 479 | /* data packet should not exceed 1/2 of fifo space */ |
| 480 | half_fifo = (spi->fifo_size / 2); |
| 481 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 482 | /* data_packet should not exceed transfer length */ |
| 483 | if (half_fifo > xfer_len) |
| 484 | packet = xfer_len; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 485 | else |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 486 | packet = half_fifo; |
| 487 | |
| 488 | if (spi->cur_bpw <= 8) |
| 489 | fthlv = packet; |
| 490 | else if (spi->cur_bpw <= 16) |
| 491 | fthlv = packet / 2; |
| 492 | else |
| 493 | fthlv = packet / 4; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 494 | |
| 495 | /* align packet size with data registers access */ |
| 496 | if (spi->cur_bpw > 8) |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 497 | fthlv += (fthlv % 2) ? 1 : 0; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 498 | else |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 499 | fthlv += (fthlv % 4) ? (4 - (fthlv % 4)) : 0; |
| 500 | |
| 501 | if (!fthlv) |
| 502 | fthlv = 1; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 503 | |
| 504 | return fthlv; |
| 505 | } |
| 506 | |
/**
 * stm32f4_spi_write_tx - Write bytes to Transmit Data Register
 * @spi: pointer to the spi controller data structure
 *
 * Read from tx_buf depends on remaining bytes to avoid to read beyond
 * tx_buf end.
 */
static void stm32f4_spi_write_tx(struct stm32_spi *spi)
{
	/* Push one frame only when data remains and TXE reports DR empty */
	if ((spi->tx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
				  STM32F4_SPI_SR_TXE)) {
		/* offset of the next frame within the transfer buffer */
		u32 offs = spi->cur_xferlen - spi->tx_len;

		if (spi->cur_bpw == 16) {
			/* 16-bit frame: halfword access to the data register */
			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);

			writew_relaxed(*tx_buf16, spi->base + STM32F4_SPI_DR);
			spi->tx_len -= sizeof(u16);
		} else {
			/* 8-bit frame: byte access to the data register */
			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);

			writeb_relaxed(*tx_buf8, spi->base + STM32F4_SPI_DR);
			spi->tx_len -= sizeof(u8);
		}
	}

	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
}
| 535 | |
/**
 * stm32h7_spi_write_txfifo - Write bytes in Transmit Data Register
 * @spi: pointer to the spi controller data structure
 *
 * Read from tx_buf depends on remaining bytes to avoid to read beyond
 * tx_buf end.
 */
static void stm32h7_spi_write_txfifo(struct stm32_spi *spi)
{
	/* Fill the fifo while data remains and TXP reports free space */
	while ((spi->tx_len > 0) &&
	       (readl_relaxed(spi->base + STM32H7_SPI_SR) &
		STM32H7_SPI_SR_TXP)) {
		/* offset of the next data within the transfer buffer */
		u32 offs = spi->cur_xferlen - spi->tx_len;

		/* use the widest access that still fits the remaining bytes */
		if (spi->tx_len >= sizeof(u32)) {
			const u32 *tx_buf32 = (const u32 *)(spi->tx_buf + offs);

			writel_relaxed(*tx_buf32, spi->base + STM32H7_SPI_TXDR);
			spi->tx_len -= sizeof(u32);
		} else if (spi->tx_len >= sizeof(u16)) {
			const u16 *tx_buf16 = (const u16 *)(spi->tx_buf + offs);

			writew_relaxed(*tx_buf16, spi->base + STM32H7_SPI_TXDR);
			spi->tx_len -= sizeof(u16);
		} else {
			const u8 *tx_buf8 = (const u8 *)(spi->tx_buf + offs);

			writeb_relaxed(*tx_buf8, spi->base + STM32H7_SPI_TXDR);
			spi->tx_len -= sizeof(u8);
		}
	}

	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->tx_len);
}
| 570 | |
/**
 * stm32f4_spi_read_rx - Read bytes from Receive Data Register
 * @spi: pointer to the spi controller data structure
 *
 * Write in rx_buf depends on remaining bytes to avoid to write beyond
 * rx_buf end.
 */
static void stm32f4_spi_read_rx(struct stm32_spi *spi)
{
	/* Pop one frame only when data remains and RXNE reports data ready */
	if ((spi->rx_len > 0) && (readl_relaxed(spi->base + STM32F4_SPI_SR) &
				  STM32F4_SPI_SR_RXNE)) {
		/* offset of the next frame within the receive buffer */
		u32 offs = spi->cur_xferlen - spi->rx_len;

		if (spi->cur_bpw == 16) {
			/* 16-bit frame: halfword access to the data register */
			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);

			*rx_buf16 = readw_relaxed(spi->base + STM32F4_SPI_DR);
			spi->rx_len -= sizeof(u16);
		} else {
			/* 8-bit frame: byte access to the data register */
			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);

			*rx_buf8 = readb_relaxed(spi->base + STM32F4_SPI_DR);
			spi->rx_len -= sizeof(u8);
		}
	}

	dev_dbg(spi->dev, "%s: %d bytes left\n", __func__, spi->rx_len);
}
| 599 | |
/**
 * stm32h7_spi_read_rxfifo - Read bytes in Receive Data Register
 * @spi: pointer to the spi controller data structure
 * @flush: when true, also drain residual frames still reported by
 *	RXWNE/RXPLVL even though RXP is deasserted (end of transfer)
 *
 * Write in rx_buf depends on remaining bytes to avoid to write beyond
 * rx_buf end.
 */
static void stm32h7_spi_read_rxfifo(struct stm32_spi *spi, bool flush)
{
	u32 sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
	/* RXPLVL = number of frames below a full packet left in the fifo */
	u32 rxplvl = (sr & STM32H7_SPI_SR_RXPLVL) >>
		     STM32H7_SPI_SR_RXPLVL_SHIFT;

	while ((spi->rx_len > 0) &&
	       ((sr & STM32H7_SPI_SR_RXP) ||
		(flush && ((sr & STM32H7_SPI_SR_RXWNE) || (rxplvl > 0))))) {
		/* offset of the next data within the receive buffer */
		u32 offs = spi->cur_xferlen - spi->rx_len;

		/* use the widest access the remaining/fifo contents allow */
		if ((spi->rx_len >= sizeof(u32)) ||
		    (flush && (sr & STM32H7_SPI_SR_RXWNE))) {
			u32 *rx_buf32 = (u32 *)(spi->rx_buf + offs);

			*rx_buf32 = readl_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u32);
		} else if ((spi->rx_len >= sizeof(u16)) ||
			   (flush && (rxplvl >= 2 || spi->cur_bpw > 8))) {
			u16 *rx_buf16 = (u16 *)(spi->rx_buf + offs);

			*rx_buf16 = readw_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u16);
		} else {
			u8 *rx_buf8 = (u8 *)(spi->rx_buf + offs);

			*rx_buf8 = readb_relaxed(spi->base + STM32H7_SPI_RXDR);
			spi->rx_len -= sizeof(u8);
		}

		/* refresh status: each RXDR access updates RXP/RXWNE/RXPLVL */
		sr = readl_relaxed(spi->base + STM32H7_SPI_SR);
		rxplvl = (sr & STM32H7_SPI_SR_RXPLVL) >>
			 STM32H7_SPI_SR_RXPLVL_SHIFT;
	}

	dev_dbg(spi->dev, "%s%s: %d bytes left\n", __func__,
		flush ? "(flush)" : "", spi->rx_len);
}
| 645 | |
| 646 | /** |
| 647 | * stm32_spi_enable - Enable SPI controller |
| 648 | * @spi: pointer to the spi controller data structure |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 649 | */ |
| 650 | static void stm32_spi_enable(struct stm32_spi *spi) |
| 651 | { |
| 652 | dev_dbg(spi->dev, "enable controller\n"); |
| 653 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 654 | stm32_spi_set_bits(spi, spi->cfg->regs->en.reg, |
| 655 | spi->cfg->regs->en.mask); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 656 | } |
| 657 | |
| 658 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 659 | * stm32f4_spi_disable - Disable SPI controller |
| 660 | * @spi: pointer to the spi controller data structure |
| 661 | */ |
| 662 | static void stm32f4_spi_disable(struct stm32_spi *spi) |
| 663 | { |
| 664 | unsigned long flags; |
| 665 | u32 sr; |
| 666 | |
| 667 | dev_dbg(spi->dev, "disable controller\n"); |
| 668 | |
| 669 | spin_lock_irqsave(&spi->lock, flags); |
| 670 | |
| 671 | if (!(readl_relaxed(spi->base + STM32F4_SPI_CR1) & |
| 672 | STM32F4_SPI_CR1_SPE)) { |
| 673 | spin_unlock_irqrestore(&spi->lock, flags); |
| 674 | return; |
| 675 | } |
| 676 | |
| 677 | /* Disable interrupts */ |
| 678 | stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXEIE | |
| 679 | STM32F4_SPI_CR2_RXNEIE | |
| 680 | STM32F4_SPI_CR2_ERRIE); |
| 681 | |
| 682 | /* Wait until BSY = 0 */ |
| 683 | if (readl_relaxed_poll_timeout_atomic(spi->base + STM32F4_SPI_SR, |
| 684 | sr, !(sr & STM32F4_SPI_SR_BSY), |
| 685 | 10, 100000) < 0) { |
| 686 | dev_warn(spi->dev, "disabling condition timeout\n"); |
| 687 | } |
| 688 | |
| 689 | if (spi->cur_usedma && spi->dma_tx) |
| 690 | dmaengine_terminate_all(spi->dma_tx); |
| 691 | if (spi->cur_usedma && spi->dma_rx) |
| 692 | dmaengine_terminate_all(spi->dma_rx); |
| 693 | |
| 694 | stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SPE); |
| 695 | |
| 696 | stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_TXDMAEN | |
| 697 | STM32F4_SPI_CR2_RXDMAEN); |
| 698 | |
| 699 | /* Sequence to clear OVR flag */ |
| 700 | readl_relaxed(spi->base + STM32F4_SPI_DR); |
| 701 | readl_relaxed(spi->base + STM32F4_SPI_SR); |
| 702 | |
| 703 | spin_unlock_irqrestore(&spi->lock, flags); |
| 704 | } |
| 705 | |
| 706 | /** |
| 707 | * stm32h7_spi_disable - Disable SPI controller |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 708 | * @spi: pointer to the spi controller data structure |
| 709 | * |
| 710 | * RX-Fifo is flushed when SPI controller is disabled. To prevent any data |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 711 | * loss, use stm32h7_spi_read_rxfifo(flush) to read the remaining bytes in |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 712 | * RX-Fifo. |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 713 | * Normally, if TSIZE has been configured, we should relax the hardware at the |
| 714 | * reception of the EOT interrupt. But in case of error, EOT will not be |
| 715 | * raised. So the subsystem unprepare_message call allows us to properly |
| 716 | * complete the transfer from an hardware point of view. |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 717 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 718 | static void stm32h7_spi_disable(struct stm32_spi *spi) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 719 | { |
| 720 | unsigned long flags; |
| 721 | u32 cr1, sr; |
| 722 | |
| 723 | dev_dbg(spi->dev, "disable controller\n"); |
| 724 | |
| 725 | spin_lock_irqsave(&spi->lock, flags); |
| 726 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 727 | cr1 = readl_relaxed(spi->base + STM32H7_SPI_CR1); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 728 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 729 | if (!(cr1 & STM32H7_SPI_CR1_SPE)) { |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 730 | spin_unlock_irqrestore(&spi->lock, flags); |
| 731 | return; |
| 732 | } |
| 733 | |
| 734 | /* Wait on EOT or suspend the flow */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 735 | if (readl_relaxed_poll_timeout_atomic(spi->base + STM32H7_SPI_SR, |
| 736 | sr, !(sr & STM32H7_SPI_SR_EOT), |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 737 | 10, 100000) < 0) { |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 738 | if (cr1 & STM32H7_SPI_CR1_CSTART) { |
| 739 | writel_relaxed(cr1 | STM32H7_SPI_CR1_CSUSP, |
| 740 | spi->base + STM32H7_SPI_CR1); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 741 | if (readl_relaxed_poll_timeout_atomic( |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 742 | spi->base + STM32H7_SPI_SR, |
| 743 | sr, !(sr & STM32H7_SPI_SR_SUSP), |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 744 | 10, 100000) < 0) |
| 745 | dev_warn(spi->dev, |
| 746 | "Suspend request timeout\n"); |
| 747 | } |
| 748 | } |
| 749 | |
| 750 | if (!spi->cur_usedma && spi->rx_buf && (spi->rx_len > 0)) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 751 | stm32h7_spi_read_rxfifo(spi, true); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 752 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 753 | if (spi->cur_usedma && spi->dma_tx) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 754 | dmaengine_terminate_all(spi->dma_tx); |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 755 | if (spi->cur_usedma && spi->dma_rx) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 756 | dmaengine_terminate_all(spi->dma_rx); |
| 757 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 758 | stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SPE); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 759 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 760 | stm32_spi_clr_bits(spi, STM32H7_SPI_CFG1, STM32H7_SPI_CFG1_TXDMAEN | |
| 761 | STM32H7_SPI_CFG1_RXDMAEN); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 762 | |
| 763 | /* Disable interrupts and clear status flags */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 764 | writel_relaxed(0, spi->base + STM32H7_SPI_IER); |
| 765 | writel_relaxed(STM32H7_SPI_IFCR_ALL, spi->base + STM32H7_SPI_IFCR); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 766 | |
| 767 | spin_unlock_irqrestore(&spi->lock, flags); |
| 768 | } |
| 769 | |
| 770 | /** |
| 771 | * stm32_spi_can_dma - Determine if the transfer is eligible for DMA use |
| 772 | * |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 773 | * If driver has fifo and the current transfer size is greater than fifo size, |
| 774 | * use DMA. Otherwise use DMA for transfer longer than defined DMA min bytes. |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 775 | */ |
| 776 | static bool stm32_spi_can_dma(struct spi_master *master, |
| 777 | struct spi_device *spi_dev, |
| 778 | struct spi_transfer *transfer) |
| 779 | { |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 780 | unsigned int dma_size; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 781 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 782 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 783 | if (spi->cfg->has_fifo) |
| 784 | dma_size = spi->fifo_size; |
| 785 | else |
| 786 | dma_size = SPI_DMA_MIN_BYTES; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 787 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 788 | dev_dbg(spi->dev, "%s: %s\n", __func__, |
| 789 | (transfer->len > dma_size) ? "true" : "false"); |
| 790 | |
| 791 | return (transfer->len > dma_size); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 792 | } |
| 793 | |
| 794 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 795 | * stm32f4_spi_irq_event - Interrupt handler for SPI controller events |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 796 | * @irq: interrupt line |
| 797 | * @dev_id: SPI controller master interface |
| 798 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 799 | static irqreturn_t stm32f4_spi_irq_event(int irq, void *dev_id) |
| 800 | { |
| 801 | struct spi_master *master = dev_id; |
| 802 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 803 | u32 sr, mask = 0; |
| 804 | unsigned long flags; |
| 805 | bool end = false; |
| 806 | |
| 807 | spin_lock_irqsave(&spi->lock, flags); |
| 808 | |
| 809 | sr = readl_relaxed(spi->base + STM32F4_SPI_SR); |
| 810 | /* |
| 811 | * BSY flag is not handled in interrupt but it is normal behavior when |
| 812 | * this flag is set. |
| 813 | */ |
| 814 | sr &= ~STM32F4_SPI_SR_BSY; |
| 815 | |
| 816 | if (!spi->cur_usedma && (spi->cur_comm == SPI_SIMPLEX_TX || |
| 817 | spi->cur_comm == SPI_3WIRE_TX)) { |
| 818 | /* OVR flag shouldn't be handled for TX only mode */ |
| 819 | sr &= ~STM32F4_SPI_SR_OVR | STM32F4_SPI_SR_RXNE; |
| 820 | mask |= STM32F4_SPI_SR_TXE; |
| 821 | } |
| 822 | |
| 823 | if (!spi->cur_usedma && spi->cur_comm == SPI_FULL_DUPLEX) { |
| 824 | /* TXE flag is set and is handled when RXNE flag occurs */ |
| 825 | sr &= ~STM32F4_SPI_SR_TXE; |
| 826 | mask |= STM32F4_SPI_SR_RXNE | STM32F4_SPI_SR_OVR; |
| 827 | } |
| 828 | |
| 829 | if (!(sr & mask)) { |
| 830 | dev_dbg(spi->dev, "spurious IT (sr=0x%08x)\n", sr); |
| 831 | spin_unlock_irqrestore(&spi->lock, flags); |
| 832 | return IRQ_NONE; |
| 833 | } |
| 834 | |
| 835 | if (sr & STM32F4_SPI_SR_OVR) { |
| 836 | dev_warn(spi->dev, "Overrun: received value discarded\n"); |
| 837 | |
| 838 | /* Sequence to clear OVR flag */ |
| 839 | readl_relaxed(spi->base + STM32F4_SPI_DR); |
| 840 | readl_relaxed(spi->base + STM32F4_SPI_SR); |
| 841 | |
| 842 | /* |
| 843 | * If overrun is detected, it means that something went wrong, |
| 844 | * so stop the current transfer. Transfer can wait for next |
| 845 | * RXNE but DR is already read and end never happens. |
| 846 | */ |
| 847 | end = true; |
| 848 | goto end_irq; |
| 849 | } |
| 850 | |
| 851 | if (sr & STM32F4_SPI_SR_TXE) { |
| 852 | if (spi->tx_buf) |
| 853 | stm32f4_spi_write_tx(spi); |
| 854 | if (spi->tx_len == 0) |
| 855 | end = true; |
| 856 | } |
| 857 | |
| 858 | if (sr & STM32F4_SPI_SR_RXNE) { |
| 859 | stm32f4_spi_read_rx(spi); |
| 860 | if (spi->rx_len == 0) |
| 861 | end = true; |
| 862 | else /* Load data for discontinuous mode */ |
| 863 | stm32f4_spi_write_tx(spi); |
| 864 | } |
| 865 | |
| 866 | end_irq: |
| 867 | if (end) { |
| 868 | /* Immediately disable interrupts to do not generate new one */ |
| 869 | stm32_spi_clr_bits(spi, STM32F4_SPI_CR2, |
| 870 | STM32F4_SPI_CR2_TXEIE | |
| 871 | STM32F4_SPI_CR2_RXNEIE | |
| 872 | STM32F4_SPI_CR2_ERRIE); |
| 873 | spin_unlock_irqrestore(&spi->lock, flags); |
| 874 | return IRQ_WAKE_THREAD; |
| 875 | } |
| 876 | |
| 877 | spin_unlock_irqrestore(&spi->lock, flags); |
| 878 | return IRQ_HANDLED; |
| 879 | } |
| 880 | |
| 881 | /** |
| 882 | * stm32f4_spi_irq_thread - Thread of interrupt handler for SPI controller |
| 883 | * @irq: interrupt line |
| 884 | * @dev_id: SPI controller master interface |
| 885 | */ |
| 886 | static irqreturn_t stm32f4_spi_irq_thread(int irq, void *dev_id) |
| 887 | { |
| 888 | struct spi_master *master = dev_id; |
| 889 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 890 | |
| 891 | spi_finalize_current_transfer(master); |
| 892 | stm32f4_spi_disable(spi); |
| 893 | |
| 894 | return IRQ_HANDLED; |
| 895 | } |
| 896 | |
| 897 | /** |
| 898 | * stm32h7_spi_irq_thread - Thread of interrupt handler for SPI controller |
| 899 | * @irq: interrupt line |
| 900 | * @dev_id: SPI controller master interface |
| 901 | */ |
| 902 | static irqreturn_t stm32h7_spi_irq_thread(int irq, void *dev_id) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 903 | { |
| 904 | struct spi_master *master = dev_id; |
| 905 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 906 | u32 sr, ier, mask; |
| 907 | unsigned long flags; |
| 908 | bool end = false; |
| 909 | |
| 910 | spin_lock_irqsave(&spi->lock, flags); |
| 911 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 912 | sr = readl_relaxed(spi->base + STM32H7_SPI_SR); |
| 913 | ier = readl_relaxed(spi->base + STM32H7_SPI_IER); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 914 | |
| 915 | mask = ier; |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 916 | /* |
| 917 | * EOTIE enables irq from EOT, SUSP and TXC events. We need to set |
| 918 | * SUSP to acknowledge it later. TXC is automatically cleared |
| 919 | */ |
| 920 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 921 | mask |= STM32H7_SPI_SR_SUSP; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 922 | /* |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 923 | * DXPIE is set in Full-Duplex, one IT will be raised if TXP and RXP |
| 924 | * are set. So in case of Full-Duplex, need to poll TXP and RXP event. |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 925 | */ |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 926 | if ((spi->cur_comm == SPI_FULL_DUPLEX) && !spi->cur_usedma) |
| 927 | mask |= STM32H7_SPI_SR_TXP | STM32H7_SPI_SR_RXP; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 928 | |
| 929 | if (!(sr & mask)) { |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 930 | dev_warn(spi->dev, "spurious IT (sr=0x%08x, ier=0x%08x)\n", |
| 931 | sr, ier); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 932 | spin_unlock_irqrestore(&spi->lock, flags); |
| 933 | return IRQ_NONE; |
| 934 | } |
| 935 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 936 | if (sr & STM32H7_SPI_SR_SUSP) { |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 937 | static DEFINE_RATELIMIT_STATE(rs, |
| 938 | DEFAULT_RATELIMIT_INTERVAL * 10, |
| 939 | 1); |
| 940 | if (__ratelimit(&rs)) |
| 941 | dev_dbg_ratelimited(spi->dev, "Communication suspended\n"); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 942 | if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0))) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 943 | stm32h7_spi_read_rxfifo(spi, false); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 944 | /* |
| 945 | * If communication is suspended while using DMA, it means |
| 946 | * that something went wrong, so stop the current transfer |
| 947 | */ |
| 948 | if (spi->cur_usedma) |
| 949 | end = true; |
| 950 | } |
| 951 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 952 | if (sr & STM32H7_SPI_SR_MODF) { |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 953 | dev_warn(spi->dev, "Mode fault: transfer aborted\n"); |
| 954 | end = true; |
| 955 | } |
| 956 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 957 | if (sr & STM32H7_SPI_SR_OVR) { |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 958 | dev_err(spi->dev, "Overrun: RX data lost\n"); |
| 959 | end = true; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 960 | } |
| 961 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 962 | if (sr & STM32H7_SPI_SR_EOT) { |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 963 | if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0))) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 964 | stm32h7_spi_read_rxfifo(spi, true); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 965 | end = true; |
| 966 | } |
| 967 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 968 | if (sr & STM32H7_SPI_SR_TXP) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 969 | if (!spi->cur_usedma && (spi->tx_buf && (spi->tx_len > 0))) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 970 | stm32h7_spi_write_txfifo(spi); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 971 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 972 | if (sr & STM32H7_SPI_SR_RXP) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 973 | if (!spi->cur_usedma && (spi->rx_buf && (spi->rx_len > 0))) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 974 | stm32h7_spi_read_rxfifo(spi, false); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 975 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 976 | writel_relaxed(sr & mask, spi->base + STM32H7_SPI_IFCR); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 977 | |
| 978 | spin_unlock_irqrestore(&spi->lock, flags); |
| 979 | |
| 980 | if (end) { |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 981 | stm32h7_spi_disable(spi); |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 982 | spi_finalize_current_transfer(master); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 983 | } |
| 984 | |
| 985 | return IRQ_HANDLED; |
| 986 | } |
| 987 | |
| 988 | /** |
| 989 | * stm32_spi_setup - setup device chip select |
| 990 | */ |
| 991 | static int stm32_spi_setup(struct spi_device *spi_dev) |
| 992 | { |
| 993 | int ret = 0; |
| 994 | |
| 995 | if (!gpio_is_valid(spi_dev->cs_gpio)) { |
| 996 | dev_err(&spi_dev->dev, "%d is not a valid gpio\n", |
| 997 | spi_dev->cs_gpio); |
| 998 | return -EINVAL; |
| 999 | } |
| 1000 | |
| 1001 | dev_dbg(&spi_dev->dev, "%s: set gpio%d output %s\n", __func__, |
| 1002 | spi_dev->cs_gpio, |
| 1003 | (spi_dev->mode & SPI_CS_HIGH) ? "low" : "high"); |
| 1004 | |
| 1005 | ret = gpio_direction_output(spi_dev->cs_gpio, |
| 1006 | !(spi_dev->mode & SPI_CS_HIGH)); |
| 1007 | |
| 1008 | return ret; |
| 1009 | } |
| 1010 | |
| 1011 | /** |
| 1012 | * stm32_spi_prepare_msg - set up the controller to transfer a single message |
| 1013 | */ |
| 1014 | static int stm32_spi_prepare_msg(struct spi_master *master, |
| 1015 | struct spi_message *msg) |
| 1016 | { |
| 1017 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 1018 | struct spi_device *spi_dev = msg->spi; |
| 1019 | struct device_node *np = spi_dev->dev.of_node; |
| 1020 | unsigned long flags; |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1021 | u32 clrb = 0, setb = 0; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1022 | |
| 1023 | /* SPI slave device may need time between data frames */ |
| 1024 | spi->cur_midi = 0; |
| 1025 | if (np && !of_property_read_u32(np, "st,spi-midi-ns", &spi->cur_midi)) |
| 1026 | dev_dbg(spi->dev, "%dns inter-data idleness\n", spi->cur_midi); |
| 1027 | |
| 1028 | if (spi_dev->mode & SPI_CPOL) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1029 | setb |= spi->cfg->regs->cpol.mask; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1030 | else |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1031 | clrb |= spi->cfg->regs->cpol.mask; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1032 | |
| 1033 | if (spi_dev->mode & SPI_CPHA) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1034 | setb |= spi->cfg->regs->cpha.mask; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1035 | else |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1036 | clrb |= spi->cfg->regs->cpha.mask; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1037 | |
| 1038 | if (spi_dev->mode & SPI_LSB_FIRST) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1039 | setb |= spi->cfg->regs->lsb_first.mask; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1040 | else |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1041 | clrb |= spi->cfg->regs->lsb_first.mask; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1042 | |
| 1043 | dev_dbg(spi->dev, "cpol=%d cpha=%d lsb_first=%d cs_high=%d\n", |
| 1044 | spi_dev->mode & SPI_CPOL, |
| 1045 | spi_dev->mode & SPI_CPHA, |
| 1046 | spi_dev->mode & SPI_LSB_FIRST, |
| 1047 | spi_dev->mode & SPI_CS_HIGH); |
| 1048 | |
| 1049 | spin_lock_irqsave(&spi->lock, flags); |
| 1050 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1051 | /* CPOL, CPHA and LSB FIRST bits have common register */ |
| 1052 | if (clrb || setb) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1053 | writel_relaxed( |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1054 | (readl_relaxed(spi->base + spi->cfg->regs->cpol.reg) & |
| 1055 | ~clrb) | setb, |
| 1056 | spi->base + spi->cfg->regs->cpol.reg); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1057 | |
| 1058 | spin_unlock_irqrestore(&spi->lock, flags); |
| 1059 | |
| 1060 | return 0; |
| 1061 | } |
| 1062 | |
| 1063 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1064 | * stm32f4_spi_dma_tx_cb - dma callback |
| 1065 | * |
| 1066 | * DMA callback is called when the transfer is complete for DMA TX channel. |
| 1067 | */ |
| 1068 | static void stm32f4_spi_dma_tx_cb(void *data) |
| 1069 | { |
| 1070 | struct stm32_spi *spi = data; |
| 1071 | |
| 1072 | if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) { |
| 1073 | spi_finalize_current_transfer(spi->master); |
| 1074 | stm32f4_spi_disable(spi); |
| 1075 | } |
| 1076 | } |
| 1077 | |
| 1078 | /** |
| 1079 | * stm32f4_spi_dma_rx_cb - dma callback |
| 1080 | * |
| 1081 | * DMA callback is called when the transfer is complete for DMA RX channel. |
| 1082 | */ |
| 1083 | static void stm32f4_spi_dma_rx_cb(void *data) |
| 1084 | { |
| 1085 | struct stm32_spi *spi = data; |
| 1086 | |
| 1087 | spi_finalize_current_transfer(spi->master); |
| 1088 | stm32f4_spi_disable(spi); |
| 1089 | } |
| 1090 | |
| 1091 | /** |
| 1092 | * stm32h7_spi_dma_cb - dma callback |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1093 | * |
| 1094 | * DMA callback is called when the transfer is complete or when an error |
| 1095 | * occurs. If the transfer is complete, EOT flag is raised. |
| 1096 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1097 | static void stm32h7_spi_dma_cb(void *data) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1098 | { |
| 1099 | struct stm32_spi *spi = data; |
| 1100 | unsigned long flags; |
| 1101 | u32 sr; |
| 1102 | |
| 1103 | spin_lock_irqsave(&spi->lock, flags); |
| 1104 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1105 | sr = readl_relaxed(spi->base + STM32H7_SPI_SR); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1106 | |
| 1107 | spin_unlock_irqrestore(&spi->lock, flags); |
| 1108 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1109 | if (!(sr & STM32H7_SPI_SR_EOT)) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1110 | dev_warn(spi->dev, "DMA error (sr=0x%08x)\n", sr); |
| 1111 | |
| 1112 | /* Now wait for EOT, or SUSP or OVR in case of error */ |
| 1113 | } |
| 1114 | |
| 1115 | /** |
| 1116 | * stm32_spi_dma_config - configure dma slave channel depending on current |
| 1117 | * transfer bits_per_word. |
| 1118 | */ |
| 1119 | static void stm32_spi_dma_config(struct stm32_spi *spi, |
| 1120 | struct dma_slave_config *dma_conf, |
| 1121 | enum dma_transfer_direction dir) |
| 1122 | { |
| 1123 | enum dma_slave_buswidth buswidth; |
| 1124 | u32 maxburst; |
| 1125 | |
| 1126 | if (spi->cur_bpw <= 8) |
| 1127 | buswidth = DMA_SLAVE_BUSWIDTH_1_BYTE; |
| 1128 | else if (spi->cur_bpw <= 16) |
| 1129 | buswidth = DMA_SLAVE_BUSWIDTH_2_BYTES; |
| 1130 | else |
| 1131 | buswidth = DMA_SLAVE_BUSWIDTH_4_BYTES; |
| 1132 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1133 | if (spi->cfg->has_fifo) { |
| 1134 | /* Valid for DMA Half or Full Fifo threshold */ |
| 1135 | if (spi->cur_fthlv == 2) |
| 1136 | maxburst = 1; |
| 1137 | else |
| 1138 | maxburst = spi->cur_fthlv; |
| 1139 | } else { |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1140 | maxburst = 1; |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1141 | } |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1142 | |
| 1143 | memset(dma_conf, 0, sizeof(struct dma_slave_config)); |
| 1144 | dma_conf->direction = dir; |
| 1145 | if (dma_conf->direction == DMA_DEV_TO_MEM) { /* RX */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1146 | dma_conf->src_addr = spi->phys_addr + spi->cfg->regs->rx.reg; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1147 | dma_conf->src_addr_width = buswidth; |
| 1148 | dma_conf->src_maxburst = maxburst; |
| 1149 | |
| 1150 | dev_dbg(spi->dev, "Rx DMA config buswidth=%d, maxburst=%d\n", |
| 1151 | buswidth, maxburst); |
| 1152 | } else if (dma_conf->direction == DMA_MEM_TO_DEV) { /* TX */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1153 | dma_conf->dst_addr = spi->phys_addr + spi->cfg->regs->tx.reg; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1154 | dma_conf->dst_addr_width = buswidth; |
| 1155 | dma_conf->dst_maxburst = maxburst; |
| 1156 | |
| 1157 | dev_dbg(spi->dev, "Tx DMA config buswidth=%d, maxburst=%d\n", |
| 1158 | buswidth, maxburst); |
| 1159 | } |
| 1160 | } |
| 1161 | |
| 1162 | /** |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1163 | * stm32f4_spi_transfer_one_irq - transfer a single spi_transfer using |
| 1164 | * interrupts |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1165 | * |
| 1166 | * It must returns 0 if the transfer is finished or 1 if the transfer is still |
| 1167 | * in progress. |
| 1168 | */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1169 | static int stm32f4_spi_transfer_one_irq(struct stm32_spi *spi) |
| 1170 | { |
| 1171 | unsigned long flags; |
| 1172 | u32 cr2 = 0; |
| 1173 | |
| 1174 | /* Enable the interrupts relative to the current communication mode */ |
| 1175 | if (spi->cur_comm == SPI_SIMPLEX_TX || spi->cur_comm == SPI_3WIRE_TX) { |
| 1176 | cr2 |= STM32F4_SPI_CR2_TXEIE; |
| 1177 | } else if (spi->cur_comm == SPI_FULL_DUPLEX) { |
| 1178 | /* In transmit-only mode, the OVR flag is set in the SR register |
| 1179 | * since the received data are never read. Therefore set OVR |
| 1180 | * interrupt only when rx buffer is available. |
| 1181 | */ |
| 1182 | cr2 |= STM32F4_SPI_CR2_RXNEIE | STM32F4_SPI_CR2_ERRIE; |
| 1183 | } else { |
| 1184 | return -EINVAL; |
| 1185 | } |
| 1186 | |
| 1187 | spin_lock_irqsave(&spi->lock, flags); |
| 1188 | |
| 1189 | stm32_spi_set_bits(spi, STM32F4_SPI_CR2, cr2); |
| 1190 | |
| 1191 | stm32_spi_enable(spi); |
| 1192 | |
| 1193 | /* starting data transfer when buffer is loaded */ |
| 1194 | if (spi->tx_buf) |
| 1195 | stm32f4_spi_write_tx(spi); |
| 1196 | |
| 1197 | spin_unlock_irqrestore(&spi->lock, flags); |
| 1198 | |
| 1199 | return 1; |
| 1200 | } |
| 1201 | |
| 1202 | /** |
| 1203 | * stm32h7_spi_transfer_one_irq - transfer a single spi_transfer using |
| 1204 | * interrupts |
| 1205 | * |
| 1206 | * It must returns 0 if the transfer is finished or 1 if the transfer is still |
| 1207 | * in progress. |
| 1208 | */ |
| 1209 | static int stm32h7_spi_transfer_one_irq(struct stm32_spi *spi) |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1210 | { |
| 1211 | unsigned long flags; |
| 1212 | u32 ier = 0; |
| 1213 | |
| 1214 | /* Enable the interrupts relative to the current communication mode */ |
| 1215 | if (spi->tx_buf && spi->rx_buf) /* Full Duplex */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1216 | ier |= STM32H7_SPI_IER_DXPIE; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1217 | else if (spi->tx_buf) /* Half-Duplex TX dir or Simplex TX */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1218 | ier |= STM32H7_SPI_IER_TXPIE; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1219 | else if (spi->rx_buf) /* Half-Duplex RX dir or Simplex RX */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1220 | ier |= STM32H7_SPI_IER_RXPIE; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1221 | |
| 1222 | /* Enable the interrupts relative to the end of transfer */ |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1223 | ier |= STM32H7_SPI_IER_EOTIE | STM32H7_SPI_IER_TXTFIE | |
| 1224 | STM32H7_SPI_IER_OVRIE | STM32H7_SPI_IER_MODFIE; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1225 | |
| 1226 | spin_lock_irqsave(&spi->lock, flags); |
| 1227 | |
| 1228 | stm32_spi_enable(spi); |
| 1229 | |
| 1230 | /* Be sure to have data in fifo before starting data transfer */ |
| 1231 | if (spi->tx_buf) |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1232 | stm32h7_spi_write_txfifo(spi); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1233 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1234 | stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1235 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1236 | writel_relaxed(ier, spi->base + STM32H7_SPI_IER); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1237 | |
| 1238 | spin_unlock_irqrestore(&spi->lock, flags); |
| 1239 | |
| 1240 | return 1; |
| 1241 | } |
| 1242 | |
/**
 * stm32f4_spi_transfer_one_dma_start - Set SPI driver registers to start
 *				       transfer using DMA
 * @spi: pointer to the spi controller data structure
 */
static void stm32f4_spi_transfer_one_dma_start(struct stm32_spi *spi)
{
	/* In DMA mode end of transfer is handled by DMA TX or RX callback. */
	if (spi->cur_comm == SPI_SIMPLEX_RX || spi->cur_comm == SPI_3WIRE_RX ||
	    spi->cur_comm == SPI_FULL_DUPLEX) {
		/*
		 * In transmit-only mode, the OVR flag is set in the SR register
		 * since the received data are never read. Therefore set OVR
		 * interrupt only when rx buffer is available.
		 */
		stm32_spi_set_bits(spi, STM32F4_SPI_CR2, STM32F4_SPI_CR2_ERRIE);
	}

	stm32_spi_enable(spi);
}
| 1262 | |
/**
 * stm32h7_spi_transfer_one_dma_start - Set SPI driver registers to start
 *				       transfer using DMA
 * @spi: pointer to the spi controller data structure
 */
static void stm32h7_spi_transfer_one_dma_start(struct stm32_spi *spi)
{
	/* Enable the interrupts relative to the end of transfer */
	stm32_spi_set_bits(spi, STM32H7_SPI_IER, STM32H7_SPI_IER_EOTIE |
						 STM32H7_SPI_IER_TXTFIE |
						 STM32H7_SPI_IER_OVRIE |
						 STM32H7_SPI_IER_MODFIE);

	stm32_spi_enable(spi);

	/* CSTART is written last: the transfer starts only once SPE is set */
	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_CSTART);
}
| 1279 | |
/**
 * stm32_spi_transfer_one_dma - transfer a single spi_transfer using DMA
 * @spi: pointer to the spi controller data structure
 * @xfer: pointer to the spi transfer to handle
 *
 * It must returns 0 if the transfer is finished or 1 if the transfer is still
 * in progress. On any DMA preparation or submission failure, the function
 * falls back to the variant's interrupt-based transfer.
 */
static int stm32_spi_transfer_one_dma(struct stm32_spi *spi,
				      struct spi_transfer *xfer)
{
	struct dma_slave_config tx_dma_conf, rx_dma_conf;
	struct dma_async_tx_descriptor *tx_dma_desc, *rx_dma_desc;
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	rx_dma_desc = NULL;
	if (spi->rx_buf && spi->dma_rx) {
		stm32_spi_dma_config(spi, &rx_dma_conf, DMA_DEV_TO_MEM);
		dmaengine_slave_config(spi->dma_rx, &rx_dma_conf);

		/* Enable Rx DMA request */
		stm32_spi_set_bits(spi, spi->cfg->regs->dma_rx_en.reg,
				   spi->cfg->regs->dma_rx_en.mask);

		rx_dma_desc = dmaengine_prep_slave_sg(
					spi->dma_rx, xfer->rx_sg.sgl,
					xfer->rx_sg.nents,
					rx_dma_conf.direction,
					DMA_PREP_INTERRUPT);
	}

	tx_dma_desc = NULL;
	if (spi->tx_buf && spi->dma_tx) {
		stm32_spi_dma_config(spi, &tx_dma_conf, DMA_MEM_TO_DEV);
		dmaengine_slave_config(spi->dma_tx, &tx_dma_conf);

		tx_dma_desc = dmaengine_prep_slave_sg(
					spi->dma_tx, xfer->tx_sg.sgl,
					xfer->tx_sg.nents,
					tx_dma_conf.direction,
					DMA_PREP_INTERRUPT);
	}

	/* A requested direction whose descriptor prep failed aborts DMA use */
	if ((spi->tx_buf && spi->dma_tx && !tx_dma_desc) ||
	    (spi->rx_buf && spi->dma_rx && !rx_dma_desc))
		goto dma_desc_error;

	/* Full duplex needs both directions prepared, not just one */
	if (spi->cur_comm == SPI_FULL_DUPLEX && (!tx_dma_desc || !rx_dma_desc))
		goto dma_desc_error;

	if (rx_dma_desc) {
		rx_dma_desc->callback = spi->cfg->dma_rx_cb;
		rx_dma_desc->callback_param = spi;

		if (dma_submit_error(dmaengine_submit(rx_dma_desc))) {
			dev_err(spi->dev, "Rx DMA submit failed\n");
			goto dma_desc_error;
		}
		/* Enable Rx DMA channel */
		dma_async_issue_pending(spi->dma_rx);
	}

	if (tx_dma_desc) {
		/* Tx callback is only used when no Rx callback will fire */
		if (spi->cur_comm == SPI_SIMPLEX_TX ||
		    spi->cur_comm == SPI_3WIRE_TX) {
			tx_dma_desc->callback = spi->cfg->dma_tx_cb;
			tx_dma_desc->callback_param = spi;
		}

		if (dma_submit_error(dmaengine_submit(tx_dma_desc))) {
			dev_err(spi->dev, "Tx DMA submit failed\n");
			goto dma_submit_error;
		}
		/* Enable Tx DMA channel */
		dma_async_issue_pending(spi->dma_tx);

		/* Enable Tx DMA request */
		stm32_spi_set_bits(spi, spi->cfg->regs->dma_tx_en.reg,
				   spi->cfg->regs->dma_tx_en.mask);
	}

	spi->cfg->transfer_one_dma_start(spi);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 1;

dma_submit_error:
	/* Tx submit failed after Rx was issued: stop the Rx channel */
	if (spi->dma_rx)
		dmaengine_terminate_all(spi->dma_rx);

dma_desc_error:
	stm32_spi_clr_bits(spi, spi->cfg->regs->dma_rx_en.reg,
			   spi->cfg->regs->dma_rx_en.mask);

	spin_unlock_irqrestore(&spi->lock, flags);

	dev_info(spi->dev, "DMA issue: fall back to irq transfer\n");

	spi->cur_usedma = false;
	return spi->cfg->transfer_one_irq(spi);
}
| 1382 | |
| 1383 | /** |
| 1384 | * stm32f4_spi_set_bpw - Configure bits per word |
| 1385 | * @spi: pointer to the spi controller data structure |
| 1386 | */ |
| 1387 | static void stm32f4_spi_set_bpw(struct stm32_spi *spi) |
| 1388 | { |
| 1389 | if (spi->cur_bpw == 16) |
| 1390 | stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF); |
| 1391 | else |
| 1392 | stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_DFF); |
| 1393 | } |
| 1394 | |
/**
 * stm32h7_spi_set_bpw - configure bits per word
 * @spi: pointer to the spi controller data structure
 *
 * Programs both the data frame size (DSIZE) and the FIFO threshold (FTHLV)
 * fields of CFG1 in a single read-modify-write.
 */
static void stm32h7_spi_set_bpw(struct stm32_spi *spi)
{
	u32 bpw, fthlv;
	u32 cfg1_clrb = 0, cfg1_setb = 0;

	/* DSIZE encodes the frame size minus one */
	bpw = spi->cur_bpw - 1;

	cfg1_clrb |= STM32H7_SPI_CFG1_DSIZE;
	cfg1_setb |= (bpw << STM32H7_SPI_CFG1_DSIZE_SHIFT) &
		     STM32H7_SPI_CFG1_DSIZE;

	spi->cur_fthlv = stm32h7_spi_prepare_fthlv(spi, spi->cur_xferlen);
	/* FTHLV encodes the threshold minus one */
	fthlv = spi->cur_fthlv - 1;

	cfg1_clrb |= STM32H7_SPI_CFG1_FTHLV;
	cfg1_setb |= (fthlv << STM32H7_SPI_CFG1_FTHLV_SHIFT) &
		     STM32H7_SPI_CFG1_FTHLV;

	writel_relaxed(
		(readl_relaxed(spi->base + STM32H7_SPI_CFG1) &
		 ~cfg1_clrb) | cfg1_setb,
		spi->base + STM32H7_SPI_CFG1);
}
| 1422 | |
| 1423 | /** |
| 1424 | * stm32_spi_set_mbr - Configure baud rate divisor in master mode |
| 1425 | * @spi: pointer to the spi controller data structure |
| 1426 | * @mbrdiv: baud rate divisor value |
| 1427 | */ |
| 1428 | static void stm32_spi_set_mbr(struct stm32_spi *spi, u32 mbrdiv) |
| 1429 | { |
| 1430 | u32 clrb = 0, setb = 0; |
| 1431 | |
| 1432 | clrb |= spi->cfg->regs->br.mask; |
| 1433 | setb |= ((u32)mbrdiv << spi->cfg->regs->br.shift) & |
| 1434 | spi->cfg->regs->br.mask; |
| 1435 | |
| 1436 | writel_relaxed((readl_relaxed(spi->base + spi->cfg->regs->br.reg) & |
| 1437 | ~clrb) | setb, |
| 1438 | spi->base + spi->cfg->regs->br.reg); |
| 1439 | } |
| 1440 | |
| 1441 | /** |
| 1442 | * stm32_spi_communication_type - return transfer communication type |
| 1443 | * @spi_dev: pointer to the spi device |
| 1444 | * transfer: pointer to spi transfer |
| 1445 | */ |
| 1446 | static unsigned int stm32_spi_communication_type(struct spi_device *spi_dev, |
| 1447 | struct spi_transfer *transfer) |
| 1448 | { |
| 1449 | unsigned int type = SPI_FULL_DUPLEX; |
| 1450 | |
| 1451 | if (spi_dev->mode & SPI_3WIRE) { /* MISO/MOSI signals shared */ |
| 1452 | /* |
| 1453 | * SPI_3WIRE and xfer->tx_buf != NULL and xfer->rx_buf != NULL |
| 1454 | * is forbidden and unvalidated by SPI subsystem so depending |
| 1455 | * on the valid buffer, we can determine the direction of the |
| 1456 | * transfer. |
| 1457 | */ |
| 1458 | if (!transfer->tx_buf) |
| 1459 | type = SPI_3WIRE_RX; |
| 1460 | else |
| 1461 | type = SPI_3WIRE_TX; |
| 1462 | } else { |
| 1463 | if (!transfer->tx_buf) |
| 1464 | type = SPI_SIMPLEX_RX; |
| 1465 | else if (!transfer->rx_buf) |
| 1466 | type = SPI_SIMPLEX_TX; |
| 1467 | } |
| 1468 | |
| 1469 | return type; |
| 1470 | } |
| 1471 | |
| 1472 | /** |
| 1473 | * stm32f4_spi_set_mode - configure communication mode |
| 1474 | * @spi: pointer to the spi controller data structure |
| 1475 | * @comm_type: type of communication to configure |
| 1476 | */ |
| 1477 | static int stm32f4_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type) |
| 1478 | { |
| 1479 | if (comm_type == SPI_3WIRE_TX || comm_type == SPI_SIMPLEX_TX) { |
| 1480 | stm32_spi_set_bits(spi, STM32F4_SPI_CR1, |
| 1481 | STM32F4_SPI_CR1_BIDIMODE | |
| 1482 | STM32F4_SPI_CR1_BIDIOE); |
| 1483 | } else if (comm_type == SPI_FULL_DUPLEX) { |
| 1484 | stm32_spi_clr_bits(spi, STM32F4_SPI_CR1, |
| 1485 | STM32F4_SPI_CR1_BIDIMODE | |
| 1486 | STM32F4_SPI_CR1_BIDIOE); |
| 1487 | } else { |
| 1488 | return -EINVAL; |
| 1489 | } |
| 1490 | |
| 1491 | return 0; |
| 1492 | } |
| 1493 | |
| 1494 | /** |
| 1495 | * stm32h7_spi_set_mode - configure communication mode |
| 1496 | * @spi: pointer to the spi controller data structure |
| 1497 | * @comm_type: type of communication to configure |
| 1498 | */ |
| 1499 | static int stm32h7_spi_set_mode(struct stm32_spi *spi, unsigned int comm_type) |
| 1500 | { |
| 1501 | u32 mode; |
| 1502 | u32 cfg2_clrb = 0, cfg2_setb = 0; |
| 1503 | |
| 1504 | if (comm_type == SPI_3WIRE_RX) { |
| 1505 | mode = STM32H7_SPI_HALF_DUPLEX; |
| 1506 | stm32_spi_clr_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR); |
| 1507 | } else if (comm_type == SPI_3WIRE_TX) { |
| 1508 | mode = STM32H7_SPI_HALF_DUPLEX; |
| 1509 | stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_HDDIR); |
| 1510 | } else if (comm_type == SPI_SIMPLEX_RX) { |
| 1511 | mode = STM32H7_SPI_SIMPLEX_RX; |
| 1512 | } else if (comm_type == SPI_SIMPLEX_TX) { |
| 1513 | mode = STM32H7_SPI_SIMPLEX_TX; |
| 1514 | } else { |
| 1515 | mode = STM32H7_SPI_FULL_DUPLEX; |
| 1516 | } |
| 1517 | |
| 1518 | cfg2_clrb |= STM32H7_SPI_CFG2_COMM; |
| 1519 | cfg2_setb |= (mode << STM32H7_SPI_CFG2_COMM_SHIFT) & |
| 1520 | STM32H7_SPI_CFG2_COMM; |
| 1521 | |
| 1522 | writel_relaxed( |
| 1523 | (readl_relaxed(spi->base + STM32H7_SPI_CFG2) & |
| 1524 | ~cfg2_clrb) | cfg2_setb, |
| 1525 | spi->base + STM32H7_SPI_CFG2); |
| 1526 | |
| 1527 | return 0; |
| 1528 | } |
| 1529 | |
| 1530 | /** |
| 1531 | * stm32h7_spi_data_idleness - configure minimum time delay inserted between two |
| 1532 | * consecutive data frames in master mode |
| 1533 | * @spi: pointer to the spi controller data structure |
| 1534 | * @len: transfer len |
| 1535 | */ |
| 1536 | static void stm32h7_spi_data_idleness(struct stm32_spi *spi, u32 len) |
| 1537 | { |
| 1538 | u32 cfg2_clrb = 0, cfg2_setb = 0; |
| 1539 | |
| 1540 | cfg2_clrb |= STM32H7_SPI_CFG2_MIDI; |
| 1541 | if ((len > 1) && (spi->cur_midi > 0)) { |
| 1542 | u32 sck_period_ns = DIV_ROUND_UP(SPI_1HZ_NS, spi->cur_speed); |
| 1543 | u32 midi = min((u32)DIV_ROUND_UP(spi->cur_midi, sck_period_ns), |
| 1544 | (u32)STM32H7_SPI_CFG2_MIDI >> |
| 1545 | STM32H7_SPI_CFG2_MIDI_SHIFT); |
| 1546 | |
| 1547 | dev_dbg(spi->dev, "period=%dns, midi=%d(=%dns)\n", |
| 1548 | sck_period_ns, midi, midi * sck_period_ns); |
| 1549 | cfg2_setb |= (midi << STM32H7_SPI_CFG2_MIDI_SHIFT) & |
| 1550 | STM32H7_SPI_CFG2_MIDI; |
| 1551 | } |
| 1552 | |
| 1553 | writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CFG2) & |
| 1554 | ~cfg2_clrb) | cfg2_setb, |
| 1555 | spi->base + STM32H7_SPI_CFG2); |
| 1556 | } |
| 1557 | |
/**
 * stm32h7_spi_number_of_data - configure number of data at current transfer
 * @spi: pointer to the spi controller data structure
 * @nb_words: transfer length in data words (not bytes)
 *
 * Programs the TSIZE field of CR2 with @nb_words.
 *
 * Return: 0 on success, -EMSGSIZE if @nb_words exceeds what the TSIZE
 * field can encode.
 */
static int stm32h7_spi_number_of_data(struct stm32_spi *spi, u32 nb_words)
{
	u32 cr2_clrb = 0, cr2_setb = 0;

	if (nb_words <= (STM32H7_SPI_CR2_TSIZE >>
			 STM32H7_SPI_CR2_TSIZE_SHIFT)) {
		cr2_clrb |= STM32H7_SPI_CR2_TSIZE;
		cr2_setb = nb_words << STM32H7_SPI_CR2_TSIZE_SHIFT;
		writel_relaxed((readl_relaxed(spi->base + STM32H7_SPI_CR2) &
				~cr2_clrb) | cr2_setb,
			       spi->base + STM32H7_SPI_CR2);
	} else {
		return -EMSGSIZE;
	}

	return 0;
}
| 1580 | |
/**
 * stm32_spi_transfer_one_setup - common setup to transfer a single
 *				  spi_transfer either using DMA or
 *				  interrupts.
 * @spi: pointer to the spi controller data structure
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 *
 * Configures word size, baud rate, communication mode and (when the variant
 * supports it) data idleness and transfer size, all under the controller
 * spinlock.
 *
 * Return: 0 on success, a negative error code otherwise.
 */
static int stm32_spi_transfer_one_setup(struct stm32_spi *spi,
					struct spi_device *spi_dev,
					struct spi_transfer *transfer)
{
	unsigned long flags;
	unsigned int comm_type;
	int nb_words, ret = 0;
	int mbr;

	spin_lock_irqsave(&spi->lock, flags);

	spi->cur_xferlen = transfer->len;

	spi->cur_bpw = transfer->bits_per_word;
	spi->cfg->set_bpw(spi);

	/* Update spi->cur_speed with real clock speed */
	mbr = stm32_spi_prepare_mbr(spi, transfer->speed_hz,
				    spi->cfg->baud_rate_div_min,
				    spi->cfg->baud_rate_div_max);
	if (mbr < 0) {
		ret = mbr;
		goto out;
	}

	/* Report the actually achieved speed back to the core */
	transfer->speed_hz = spi->cur_speed;
	stm32_spi_set_mbr(spi, mbr);

	comm_type = stm32_spi_communication_type(spi_dev, transfer);
	ret = spi->cfg->set_mode(spi, comm_type);
	if (ret < 0)
		goto out;

	spi->cur_comm = comm_type;

	/* Only provided by variants with data-idleness support (stm32h7) */
	if (spi->cfg->set_data_idleness)
		spi->cfg->set_data_idleness(spi, transfer->len);

	/* Convert the byte length into a number of data words */
	if (spi->cur_bpw <= 8)
		nb_words = transfer->len;
	else if (spi->cur_bpw <= 16)
		nb_words = DIV_ROUND_UP(transfer->len * 8, 16);
	else
		nb_words = DIV_ROUND_UP(transfer->len * 8, 32);

	/* Only provided by variants with a TSIZE field (stm32h7) */
	if (spi->cfg->set_number_of_data) {
		ret = spi->cfg->set_number_of_data(spi, nb_words);
		if (ret < 0)
			goto out;
	}

	dev_dbg(spi->dev, "transfer communication mode set to %d\n",
		spi->cur_comm);
	dev_dbg(spi->dev,
		"data frame of %d-bit, data packet of %d data frames\n",
		spi->cur_bpw, spi->cur_fthlv);
	dev_dbg(spi->dev, "speed set to %dHz\n", spi->cur_speed);
	dev_dbg(spi->dev, "transfer of %d bytes (%d data frames)\n",
		spi->cur_xferlen, nb_words);
	dev_dbg(spi->dev, "dma %s\n",
		(spi->cur_usedma) ? "enabled" : "disabled");

out:
	spin_unlock_irqrestore(&spi->lock, flags);

	return ret;
}
| 1653 | |
/**
 * stm32_spi_transfer_one - transfer a single spi_transfer
 * @master: controller master interface
 * @spi_dev: pointer to the spi device
 * @transfer: pointer to spi transfer
 *
 * It must return 0 if the transfer is finished or 1 if the transfer is still
 * in progress.
 */
static int stm32_spi_transfer_one(struct spi_master *master,
				  struct spi_device *spi_dev,
				  struct spi_transfer *transfer)
{
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	/* Don't do anything on 0 bytes transfers */
	if (transfer->len == 0)
		return 0;

	spi->tx_buf = transfer->tx_buf;
	spi->rx_buf = transfer->rx_buf;
	spi->tx_len = spi->tx_buf ? transfer->len : 0;
	spi->rx_len = spi->rx_buf ? transfer->len : 0;

	/* Decide DMA vs IRQ before setup so setup can log the choice */
	spi->cur_usedma = (master->can_dma &&
			   master->can_dma(master, spi_dev, transfer));

	ret = stm32_spi_transfer_one_setup(spi, spi_dev, transfer);
	if (ret) {
		dev_err(spi->dev, "SPI transfer setup failed\n");
		return ret;
	}

	if (spi->cur_usedma)
		return stm32_spi_transfer_one_dma(spi, transfer);
	else
		return spi->cfg->transfer_one_irq(spi);
}
| 1690 | |
/**
 * stm32_spi_unprepare_msg - relax the hardware
 * @master: controller master interface
 * @msg: pointer to the spi message (unused)
 *
 * Disables the controller through the variant-specific hook once the
 * message is complete.
 *
 * Return: always 0.
 */
static int stm32_spi_unprepare_msg(struct spi_master *master,
				   struct spi_message *msg)
{
	struct stm32_spi *spi = spi_master_get_devdata(master);

	spi->cfg->disable(spi);

	return 0;
}
| 1703 | |
/**
 * stm32f4_spi_config - Configure SPI controller as SPI master
 * @spi: pointer to the spi controller data structure
 *
 * Return: always 0.
 */
static int stm32f4_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32F4_SPI_I2SCFGR,
			   STM32F4_SPI_I2SCFGR_I2SMOD);

	/*
	 * - SS input value high
	 * - transmitter half duplex direction
	 * - Set the master mode (default Motorola mode)
	 * - Consider 1 master/n slaves configuration and
	 *   SS input value is determined by the SSI bit
	 */
	stm32_spi_set_bits(spi, STM32F4_SPI_CR1, STM32F4_SPI_CR1_SSI |
						 STM32F4_SPI_CR1_BIDIOE |
						 STM32F4_SPI_CR1_MSTR |
						 STM32F4_SPI_CR1_SSM);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}
| 1733 | |
/**
 * stm32h7_spi_config - Configure SPI controller as SPI master
 * @spi: pointer to the spi controller data structure
 *
 * Return: always 0.
 */
static int stm32h7_spi_config(struct stm32_spi *spi)
{
	unsigned long flags;

	spin_lock_irqsave(&spi->lock, flags);

	/* Ensure I2SMOD bit is kept cleared */
	stm32_spi_clr_bits(spi, STM32H7_SPI_I2SCFGR,
			   STM32H7_SPI_I2SCFGR_I2SMOD);

	/*
	 * - SS input value high
	 * - transmitter half duplex direction
	 * - automatic communication suspend when RX-Fifo is full
	 */
	stm32_spi_set_bits(spi, STM32H7_SPI_CR1, STM32H7_SPI_CR1_SSI |
						 STM32H7_SPI_CR1_HDDIR |
						 STM32H7_SPI_CR1_MASRX);

	/*
	 * - Set the master mode (default Motorola mode)
	 * - Consider 1 master/n slaves configuration and
	 *   SS input value is determined by the SSI bit
	 * - keep control of all associated GPIOs
	 */
	stm32_spi_set_bits(spi, STM32H7_SPI_CFG2, STM32H7_SPI_CFG2_MASTER |
						  STM32H7_SPI_CFG2_SSM |
						  STM32H7_SPI_CFG2_AFCNTR);

	spin_unlock_irqrestore(&spi->lock, flags);

	return 0;
}
| 1770 | |
/*
 * STM32F4 variant description: register layout, variant hooks and baud rate
 * divisor limits. This variant has no FIFO; .set_data_idleness and
 * .set_number_of_data are intentionally unset (callers NULL-check them).
 */
static const struct stm32_spi_cfg stm32f4_spi_cfg = {
	.regs = &stm32f4_spi_regspec,
	.get_bpw_mask = stm32f4_spi_get_bpw_mask,
	.disable = stm32f4_spi_disable,
	.config = stm32f4_spi_config,
	.set_bpw = stm32f4_spi_set_bpw,
	.set_mode = stm32f4_spi_set_mode,
	.transfer_one_dma_start = stm32f4_spi_transfer_one_dma_start,
	.dma_tx_cb = stm32f4_spi_dma_tx_cb,
	.dma_rx_cb = stm32f4_spi_dma_rx_cb,
	.transfer_one_irq = stm32f4_spi_transfer_one_irq,
	.irq_handler_event = stm32f4_spi_irq_event,
	.irq_handler_thread = stm32f4_spi_irq_thread,
	.baud_rate_div_min = STM32F4_SPI_BR_DIV_MIN,
	.baud_rate_div_max = STM32F4_SPI_BR_DIV_MAX,
	.has_fifo = false,
};
| 1788 | |
/*
 * STM32H7 variant description: has a FIFO, data-idleness and TSIZE support,
 * and uses a single threaded IRQ handler (no .irq_handler_event) with the
 * same DMA callback for both directions.
 */
static const struct stm32_spi_cfg stm32h7_spi_cfg = {
	.regs = &stm32h7_spi_regspec,
	.get_fifo_size = stm32h7_spi_get_fifo_size,
	.get_bpw_mask = stm32h7_spi_get_bpw_mask,
	.disable = stm32h7_spi_disable,
	.config = stm32h7_spi_config,
	.set_bpw = stm32h7_spi_set_bpw,
	.set_mode = stm32h7_spi_set_mode,
	.set_data_idleness = stm32h7_spi_data_idleness,
	.set_number_of_data = stm32h7_spi_number_of_data,
	.transfer_one_dma_start = stm32h7_spi_transfer_one_dma_start,
	.dma_rx_cb = stm32h7_spi_dma_cb,
	.dma_tx_cb = stm32h7_spi_dma_cb,
	.transfer_one_irq = stm32h7_spi_transfer_one_irq,
	.irq_handler_thread = stm32h7_spi_irq_thread,
	.baud_rate_div_min = STM32H7_SPI_MBR_DIV_MIN,
	.baud_rate_div_max = STM32H7_SPI_MBR_DIV_MAX,
	.has_fifo = true,
};
| 1808 | |
/* DT match table: the compatible string selects the variant configuration */
static const struct of_device_id stm32_spi_of_match[] = {
	{ .compatible = "st,stm32h7-spi", .data = (void *)&stm32h7_spi_cfg },
	{ .compatible = "st,stm32f4-spi", .data = (void *)&stm32f4_spi_cfg },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_spi_of_match);
| 1815 | |
| 1816 | static int stm32_spi_probe(struct platform_device *pdev) |
| 1817 | { |
| 1818 | struct spi_master *master; |
| 1819 | struct stm32_spi *spi; |
| 1820 | struct resource *res; |
| 1821 | int i, ret; |
| 1822 | |
| 1823 | master = spi_alloc_master(&pdev->dev, sizeof(struct stm32_spi)); |
| 1824 | if (!master) { |
| 1825 | dev_err(&pdev->dev, "spi master allocation failed\n"); |
| 1826 | return -ENOMEM; |
| 1827 | } |
| 1828 | platform_set_drvdata(pdev, master); |
| 1829 | |
| 1830 | spi = spi_master_get_devdata(master); |
| 1831 | spi->dev = &pdev->dev; |
| 1832 | spi->master = master; |
| 1833 | spin_lock_init(&spi->lock); |
| 1834 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1835 | spi->cfg = (const struct stm32_spi_cfg *) |
| 1836 | of_match_device(pdev->dev.driver->of_match_table, |
| 1837 | &pdev->dev)->data; |
| 1838 | |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1839 | res = platform_get_resource(pdev, IORESOURCE_MEM, 0); |
| 1840 | spi->base = devm_ioremap_resource(&pdev->dev, res); |
| 1841 | if (IS_ERR(spi->base)) { |
| 1842 | ret = PTR_ERR(spi->base); |
| 1843 | goto err_master_put; |
| 1844 | } |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1845 | |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1846 | spi->phys_addr = (dma_addr_t)res->start; |
| 1847 | |
| 1848 | spi->irq = platform_get_irq(pdev, 0); |
| 1849 | if (spi->irq <= 0) { |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1850 | ret = spi->irq; |
| 1851 | if (ret != -EPROBE_DEFER) |
| 1852 | dev_err(&pdev->dev, "failed to get irq: %d\n", ret); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1853 | goto err_master_put; |
| 1854 | } |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1855 | ret = devm_request_threaded_irq(&pdev->dev, spi->irq, |
| 1856 | spi->cfg->irq_handler_event, |
| 1857 | spi->cfg->irq_handler_thread, |
| 1858 | IRQF_ONESHOT, pdev->name, master); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1859 | if (ret) { |
| 1860 | dev_err(&pdev->dev, "irq%d request failed: %d\n", spi->irq, |
| 1861 | ret); |
| 1862 | goto err_master_put; |
| 1863 | } |
| 1864 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1865 | spi->clk = devm_clk_get(&pdev->dev, NULL); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1866 | if (IS_ERR(spi->clk)) { |
| 1867 | ret = PTR_ERR(spi->clk); |
| 1868 | dev_err(&pdev->dev, "clk get failed: %d\n", ret); |
| 1869 | goto err_master_put; |
| 1870 | } |
| 1871 | |
| 1872 | ret = clk_prepare_enable(spi->clk); |
| 1873 | if (ret) { |
| 1874 | dev_err(&pdev->dev, "clk enable failed: %d\n", ret); |
| 1875 | goto err_master_put; |
| 1876 | } |
| 1877 | spi->clk_rate = clk_get_rate(spi->clk); |
| 1878 | if (!spi->clk_rate) { |
| 1879 | dev_err(&pdev->dev, "clk rate = 0\n"); |
| 1880 | ret = -EINVAL; |
| 1881 | goto err_clk_disable; |
| 1882 | } |
| 1883 | |
| 1884 | spi->rst = devm_reset_control_get_exclusive(&pdev->dev, NULL); |
| 1885 | if (!IS_ERR(spi->rst)) { |
| 1886 | reset_control_assert(spi->rst); |
| 1887 | udelay(2); |
| 1888 | reset_control_deassert(spi->rst); |
| 1889 | } |
| 1890 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1891 | if (spi->cfg->has_fifo) |
| 1892 | spi->fifo_size = spi->cfg->get_fifo_size(spi); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1893 | |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1894 | ret = spi->cfg->config(spi); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1895 | if (ret) { |
| 1896 | dev_err(&pdev->dev, "controller configuration failed: %d\n", |
| 1897 | ret); |
| 1898 | goto err_clk_disable; |
| 1899 | } |
| 1900 | |
| 1901 | master->dev.of_node = pdev->dev.of_node; |
| 1902 | master->auto_runtime_pm = true; |
| 1903 | master->bus_num = pdev->id; |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 1904 | master->mode_bits = SPI_CPHA | SPI_CPOL | SPI_CS_HIGH | SPI_LSB_FIRST | |
| 1905 | SPI_3WIRE; |
| 1906 | master->bits_per_word_mask = spi->cfg->get_bpw_mask(spi); |
| 1907 | master->max_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_min; |
| 1908 | master->min_speed_hz = spi->clk_rate / spi->cfg->baud_rate_div_max; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1909 | master->setup = stm32_spi_setup; |
| 1910 | master->prepare_message = stm32_spi_prepare_msg; |
| 1911 | master->transfer_one = stm32_spi_transfer_one; |
| 1912 | master->unprepare_message = stm32_spi_unprepare_msg; |
| 1913 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1914 | spi->dma_tx = dma_request_chan(spi->dev, "tx"); |
| 1915 | if (IS_ERR(spi->dma_tx)) { |
| 1916 | ret = PTR_ERR(spi->dma_tx); |
| 1917 | spi->dma_tx = NULL; |
| 1918 | if (ret == -EPROBE_DEFER) |
| 1919 | goto err_clk_disable; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1920 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1921 | dev_warn(&pdev->dev, "failed to request tx dma channel\n"); |
| 1922 | } else { |
| 1923 | master->dma_tx = spi->dma_tx; |
| 1924 | } |
| 1925 | |
| 1926 | spi->dma_rx = dma_request_chan(spi->dev, "rx"); |
| 1927 | if (IS_ERR(spi->dma_rx)) { |
| 1928 | ret = PTR_ERR(spi->dma_rx); |
| 1929 | spi->dma_rx = NULL; |
| 1930 | if (ret == -EPROBE_DEFER) |
| 1931 | goto err_dma_release; |
| 1932 | |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1933 | dev_warn(&pdev->dev, "failed to request rx dma channel\n"); |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1934 | } else { |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1935 | master->dma_rx = spi->dma_rx; |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1936 | } |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1937 | |
| 1938 | if (spi->dma_tx || spi->dma_rx) |
| 1939 | master->can_dma = stm32_spi_can_dma; |
| 1940 | |
| 1941 | pm_runtime_set_active(&pdev->dev); |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1942 | pm_runtime_get_noresume(&pdev->dev); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1943 | pm_runtime_enable(&pdev->dev); |
| 1944 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1945 | ret = spi_register_master(master); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1946 | if (ret) { |
| 1947 | dev_err(&pdev->dev, "spi master registration failed: %d\n", |
| 1948 | ret); |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1949 | goto err_pm_disable; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1950 | } |
| 1951 | |
| 1952 | if (!master->cs_gpios) { |
| 1953 | dev_err(&pdev->dev, "no CS gpios available\n"); |
| 1954 | ret = -EINVAL; |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1955 | goto err_pm_disable; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1956 | } |
| 1957 | |
| 1958 | for (i = 0; i < master->num_chipselect; i++) { |
| 1959 | if (!gpio_is_valid(master->cs_gpios[i])) { |
| 1960 | dev_err(&pdev->dev, "%i is not a valid gpio\n", |
| 1961 | master->cs_gpios[i]); |
| 1962 | ret = -EINVAL; |
| 1963 | goto err_dma_release; |
| 1964 | } |
| 1965 | |
| 1966 | ret = devm_gpio_request(&pdev->dev, master->cs_gpios[i], |
| 1967 | DRIVER_NAME); |
| 1968 | if (ret) { |
| 1969 | dev_err(&pdev->dev, "can't get CS gpio %i\n", |
| 1970 | master->cs_gpios[i]); |
| 1971 | goto err_dma_release; |
| 1972 | } |
| 1973 | } |
| 1974 | |
| 1975 | dev_info(&pdev->dev, "driver initialized\n"); |
| 1976 | |
| 1977 | return 0; |
| 1978 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 1979 | err_pm_disable: |
| 1980 | pm_runtime_disable(&pdev->dev); |
| 1981 | pm_runtime_put_noidle(&pdev->dev); |
| 1982 | pm_runtime_set_suspended(&pdev->dev); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1983 | err_dma_release: |
| 1984 | if (spi->dma_tx) |
| 1985 | dma_release_channel(spi->dma_tx); |
| 1986 | if (spi->dma_rx) |
| 1987 | dma_release_channel(spi->dma_rx); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1988 | err_clk_disable: |
| 1989 | clk_disable_unprepare(spi->clk); |
| 1990 | err_master_put: |
| 1991 | spi_master_put(master); |
| 1992 | |
| 1993 | return ret; |
| 1994 | } |
| 1995 | |
| 1996 | static int stm32_spi_remove(struct platform_device *pdev) |
| 1997 | { |
| 1998 | struct spi_master *master = platform_get_drvdata(pdev); |
| 1999 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 2000 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 2001 | pm_runtime_get_sync(&pdev->dev); |
| 2002 | |
| 2003 | spi_unregister_master(master); |
David Brazdil | 0f672f6 | 2019-12-10 10:32:29 +0000 | [diff] [blame] | 2004 | spi->cfg->disable(spi); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 2005 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 2006 | pm_runtime_disable(&pdev->dev); |
| 2007 | pm_runtime_put_noidle(&pdev->dev); |
| 2008 | pm_runtime_set_suspended(&pdev->dev); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 2009 | if (master->dma_tx) |
| 2010 | dma_release_channel(master->dma_tx); |
| 2011 | if (master->dma_rx) |
| 2012 | dma_release_channel(master->dma_rx); |
| 2013 | |
| 2014 | clk_disable_unprepare(spi->clk); |
| 2015 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 2016 | |
| 2017 | pinctrl_pm_select_sleep_state(&pdev->dev); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 2018 | |
| 2019 | return 0; |
| 2020 | } |
| 2021 | |
| 2022 | #ifdef CONFIG_PM |
| 2023 | static int stm32_spi_runtime_suspend(struct device *dev) |
| 2024 | { |
| 2025 | struct spi_master *master = dev_get_drvdata(dev); |
| 2026 | struct stm32_spi *spi = spi_master_get_devdata(master); |
| 2027 | |
| 2028 | clk_disable_unprepare(spi->clk); |
| 2029 | |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 2030 | return pinctrl_pm_select_sleep_state(dev); |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 2031 | } |
| 2032 | |
| 2033 | static int stm32_spi_runtime_resume(struct device *dev) |
| 2034 | { |
| 2035 | struct spi_master *master = dev_get_drvdata(dev); |
| 2036 | struct stm32_spi *spi = spi_master_get_devdata(master); |
Olivier Deprez | 0e64123 | 2021-09-23 10:07:05 +0200 | [diff] [blame^] | 2037 | int ret; |
| 2038 | |
| 2039 | ret = pinctrl_pm_select_default_state(dev); |
| 2040 | if (ret) |
| 2041 | return ret; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 2042 | |
| 2043 | return clk_prepare_enable(spi->clk); |
| 2044 | } |
| 2045 | #endif |
| 2046 | |
| 2047 | #ifdef CONFIG_PM_SLEEP |
static int stm32_spi_suspend(struct device *dev)
{
	int err;

	/* Quiesce the SPI core first; bail out if it cannot be stopped. */
	err = spi_master_suspend(dev_get_drvdata(dev));
	if (err)
		return err;

	/* Then force the controller into its runtime-suspended state. */
	return pm_runtime_force_suspend(dev);
}
| 2059 | |
/*
 * System resume handler: power the controller back up, let the SPI core
 * resume its queue, then reapply the controller configuration which may
 * have been lost while suspended.
 */
static int stm32_spi_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct stm32_spi *spi = spi_master_get_devdata(master);
	int ret;

	ret = pm_runtime_force_resume(dev);
	if (ret)
		return ret;

	ret = spi_master_resume(master);
	if (ret) {
		/*
		 * Undo the clock enable performed via force_resume's call to
		 * the runtime resume callback above.
		 */
		clk_disable_unprepare(spi->clk);
		return ret;
	}

	/* Take a runtime PM reference so the device is powered for config. */
	ret = pm_runtime_get_sync(dev);
	if (ret < 0) {
		/* get_sync raised the usage count even on failure; drop it. */
		pm_runtime_put_noidle(dev);
		dev_err(dev, "Unable to power device:%d\n", ret);
		return ret;
	}

	spi->cfg->config(spi);

	/* Release the reference and allow autosuspend to kick in again. */
	pm_runtime_mark_last_busy(dev);
	pm_runtime_put_autosuspend(dev);

	return 0;
}
| 2090 | #endif |
| 2091 | |
/* System sleep and runtime PM callbacks; no runtime idle hook is needed. */
static const struct dev_pm_ops stm32_spi_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(stm32_spi_suspend, stm32_spi_resume)
	SET_RUNTIME_PM_OPS(stm32_spi_runtime_suspend,
			   stm32_spi_runtime_resume, NULL)
};
| 2097 | |
/* Platform driver glue: bound to compatibles listed in stm32_spi_of_match. */
static struct platform_driver stm32_spi_driver = {
	.probe = stm32_spi_probe,
	.remove = stm32_spi_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_spi_pm_ops,
		.of_match_table = stm32_spi_of_match,
	},
};
| 2107 | |
/* Standard module registration and metadata. */
module_platform_driver(stm32_spi_driver);

MODULE_ALIAS("platform:" DRIVER_NAME);
MODULE_DESCRIPTION("STMicroelectronics STM32 SPI Controller driver");
MODULE_AUTHOR("Amelie Delaunay <amelie.delaunay@st.com>");
MODULE_LICENSE("GPL v2");