1 // SPDX-License-Identifier: GPL-2.0
3 * Copyright (C) STMicroelectronics 2018
4 * Author: Christophe Kerello <christophe.kerello@st.com>
8 #include <linux/dmaengine.h>
9 #include <linux/dma-mapping.h>
10 #include <linux/errno.h>
11 #include <linux/interrupt.h>
12 #include <linux/iopoll.h>
13 #include <linux/module.h>
14 #include <linux/mtd/rawnand.h>
15 #include <linux/pinctrl/consumer.h>
16 #include <linux/platform_device.h>
17 #include <linux/reset.h>
19 /* Bad block marker length */
20 #define FMC2_BBM_LEN 2
23 #define FMC2_ECC_STEP_SIZE 512
25 /* BCHDSRx registers length */
26 #define FMC2_BCHDSRS_LEN 20
29 #define FMC2_HECCR_LEN 4
31 /* Max requests done for an 8k NAND page size */
32 #define FMC2_MAX_SG 16
37 /* Max ECC buffer length */
38 #define FMC2_MAX_ECC_BUF_LEN (FMC2_BCHDSRS_LEN * FMC2_MAX_SG)
40 #define FMC2_TIMEOUT_MS 1000
45 #define FMC2_TSYNC 3000
46 #define FMC2_PCR_TIMING_MASK 0xf
47 #define FMC2_PMEM_PATT_TIMING_MASK 0xff
49 /* FMC2 Controller Registers */
53 #define FMC2_PMEM 0x88
54 #define FMC2_PATT 0x8c
55 #define FMC2_HECCR 0x94
56 #define FMC2_CSQCR 0x200
57 #define FMC2_CSQCFGR1 0x204
58 #define FMC2_CSQCFGR2 0x208
59 #define FMC2_CSQCFGR3 0x20c
60 #define FMC2_CSQAR1 0x210
61 #define FMC2_CSQAR2 0x214
62 #define FMC2_CSQIER 0x220
63 #define FMC2_CSQISR 0x224
64 #define FMC2_CSQICR 0x228
65 #define FMC2_CSQEMSR 0x230
66 #define FMC2_BCHIER 0x250
67 #define FMC2_BCHISR 0x254
68 #define FMC2_BCHICR 0x258
69 #define FMC2_BCHPBR1 0x260
70 #define FMC2_BCHPBR2 0x264
71 #define FMC2_BCHPBR3 0x268
72 #define FMC2_BCHPBR4 0x26c
73 #define FMC2_BCHDSR0 0x27c
74 #define FMC2_BCHDSR1 0x280
75 #define FMC2_BCHDSR2 0x284
76 #define FMC2_BCHDSR3 0x288
77 #define FMC2_BCHDSR4 0x28c
79 /* Register: FMC2_BCR1 */
80 #define FMC2_BCR1_FMC2EN BIT(31)
82 /* Register: FMC2_PCR */
83 #define FMC2_PCR_PWAITEN BIT(1)
84 #define FMC2_PCR_PBKEN BIT(2)
85 #define FMC2_PCR_PWID_MASK GENMASK(5, 4)
86 #define FMC2_PCR_PWID(x) (((x) & 0x3) << 4)
87 #define FMC2_PCR_PWID_BUSWIDTH_8 0
88 #define FMC2_PCR_PWID_BUSWIDTH_16 1
89 #define FMC2_PCR_ECCEN BIT(6)
90 #define FMC2_PCR_ECCALG BIT(8)
91 #define FMC2_PCR_TCLR_MASK GENMASK(12, 9)
92 #define FMC2_PCR_TCLR(x) (((x) & 0xf) << 9)
93 #define FMC2_PCR_TCLR_DEFAULT 0xf
94 #define FMC2_PCR_TAR_MASK GENMASK(16, 13)
95 #define FMC2_PCR_TAR(x) (((x) & 0xf) << 13)
96 #define FMC2_PCR_TAR_DEFAULT 0xf
97 #define FMC2_PCR_ECCSS_MASK GENMASK(19, 17)
98 #define FMC2_PCR_ECCSS(x) (((x) & 0x7) << 17)
99 #define FMC2_PCR_ECCSS_512 1
100 #define FMC2_PCR_ECCSS_2048 3
101 #define FMC2_PCR_BCHECC BIT(24)
102 #define FMC2_PCR_WEN BIT(25)
104 /* Register: FMC2_SR */
105 #define FMC2_SR_NWRF BIT(6)
107 /* Register: FMC2_PMEM */
108 #define FMC2_PMEM_MEMSET(x) (((x) & 0xff) << 0)
109 #define FMC2_PMEM_MEMWAIT(x) (((x) & 0xff) << 8)
110 #define FMC2_PMEM_MEMHOLD(x) (((x) & 0xff) << 16)
111 #define FMC2_PMEM_MEMHIZ(x) (((x) & 0xff) << 24)
112 #define FMC2_PMEM_DEFAULT 0x0a0a0a0a
114 /* Register: FMC2_PATT */
115 #define FMC2_PATT_ATTSET(x) (((x) & 0xff) << 0)
116 #define FMC2_PATT_ATTWAIT(x) (((x) & 0xff) << 8)
117 #define FMC2_PATT_ATTHOLD(x) (((x) & 0xff) << 16)
118 #define FMC2_PATT_ATTHIZ(x) (((x) & 0xff) << 24)
119 #define FMC2_PATT_DEFAULT 0x0a0a0a0a
121 /* Register: FMC2_CSQCR */
122 #define FMC2_CSQCR_CSQSTART BIT(0)
124 /* Register: FMC2_CSQCFGR1 */
125 #define FMC2_CSQCFGR1_CMD2EN BIT(1)
126 #define FMC2_CSQCFGR1_DMADEN BIT(2)
127 #define FMC2_CSQCFGR1_ACYNBR(x) (((x) & 0x7) << 4)
128 #define FMC2_CSQCFGR1_CMD1(x) (((x) & 0xff) << 8)
129 #define FMC2_CSQCFGR1_CMD2(x) (((x) & 0xff) << 16)
130 #define FMC2_CSQCFGR1_CMD1T BIT(24)
131 #define FMC2_CSQCFGR1_CMD2T BIT(25)
133 /* Register: FMC2_CSQCFGR2 */
134 #define FMC2_CSQCFGR2_SQSDTEN BIT(0)
135 #define FMC2_CSQCFGR2_RCMD2EN BIT(1)
136 #define FMC2_CSQCFGR2_DMASEN BIT(2)
137 #define FMC2_CSQCFGR2_RCMD1(x) (((x) & 0xff) << 8)
138 #define FMC2_CSQCFGR2_RCMD2(x) (((x) & 0xff) << 16)
139 #define FMC2_CSQCFGR2_RCMD1T BIT(24)
140 #define FMC2_CSQCFGR2_RCMD2T BIT(25)
142 /* Register: FMC2_CSQCFGR3 */
143 #define FMC2_CSQCFGR3_SNBR(x) (((x) & 0x1f) << 8)
144 #define FMC2_CSQCFGR3_AC1T BIT(16)
145 #define FMC2_CSQCFGR3_AC2T BIT(17)
146 #define FMC2_CSQCFGR3_AC3T BIT(18)
147 #define FMC2_CSQCFGR3_AC4T BIT(19)
148 #define FMC2_CSQCFGR3_AC5T BIT(20)
149 #define FMC2_CSQCFGR3_SDT BIT(21)
150 #define FMC2_CSQCFGR3_RAC1T BIT(22)
151 #define FMC2_CSQCFGR3_RAC2T BIT(23)
153 /* Register: FMC2_CSQCAR1 */
154 #define FMC2_CSQCAR1_ADDC1(x) (((x) & 0xff) << 0)
155 #define FMC2_CSQCAR1_ADDC2(x) (((x) & 0xff) << 8)
156 #define FMC2_CSQCAR1_ADDC3(x) (((x) & 0xff) << 16)
157 #define FMC2_CSQCAR1_ADDC4(x) (((x) & 0xff) << 24)
159 /* Register: FMC2_CSQCAR2 */
160 #define FMC2_CSQCAR2_ADDC5(x) (((x) & 0xff) << 0)
161 #define FMC2_CSQCAR2_NANDCEN(x) (((x) & 0x3) << 10)
162 #define FMC2_CSQCAR2_SAO(x) (((x) & 0xffff) << 16)
164 /* Register: FMC2_CSQIER */
165 #define FMC2_CSQIER_TCIE BIT(0)
167 /* Register: FMC2_CSQICR */
168 #define FMC2_CSQICR_CLEAR_IRQ GENMASK(4, 0)
170 /* Register: FMC2_CSQEMSR */
171 #define FMC2_CSQEMSR_SEM GENMASK(15, 0)
173 /* Register: FMC2_BCHIER */
174 #define FMC2_BCHIER_DERIE BIT(1)
175 #define FMC2_BCHIER_EPBRIE BIT(4)
177 /* Register: FMC2_BCHICR */
178 #define FMC2_BCHICR_CLEAR_IRQ GENMASK(4, 0)
180 /* Register: FMC2_BCHDSR0 */
181 #define FMC2_BCHDSR0_DUE BIT(0)
182 #define FMC2_BCHDSR0_DEF BIT(1)
183 #define FMC2_BCHDSR0_DEN_MASK GENMASK(7, 4)
184 #define FMC2_BCHDSR0_DEN_SHIFT 4
186 /* Register: FMC2_BCHDSR1 */
187 #define FMC2_BCHDSR1_EBP1_MASK GENMASK(12, 0)
188 #define FMC2_BCHDSR1_EBP2_MASK GENMASK(28, 16)
189 #define FMC2_BCHDSR1_EBP2_SHIFT 16
191 /* Register: FMC2_BCHDSR2 */
192 #define FMC2_BCHDSR2_EBP3_MASK GENMASK(12, 0)
193 #define FMC2_BCHDSR2_EBP4_MASK GENMASK(28, 16)
194 #define FMC2_BCHDSR2_EBP4_SHIFT 16
196 /* Register: FMC2_BCHDSR3 */
197 #define FMC2_BCHDSR3_EBP5_MASK GENMASK(12, 0)
198 #define FMC2_BCHDSR3_EBP6_MASK GENMASK(28, 16)
199 #define FMC2_BCHDSR3_EBP6_SHIFT 16
201 /* Register: FMC2_BCHDSR4 */
202 #define FMC2_BCHDSR4_EBP7_MASK GENMASK(12, 0)
203 #define FMC2_BCHDSR4_EBP8_MASK GENMASK(28, 16)
204 #define FMC2_BCHDSR4_EBP8_SHIFT 16
/* ECC strength selector (Hamming/BCH4/BCH8); enumerator list elided in this view. */
206 enum stm32_fmc2_ecc {
/*
 * Tracks which interrupt source is currently armed (sequencer or BCH block)
 * so the shared IRQ handler knows which one to acknowledge/disable.
 */
212 enum stm32_fmc2_irq_state {
213 FMC2_IRQ_UNKNOWN = 0,
/* Per-chip NAND timing parameters programmed into PCR/PMEM/PATT; fields elided in this view. */
218 struct stm32_fmc2_timings {
/*
 * Per-NAND-device state: the generic MTD nand_chip, its resolved timings,
 * and the chip-select line(s) this device occupies on the controller.
 */
229 struct stm32_fmc2_nand {
230 struct nand_chip chip;
231 struct stm32_fmc2_timings timings;
/* CS lines used by this device; values index the controller's per-CS arrays */
233 int cs_used[FMC2_MAX_CE];
/* Map a generic nand_chip back to its enclosing stm32_fmc2_nand wrapper. */
236 static inline struct stm32_fmc2_nand *to_fmc2_nand(struct nand_chip *chip)
238 return container_of(chip, struct stm32_fmc2_nand, chip);
/*
 * Controller-wide state: base nand_controller, MMIO windows per chip select,
 * DMA channels/scatter-lists for page and ECC-status transfers, and the
 * completions used to synchronize with the IRQ handler and DMA callbacks.
 * Some fields (dev, cs_sel, ecc_buf, dma_ecc_len, irq_state, ...) are elided
 * in this view but referenced by the functions below.
 */
241 struct stm32_fmc2_nfc {
242 struct nand_controller base;
243 struct stm32_fmc2_nand nand;
/* MMIO bases: controller registers, then per-CS data/cmd/addr windows */
245 void __iomem *io_base;
246 void __iomem *data_base[FMC2_MAX_CE];
247 void __iomem *cmd_base[FMC2_MAX_CE];
248 void __iomem *addr_base[FMC2_MAX_CE];
/* Physical addresses handed to the DMA engine as slave addresses */
249 phys_addr_t io_phys_addr;
250 phys_addr_t data_phys_addr[FMC2_MAX_CE];
/* DMA channels: data tx/rx, plus a channel that captures ECC status registers */
254 struct dma_chan *dma_tx_ch;
255 struct dma_chan *dma_rx_ch;
256 struct dma_chan *dma_ecc_ch;
257 struct sg_table dma_data_sg;
258 struct sg_table dma_ecc_sg;
/* Signalled by stm32_fmc2_irq() and the DMA completion callbacks */
262 struct completion complete;
263 struct completion dma_data_complete;
264 struct completion dma_ecc_complete;
/* Map a generic nand_controller back to its enclosing stm32_fmc2_nfc. */
270 static inline struct stm32_fmc2_nfc *to_stm32_nfc(struct nand_controller *base)
272 return container_of(base, struct stm32_fmc2_nfc, base);
275 /* Timings configuration */
/*
 * Program the per-chip NAND timings into PCR (tclr/tar) and into the common
 * (PMEM) and attribute (PATT) bank timing registers.
 * NOTE(review): declarations of pmem/patt and some braces appear elided in
 * this view.
 */
276 static void stm32_fmc2_timings_init(struct nand_chip *chip)
278 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
279 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
280 struct stm32_fmc2_timings *timings = &nand->timings;
/* Read-modify-write PCR so unrelated bit fields are preserved */
281 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
284 /* Set tclr/tar timings */
285 pcr &= ~FMC2_PCR_TCLR_MASK;
286 pcr |= FMC2_PCR_TCLR(timings->tclr);
287 pcr &= ~FMC2_PCR_TAR_MASK;
288 pcr |= FMC2_PCR_TAR(timings->tar);
290 /* Set tset/twait/thold/thiz timings in common bank */
291 pmem = FMC2_PMEM_MEMSET(timings->tset_mem);
292 pmem |= FMC2_PMEM_MEMWAIT(timings->twait);
293 pmem |= FMC2_PMEM_MEMHOLD(timings->thold_mem);
294 pmem |= FMC2_PMEM_MEMHIZ(timings->thiz);
296 /* Set tset/twait/thold/thiz timings in attribute bank */
297 patt = FMC2_PATT_ATTSET(timings->tset_att);
298 patt |= FMC2_PATT_ATTWAIT(timings->twait);
299 patt |= FMC2_PATT_ATTHOLD(timings->thold_att);
300 patt |= FMC2_PATT_ATTHIZ(timings->thiz);
302 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
303 writel_relaxed(pmem, fmc2->io_base + FMC2_PMEM);
304 writel_relaxed(patt, fmc2->io_base + FMC2_PATT);
307 /* Controller configuration */
/*
 * Configure PCR for the selected chip: ECC algorithm (Hamming by default,
 * BCH4/BCH8 when requested), bus width, and a fixed 512-byte ECC sector size.
 */
308 static void stm32_fmc2_setup(struct nand_chip *chip)
310 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
311 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
313 /* Configure ECC algorithm (default configuration is Hamming) */
314 pcr &= ~FMC2_PCR_ECCALG;
315 pcr &= ~FMC2_PCR_BCHECC;
316 if (chip->ecc.strength == FMC2_ECC_BCH8) {
/* BCH8 needs both the BCH algorithm bit and the BCH8 strength bit */
317 pcr |= FMC2_PCR_ECCALG;
318 pcr |= FMC2_PCR_BCHECC;
319 } else if (chip->ecc.strength == FMC2_ECC_BCH4) {
320 pcr |= FMC2_PCR_ECCALG;
/* Default bus width is 8-bit; switch to 16-bit if the chip requires it */
324 pcr &= ~FMC2_PCR_PWID_MASK;
325 if (chip->options & NAND_BUSWIDTH_16)
326 pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
328 /* Set ECC sector size */
329 pcr &= ~FMC2_PCR_ECCSS_MASK;
330 pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_512);
332 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
/*
 * Select the target chip: skip reconfiguration if the CS is already active,
 * otherwise reprogram the controller/timings and (if DMA channels exist)
 * reconfigure the data and ECC-status DMA slave settings for the new CS.
 * NOTE(review): declaration of ret, early returns and error paths appear
 * elided in this view.
 */
336 static int stm32_fmc2_select_chip(struct nand_chip *chip, int chipnr)
338 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
339 struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
340 struct dma_slave_config dma_cfg;
/* Nothing to do if this CS is already selected */
343 if (nand->cs_used[chipnr] == fmc2->cs_sel)
346 fmc2->cs_sel = nand->cs_used[chipnr];
348 /* FMC2 setup routine */
349 stm32_fmc2_setup(chip);
352 stm32_fmc2_timings_init(chip);
/* Point the data DMA channels at the new CS's data window */
354 if (fmc2->dma_tx_ch && fmc2->dma_rx_ch) {
355 memset(&dma_cfg, 0, sizeof(dma_cfg));
356 dma_cfg.src_addr = fmc2->data_phys_addr[fmc2->cs_sel];
357 dma_cfg.dst_addr = fmc2->data_phys_addr[fmc2->cs_sel];
358 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
359 dma_cfg.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
360 dma_cfg.src_maxburst = 32;
361 dma_cfg.dst_maxburst = 32;
363 ret = dmaengine_slave_config(fmc2->dma_tx_ch, &dma_cfg);
365 dev_err(fmc2->dev, "tx DMA engine slave config failed\n");
369 ret = dmaengine_slave_config(fmc2->dma_rx_ch, &dma_cfg);
371 dev_err(fmc2->dev, "rx DMA engine slave config failed\n");
376 if (fmc2->dma_ecc_ch) {
/* ECC status source depends on the algorithm: */
378 * Hamming: we read HECCR register
379 * BCH4/BCH8: we read BCHDSRSx registers
381 memset(&dma_cfg, 0, sizeof(dma_cfg));
382 dma_cfg.src_addr = fmc2->io_phys_addr;
383 dma_cfg.src_addr += chip->ecc.strength == FMC2_ECC_HAM ?
384 FMC2_HECCR : FMC2_BCHDSR0;
385 dma_cfg.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
387 ret = dmaengine_slave_config(fmc2->dma_ecc_ch, &dma_cfg);
389 dev_err(fmc2->dev, "ECC DMA engine slave config failed\n");
393 /* Calculate ECC length needed for one sector */
394 fmc2->dma_ecc_len = chip->ecc.strength == FMC2_ECC_HAM ?
395 FMC2_HECCR_LEN : FMC2_BCHDSRS_LEN;
401 /* Set bus width to 16-bit or 8-bit */
/* When @set is true PWID is programmed for 16-bit accesses; false yields 8-bit. */
402 static void stm32_fmc2_set_buswidth_16(struct stm32_fmc2_nfc *fmc2, bool set)
404 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
/* Clearing PWID leaves the controller in 8-bit mode (PWID == 0) */
406 pcr &= ~FMC2_PCR_PWID_MASK;
408 pcr |= FMC2_PCR_PWID(FMC2_PCR_PWID_BUSWIDTH_16);
409 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
412 /* Enable/disable ECC */
/*
 * NOTE(review): this helper uses the non-relaxed readl/writel accessors,
 * unlike the rest of the file — presumably for ordering guarantees; confirm
 * before changing.
 */
413 static void stm32_fmc2_set_ecc(struct stm32_fmc2_nfc *fmc2, bool enable)
415 u32 pcr = readl(fmc2->io_base + FMC2_PCR);
417 pcr &= ~FMC2_PCR_ECCEN;
419 pcr |= FMC2_PCR_ECCEN;
420 writel(pcr, fmc2->io_base + FMC2_PCR);
423 /* Enable irq sources when the sequencer is used */
/* Arms the transfer-complete interrupt and records the SEQ irq state. */
424 static inline void stm32_fmc2_enable_seq_irq(struct stm32_fmc2_nfc *fmc2)
426 u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
428 csqier |= FMC2_CSQIER_TCIE;
/* State is set before enabling so the handler sees a consistent value */
430 fmc2->irq_state = FMC2_IRQ_SEQ;
432 writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
435 /* Disable irq sources when the sequencer is used */
/* Masks the transfer-complete interrupt and resets the irq state. */
436 static inline void stm32_fmc2_disable_seq_irq(struct stm32_fmc2_nfc *fmc2)
438 u32 csqier = readl_relaxed(fmc2->io_base + FMC2_CSQIER);
440 csqier &= ~FMC2_CSQIER_TCIE;
442 writel_relaxed(csqier, fmc2->io_base + FMC2_CSQIER);
444 fmc2->irq_state = FMC2_IRQ_UNKNOWN;
447 /* Clear irq sources when the sequencer is used */
/* Write-to-clear of all pending sequencer interrupt flags. */
448 static inline void stm32_fmc2_clear_seq_irq(struct stm32_fmc2_nfc *fmc2)
450 writel_relaxed(FMC2_CSQICR_CLEAR_IRQ, fmc2->io_base + FMC2_CSQICR);
453 /* Enable irq sources when the BCH block is used */
/*
 * Writes: parity-ready interrupt (EPBRIE); reads: decoding-error interrupt
 * (DERIE). Records the BCH irq state for the shared handler.
 */
454 static inline void stm32_fmc2_enable_bch_irq(struct stm32_fmc2_nfc *fmc2,
457 u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
459 if (mode == NAND_ECC_WRITE)
460 bchier |= FMC2_BCHIER_EPBRIE;
462 bchier |= FMC2_BCHIER_DERIE;
464 fmc2->irq_state = FMC2_IRQ_BCH;
466 writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
469 /* Disable irq sources when the BCH block is used */
/* Masks both BCH interrupt sources and resets the irq state. */
470 static inline void stm32_fmc2_disable_bch_irq(struct stm32_fmc2_nfc *fmc2)
472 u32 bchier = readl_relaxed(fmc2->io_base + FMC2_BCHIER);
474 bchier &= ~FMC2_BCHIER_DERIE;
475 bchier &= ~FMC2_BCHIER_EPBRIE;
477 writel_relaxed(bchier, fmc2->io_base + FMC2_BCHIER);
479 fmc2->irq_state = FMC2_IRQ_UNKNOWN;
482 /* Clear irq sources when the BCH block is used */
/* Write-to-clear of all pending BCH interrupt flags. */
483 static inline void stm32_fmc2_clear_bch_irq(struct stm32_fmc2_nfc *fmc2)
485 writel_relaxed(FMC2_BCHICR_CLEAR_IRQ, fmc2->io_base + FMC2_BCHICR);
489 * Enable ECC logic and reset syndrome/parity bits previously calculated
490 * Syndrome/parity bits are cleared by setting the ECCEN bit to 0
/*
 * ecc.hwctl hook: toggle ECCEN off/on to reset the engine, and for BCH set
 * the WEN (write) direction bit, clear stale BCH irqs and arm the completion
 * that bch_calculate()/bch_correct() will wait on.
 */
492 static void stm32_fmc2_hwctl(struct nand_chip *chip, int mode)
494 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
/* Dropping ECCEN clears the previously computed syndrome/parity bits */
496 stm32_fmc2_set_ecc(fmc2, false);
498 if (chip->ecc.strength != FMC2_ECC_HAM) {
499 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
501 if (mode == NAND_ECC_WRITE)
504 pcr &= ~FMC2_PCR_WEN;
505 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
507 reinit_completion(&fmc2->complete);
508 stm32_fmc2_clear_bch_irq(fmc2);
509 stm32_fmc2_enable_bch_irq(fmc2, mode);
512 stm32_fmc2_set_ecc(fmc2, true);
516 * ECC Hamming calculation
517 * ECC is 3 bytes for 512 bytes of data (supports error correction up to
/* Unpack the 24-bit Hamming code from the HECCR status word into 3 ECC bytes. */
520 static inline void stm32_fmc2_ham_set_ecc(const u32 ecc_sta, u8 *ecc)
523 ecc[1] = ecc_sta >> 8;
524 ecc[2] = ecc_sta >> 16;
/*
 * ecc.calculate hook (Hamming): poll SR.NWRF until the controller has
 * finished, read HECCR, unpack it into @ecc and disable the ECC engine.
 */
527 static int stm32_fmc2_ham_calculate(struct nand_chip *chip, const u8 *data,
530 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
/* Wait (10us poll period) for the ECC computation to complete */
534 ret = readl_relaxed_poll_timeout(fmc2->io_base + FMC2_SR,
535 sr, sr & FMC2_SR_NWRF, 10,
538 dev_err(fmc2->dev, "ham timeout\n");
542 heccr = readl_relaxed(fmc2->io_base + FMC2_HECCR);
544 stm32_fmc2_ham_set_ecc(heccr, ecc);
/* Disable ECC so the next sector starts from a clean state */
547 stm32_fmc2_set_ecc(fmc2, false);
/*
 * ecc.correct hook (Hamming): XOR the stored and computed ECC bytes to build
 * a 24-bit syndrome, derive the faulty bit/byte position from it and flip the
 * bit in @dat. NOTE(review): the syndrome-classification branches and the
 * shifting computation are elided in this view — do not infer their exact
 * decoding from what is shown.
 */
552 static int stm32_fmc2_ham_correct(struct nand_chip *chip, u8 *dat,
553 u8 *read_ecc, u8 *calc_ecc)
555 u8 bit_position = 0, b0, b1, b2;
556 u32 byte_addr = 0, b;
559 /* Indicate which bit and byte is faulty (if any) */
560 b0 = read_ecc[0] ^ calc_ecc[0];
561 b1 = read_ecc[1] ^ calc_ecc[1];
562 b2 = read_ecc[2] ^ calc_ecc[2];
563 b = b0 | (b1 << 8) | (b2 << 16);
569 /* Calculate bit position */
570 for (i = 0; i < 3; i++) {
573 bit_position += shifting;
583 /* Calculate byte position */
585 for (i = 0; i < 9; i++) {
588 byte_addr += shifting;
/* Flip the single faulty bit in place */
599 dat[byte_addr] ^= (1 << bit_position);
605 * ECC BCH calculation and correction
606 * ECC is 7/13 bytes for 512 bytes of data (supports error correction up to
607 * max of 4-bit/8-bit)
/*
 * ecc.calculate hook (BCH): wait for the parity-ready interrupt (armed by
 * hwctl), then unpack the parity bytes from BCHPBR1..4 — 7 bytes for BCH4,
 * 13 bytes for BCH8 — and disable the ECC engine.
 */
609 static int stm32_fmc2_bch_calculate(struct nand_chip *chip, const u8 *data,
612 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
615 /* Wait until the BCH code is ready */
616 if (!wait_for_completion_timeout(&fmc2->complete,
617 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
618 dev_err(fmc2->dev, "bch timeout\n");
619 stm32_fmc2_disable_bch_irq(fmc2);
623 /* Read parity bits */
624 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR1);
626 ecc[1] = bchpbr >> 8;
627 ecc[2] = bchpbr >> 16;
628 ecc[3] = bchpbr >> 24;
630 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR2);
632 ecc[5] = bchpbr >> 8;
633 ecc[6] = bchpbr >> 16;
/* BCH8 needs the remaining parity bytes from BCHPBR3/BCHPBR4 */
635 if (chip->ecc.strength == FMC2_ECC_BCH8) {
636 ecc[7] = bchpbr >> 24;
638 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR3);
640 ecc[9] = bchpbr >> 8;
641 ecc[10] = bchpbr >> 16;
642 ecc[11] = bchpbr >> 24;
644 bchpbr = readl_relaxed(fmc2->io_base + FMC2_BCHPBR4);
/* Disable ECC so the next sector starts from a clean state */
649 stm32_fmc2_set_ecc(fmc2, false);
654 /* BCH algorithm correction */
/*
 * Decode the five BCHDSR status words: bail out early when no error was
 * detected, report uncorrectable pages (DUE), otherwise flip each reported
 * error bit position that falls inside the data sector. Positions beyond
 * eccsize * 8 refer to the ECC bytes themselves and are skipped.
 */
655 static int stm32_fmc2_bch_decode(int eccsize, u8 *dat, u32 *ecc_sta)
657 u32 bchdsr0 = ecc_sta[0];
658 u32 bchdsr1 = ecc_sta[1];
659 u32 bchdsr2 = ecc_sta[2];
660 u32 bchdsr3 = ecc_sta[3];
661 u32 bchdsr4 = ecc_sta[4];
664 unsigned int nb_errs = 0;
666 /* No errors found */
667 if (likely(!(bchdsr0 & FMC2_BCHDSR0_DEF)))
670 /* Too many errors detected */
671 if (unlikely(bchdsr0 & FMC2_BCHDSR0_DUE))
/* Up to 8 error bit positions, packed two per status register */
674 pos[0] = bchdsr1 & FMC2_BCHDSR1_EBP1_MASK;
675 pos[1] = (bchdsr1 & FMC2_BCHDSR1_EBP2_MASK) >> FMC2_BCHDSR1_EBP2_SHIFT;
676 pos[2] = bchdsr2 & FMC2_BCHDSR2_EBP3_MASK;
677 pos[3] = (bchdsr2 & FMC2_BCHDSR2_EBP4_MASK) >> FMC2_BCHDSR2_EBP4_SHIFT;
678 pos[4] = bchdsr3 & FMC2_BCHDSR3_EBP5_MASK;
679 pos[5] = (bchdsr3 & FMC2_BCHDSR3_EBP6_MASK) >> FMC2_BCHDSR3_EBP6_SHIFT;
680 pos[6] = bchdsr4 & FMC2_BCHDSR4_EBP7_MASK;
681 pos[7] = (bchdsr4 & FMC2_BCHDSR4_EBP8_MASK) >> FMC2_BCHDSR4_EBP8_SHIFT;
/* DEN holds the number of detected errors */
683 den = (bchdsr0 & FMC2_BCHDSR0_DEN_MASK) >> FMC2_BCHDSR0_DEN_SHIFT;
684 for (i = 0; i < den; i++) {
685 if (pos[i] < eccsize * 8) {
686 change_bit(pos[i], (unsigned long *)dat);
/*
 * ecc.correct hook (BCH): wait for the decoding-error interrupt (armed by
 * hwctl), snapshot BCHDSR0..4, disable ECC and hand the status words to
 * stm32_fmc2_bch_decode() for actual bit correction.
 */
694 static int stm32_fmc2_bch_correct(struct nand_chip *chip, u8 *dat,
695 u8 *read_ecc, u8 *calc_ecc)
697 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
700 /* Wait until the decoding error is ready */
701 if (!wait_for_completion_timeout(&fmc2->complete,
702 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
703 dev_err(fmc2->dev, "bch timeout\n");
704 stm32_fmc2_disable_bch_irq(fmc2);
708 ecc_sta[0] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR0);
709 ecc_sta[1] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR1);
710 ecc_sta[2] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR2);
711 ecc_sta[3] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR3);
712 ecc_sta[4] = readl_relaxed(fmc2->io_base + FMC2_BCHDSR4);
/* Disable ECC so the next sector starts from a clean state */
715 stm32_fmc2_set_ecc(fmc2, false);
717 return stm32_fmc2_bch_decode(chip->ecc.size, dat, ecc_sta);
/*
 * ecc.read_page hook (non-sequencer path): read the page sector by sector,
 * interleaving data and ECC-byte reads via column changes, correct each
 * sector, account bitflips in mtd->ecc_stats, and optionally read the OOB.
 * Returns the max bitflip count seen (error paths elided in this view).
 */
720 static int stm32_fmc2_read_page(struct nand_chip *chip, u8 *buf,
721 int oob_required, int page)
723 struct mtd_info *mtd = nand_to_mtd(chip);
724 int ret, i, s, stat, eccsize = chip->ecc.size;
725 int eccbytes = chip->ecc.bytes;
726 int eccsteps = chip->ecc.steps;
727 int eccstrength = chip->ecc.strength;
729 u8 *ecc_calc = chip->ecc.calc_buf;
730 u8 *ecc_code = chip->ecc.code_buf;
731 unsigned int max_bitflips = 0;
733 ret = nand_read_page_op(chip, page, 0, NULL, 0);
/* i tracks the ECC byte offset in OOB (skipping the bad-block marker) */
737 for (i = mtd->writesize + FMC2_BBM_LEN, s = 0; s < eccsteps;
738 s++, i += eccbytes, p += eccsize) {
739 chip->ecc.hwctl(chip, NAND_ECC_READ);
741 /* Read the nand page sector (512 bytes) */
742 ret = nand_change_read_column_op(chip, s * eccsize, p,
747 /* Read the corresponding ECC bytes */
748 ret = nand_change_read_column_op(chip, i, ecc_code,
753 /* Correct the data */
754 stat = chip->ecc.correct(chip, p, ecc_code, ecc_calc);
755 if (stat == -EBADMSG)
756 /* Check for empty pages with bitflips */
757 stat = nand_check_erased_ecc_chunk(p, eccsize,
763 mtd->ecc_stats.failed++;
765 mtd->ecc_stats.corrected += stat;
766 max_bitflips = max_t(unsigned int, max_bitflips, stat);
/* Optionally pull the whole OOB area for the caller */
772 ret = nand_change_read_column_op(chip, mtd->writesize,
773 chip->oob_poi, mtd->oobsize,
782 /* Sequencer read/write configuration */
/*
 * Program the command sequencer (CSQCFGR1..3, CSQAR1/2) for a full-page
 * read or write: commands, random-data commands for OOB access, sector
 * count, address cycles and the ECC byte offset in the spare area.
 * @raw disables ECC-related sequencer features; @write_data selects the
 * program vs. read command set.
 */
783 static void stm32_fmc2_rw_page_init(struct nand_chip *chip, int page,
784 int raw, bool write_data)
786 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
787 struct mtd_info *mtd = nand_to_mtd(chip);
788 u32 csqcfgr1, csqcfgr2, csqcfgr3;
/* ECC bytes live after the bad-block marker in the spare area */
790 u32 ecc_offset = mtd->writesize + FMC2_BBM_LEN;
791 u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
796 pcr &= ~FMC2_PCR_WEN;
797 writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
800 * - Set Program Page/Page Read command
801 * - Enable DMA request data
804 csqcfgr1 = FMC2_CSQCFGR1_DMADEN | FMC2_CSQCFGR1_CMD1T;
806 csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_SEQIN);
/* Read path needs the two-command READ0/READSTART sequence */
808 csqcfgr1 |= FMC2_CSQCFGR1_CMD1(NAND_CMD_READ0) |
809 FMC2_CSQCFGR1_CMD2EN |
810 FMC2_CSQCFGR1_CMD2(NAND_CMD_READSTART) |
814 * - Set Random Data Input/Random Data Read command
815 * - Enable the sequencer to access the Spare data area
816 * - Enable DMA request status decoding for read
820 csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDIN);
822 csqcfgr2 = FMC2_CSQCFGR2_RCMD1(NAND_CMD_RNDOUT) |
823 FMC2_CSQCFGR2_RCMD2EN |
824 FMC2_CSQCFGR2_RCMD2(NAND_CMD_RNDOUTSTART) |
825 FMC2_CSQCFGR2_RCMD1T |
826 FMC2_CSQCFGR2_RCMD2T;
/* Status decoding (DMASEN) only applies to ECC reads */
828 csqcfgr2 |= write_data ? 0 : FMC2_CSQCFGR2_DMASEN;
829 csqcfgr2 |= FMC2_CSQCFGR2_SQSDTEN;
833 * - Set the number of sectors to be written
836 csqcfgr3 = FMC2_CSQCFGR3_SNBR(chip->ecc.steps - 1);
838 csqcfgr3 |= FMC2_CSQCFGR3_RAC2T;
/* Devices with 3 row-address bytes need a 5th address cycle */
839 if (chip->options & NAND_ROW_ADDR_3)
840 csqcfgr3 |= FMC2_CSQCFGR3_AC5T;
842 csqcfgr3 |= FMC2_CSQCFGR3_AC4T;
846 * Set the fourth first address cycles
847 * Byte 1 and byte 2 => column, we start at 0x0
848 * Byte 3 and byte 4 => page
850 csqar1 = FMC2_CSQCAR1_ADDC3(page);
851 csqar1 |= FMC2_CSQCAR1_ADDC4(page >> 8);
854 * - Set chip enable number
855 * - Set ECC byte offset in the spare area
856 * - Calculate the number of address cycles to be issued
857 * - Set byte 5 of address cycle if needed
859 csqar2 = FMC2_CSQCAR2_NANDCEN(fmc2->cs_sel);
/* On 16-bit buses the spare-area offset is expressed in words */
860 if (chip->options & NAND_BUSWIDTH_16)
861 csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset >> 1);
863 csqar2 |= FMC2_CSQCAR2_SAO(ecc_offset);
864 if (chip->options & NAND_ROW_ADDR_3) {
865 csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(5);
866 csqar2 |= FMC2_CSQCAR2_ADDC5(page >> 16);
868 csqcfgr1 |= FMC2_CSQCFGR1_ACYNBR(4);
871 writel_relaxed(csqcfgr1, fmc2->io_base + FMC2_CSQCFGR1);
872 writel_relaxed(csqcfgr2, fmc2->io_base + FMC2_CSQCFGR2);
873 writel_relaxed(csqcfgr3, fmc2->io_base + FMC2_CSQCFGR3);
874 writel_relaxed(csqar1, fmc2->io_base + FMC2_CSQAR1);
875 writel_relaxed(csqar2, fmc2->io_base + FMC2_CSQAR2);
/* DMA completion callback: signal the completion passed as callback_param. */
878 static void stm32_fmc2_dma_callback(void *arg)
880 complete((struct completion *)arg);
883 /* Read/write data from/to a page */
/*
 * Move a full page through the sequencer using DMA: build a scatterlist over
 * the page buffer (one entry per ECC sector), optionally a second DMA over
 * the ECC status registers (non-raw reads), kick the sequencer and wait for
 * the sequencer irq plus both DMA completions. Error/unmap paths partially
 * elided in this view.
 */
884 static int stm32_fmc2_xfer(struct nand_chip *chip, const u8 *buf,
885 int raw, bool write_data)
887 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
888 struct dma_async_tx_descriptor *desc_data, *desc_ecc;
889 struct scatterlist *sg;
/* Defaults are for the read direction; overridden below for writes */
890 struct dma_chan *dma_ch = fmc2->dma_rx_ch;
891 enum dma_data_direction dma_data_dir = DMA_FROM_DEVICE;
892 enum dma_transfer_direction dma_transfer_dir = DMA_DEV_TO_MEM;
893 u32 csqcr = readl_relaxed(fmc2->io_base + FMC2_CSQCR);
894 int eccsteps = chip->ecc.steps;
895 int eccsize = chip->ecc.size;
899 /* Configure DMA data */
901 dma_data_dir = DMA_TO_DEVICE;
902 dma_transfer_dir = DMA_MEM_TO_DEV;
903 dma_ch = fmc2->dma_tx_ch;
/* One scatterlist entry per 512-byte ECC sector */
906 for_each_sg(fmc2->dma_data_sg.sgl, sg, eccsteps, s) {
907 sg_set_buf(sg, p, eccsize);
911 ret = dma_map_sg(fmc2->dev, fmc2->dma_data_sg.sgl,
912 eccsteps, dma_data_dir);
916 desc_data = dmaengine_prep_slave_sg(dma_ch, fmc2->dma_data_sg.sgl,
917 eccsteps, dma_transfer_dir,
924 reinit_completion(&fmc2->dma_data_complete);
925 reinit_completion(&fmc2->complete);
926 desc_data->callback = stm32_fmc2_dma_callback;
927 desc_data->callback_param = &fmc2->dma_data_complete;
928 ret = dma_submit_error(dmaengine_submit(desc_data));
932 dma_async_issue_pending(dma_ch);
/* ECC status is only DMA'd for non-raw reads */
934 if (!write_data && !raw) {
935 /* Configure DMA ECC status */
937 for_each_sg(fmc2->dma_ecc_sg.sgl, sg, eccsteps, s) {
938 sg_set_buf(sg, p, fmc2->dma_ecc_len);
939 p += fmc2->dma_ecc_len;
942 ret = dma_map_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
943 eccsteps, dma_data_dir);
947 desc_ecc = dmaengine_prep_slave_sg(fmc2->dma_ecc_ch,
948 fmc2->dma_ecc_sg.sgl,
949 eccsteps, dma_transfer_dir,
956 reinit_completion(&fmc2->dma_ecc_complete);
957 desc_ecc->callback = stm32_fmc2_dma_callback;
958 desc_ecc->callback_param = &fmc2->dma_ecc_complete;
959 ret = dma_submit_error(dmaengine_submit(desc_ecc));
963 dma_async_issue_pending(fmc2->dma_ecc_ch);
966 stm32_fmc2_clear_seq_irq(fmc2);
967 stm32_fmc2_enable_seq_irq(fmc2);
969 /* Start the transfer */
970 csqcr |= FMC2_CSQCR_CSQSTART;
971 writel_relaxed(csqcr, fmc2->io_base + FMC2_CSQCR);
973 /* Wait end of sequencer transfer */
974 if (!wait_for_completion_timeout(&fmc2->complete,
975 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
976 dev_err(fmc2->dev, "seq timeout\n");
977 stm32_fmc2_disable_seq_irq(fmc2);
/* Abort all in-flight DMA before bailing out */
978 dmaengine_terminate_all(dma_ch);
979 if (!write_data && !raw)
980 dmaengine_terminate_all(fmc2->dma_ecc_ch);
985 /* Wait DMA data transfer completion */
986 if (!wait_for_completion_timeout(&fmc2->dma_data_complete,
987 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
988 dev_err(fmc2->dev, "data DMA timeout\n");
989 dmaengine_terminate_all(dma_ch);
993 /* Wait DMA ECC transfer completion */
994 if (!write_data && !raw) {
995 if (!wait_for_completion_timeout(&fmc2->dma_ecc_complete,
996 msecs_to_jiffies(FMC2_TIMEOUT_MS))) {
997 dev_err(fmc2->dev, "ECC DMA timeout\n");
998 dmaengine_terminate_all(fmc2->dma_ecc_ch);
/* Unmap in reverse of map order, on both success and error paths */
1004 if (!write_data && !raw)
1005 dma_unmap_sg(fmc2->dev, fmc2->dma_ecc_sg.sgl,
1006 eccsteps, dma_data_dir);
1009 dma_unmap_sg(fmc2->dev, fmc2->dma_data_sg.sgl, eccsteps, dma_data_dir);
/*
 * Common sequencer write path: configure the sequencer, DMA the page out,
 * optionally write the OOB (raw mode) and finish with a PROG confirm.
 */
1014 static int stm32_fmc2_sequencer_write(struct nand_chip *chip,
1015 const u8 *buf, int oob_required,
1018 struct mtd_info *mtd = nand_to_mtd(chip);
1021 /* Configure the sequencer */
1022 stm32_fmc2_rw_page_init(chip, page, raw, true);
1024 /* Write the page */
1025 ret = stm32_fmc2_xfer(chip, buf, raw, true);
/* Raw mode: the OOB is not handled by the sequencer, write it manually */
1031 ret = nand_change_write_column_op(chip, mtd->writesize,
1032 chip->oob_poi, mtd->oobsize,
1038 return nand_prog_page_end_op(chip);
/* ecc.write_page hook: select the target CS, then do a non-raw sequencer write. */
1041 static int stm32_fmc2_sequencer_write_page(struct nand_chip *chip,
1048 /* Select the target */
1049 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1053 return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, false);
/* ecc.write_page_raw hook: select the target CS, then do a raw sequencer write. */
1056 static int stm32_fmc2_sequencer_write_page_raw(struct nand_chip *chip,
1063 /* Select the target */
1064 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1068 return stm32_fmc2_sequencer_write(chip, buf, oob_required, page, true);
1071 /* Get a status indicating which sectors have errors */
/* Returns a bitmap (bit per sector) from CSQEMSR; 0 means no error detected. */
1072 static inline u16 stm32_fmc2_get_mapping_status(struct stm32_fmc2_nfc *fmc2)
1074 u32 csqemsr = readl_relaxed(fmc2->io_base + FMC2_CSQEMSR);
1076 return csqemsr & FMC2_CSQEMSR_SEM;
/*
 * ecc.correct hook for the sequencer path: walk each sector, and for sectors
 * flagged in the error-mapping status decode the DMA-captured ECC status
 * (HECCR for Hamming, BCHDSR0..4 for BCH) from fmc2->ecc_buf. Bitflip
 * accounting mirrors stm32_fmc2_read_page().
 */
1079 static int stm32_fmc2_sequencer_correct(struct nand_chip *chip, u8 *dat,
1080 u8 *read_ecc, u8 *calc_ecc)
1082 struct mtd_info *mtd = nand_to_mtd(chip);
1083 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1084 int eccbytes = chip->ecc.bytes;
1085 int eccsteps = chip->ecc.steps;
1086 int eccstrength = chip->ecc.strength;
1087 int i, s, eccsize = chip->ecc.size;
/* ECC status captured per-sector by the ECC DMA channel during the read */
1088 u32 *ecc_sta = (u32 *)fmc2->ecc_buf;
1089 u16 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1090 unsigned int max_bitflips = 0;
1092 for (i = 0, s = 0; s < eccsteps; s++, i += eccbytes, dat += eccsize) {
1095 if (eccstrength == FMC2_ECC_HAM) {
1096 /* Ecc_sta = FMC2_HECCR */
1097 if (sta_map & BIT(s)) {
1098 stm32_fmc2_ham_set_ecc(*ecc_sta, &calc_ecc[i]);
1099 stat = stm32_fmc2_ham_correct(chip, dat,
1106 * Ecc_sta[0] = FMC2_BCHDSR0
1107 * Ecc_sta[1] = FMC2_BCHDSR1
1108 * Ecc_sta[2] = FMC2_BCHDSR2
1109 * Ecc_sta[3] = FMC2_BCHDSR3
1110 * Ecc_sta[4] = FMC2_BCHDSR4
1112 if (sta_map & BIT(s))
1113 stat = stm32_fmc2_bch_decode(eccsize, dat,
1118 if (stat == -EBADMSG)
1119 /* Check for empty pages with bitflips */
1120 stat = nand_check_erased_ecc_chunk(dat, eccsize,
1127 mtd->ecc_stats.failed++;
1129 mtd->ecc_stats.corrected += stat;
1130 max_bitflips = max_t(unsigned int, max_bitflips, stat);
1134 return max_bitflips;
/*
 * ecc.read_page hook for the sequencer path: DMA the page in, read the OOB,
 * and only run correction when the error-mapping status flags at least one
 * sector. ECC bytes are extracted from the OOB via the ooblayout.
 */
1137 static int stm32_fmc2_sequencer_read_page(struct nand_chip *chip, u8 *buf,
1138 int oob_required, int page)
1140 struct mtd_info *mtd = nand_to_mtd(chip);
1141 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1142 u8 *ecc_calc = chip->ecc.calc_buf;
1143 u8 *ecc_code = chip->ecc.code_buf;
1147 /* Select the target */
1148 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1152 /* Configure the sequencer */
1153 stm32_fmc2_rw_page_init(chip, page, 0, false);
1156 ret = stm32_fmc2_xfer(chip, buf, 0, false);
1160 sta_map = stm32_fmc2_get_mapping_status(fmc2);
1162 /* Check if errors happen */
1163 if (likely(!sta_map)) {
/* Fast path: no sector flagged, just fetch the OOB and return */
1165 return nand_change_read_column_op(chip, mtd->writesize,
1167 mtd->oobsize, false);
/* Errors present: read OOB, pull the stored ECC bytes, then correct */
1173 ret = nand_change_read_column_op(chip, mtd->writesize,
1174 chip->oob_poi, mtd->oobsize, false);
1178 ret = mtd_ooblayout_get_eccbytes(mtd, ecc_code, chip->oob_poi, 0,
1184 return chip->ecc.correct(chip, buf, ecc_code, ecc_calc);
/*
 * ecc.read_page_raw hook for the sequencer path: DMA the page in raw mode
 * (no ECC), then read the OOB area.
 */
1187 static int stm32_fmc2_sequencer_read_page_raw(struct nand_chip *chip, u8 *buf,
1188 int oob_required, int page)
1190 struct mtd_info *mtd = nand_to_mtd(chip);
1193 /* Select the target */
1194 ret = stm32_fmc2_select_chip(chip, chip->cur_cs);
1198 /* Configure the sequencer */
1199 stm32_fmc2_rw_page_init(chip, page, 1, false);
1202 ret = stm32_fmc2_xfer(chip, buf, 1, false);
1208 return nand_change_read_column_op(chip, mtd->writesize,
1209 chip->oob_poi, mtd->oobsize,
/*
 * Shared interrupt handler: irq_state tells us whether the sequencer or the
 * BCH block raised the interrupt; mask that source and wake the waiter.
 */
1215 static irqreturn_t stm32_fmc2_irq(int irq, void *dev_id)
1217 struct stm32_fmc2_nfc *fmc2 = (struct stm32_fmc2_nfc *)dev_id;
1219 if (fmc2->irq_state == FMC2_IRQ_SEQ)
1220 /* Sequencer is used */
1221 stm32_fmc2_disable_seq_irq(fmc2);
1222 else if (fmc2->irq_state == FMC2_IRQ_BCH)
1224 stm32_fmc2_disable_bch_irq(fmc2);
1226 complete(&fmc2->complete);
/*
 * PIO read from the selected CS's data window: align the destination buffer
 * with byte/half-word reads, copy the bulk with word reads, then drain the
 * tail. @force_8bit temporarily narrows a 16-bit bus to 8-bit around the
 * transfer. Pointer/length advance statements are elided in this view.
 */
1231 static void stm32_fmc2_read_data(struct nand_chip *chip, void *buf,
1232 unsigned int len, bool force_8bit)
1234 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1235 void __iomem *io_addr_r = fmc2->data_base[fmc2->cs_sel];
1237 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1238 /* Reconfigure bus width to 8-bit */
1239 stm32_fmc2_set_buswidth_16(fmc2, false);
/* Head: bring buf up to 32-bit alignment with narrower accesses */
1241 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1242 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1243 *(u8 *)buf = readb_relaxed(io_addr_r);
1248 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1249 len >= sizeof(u16)) {
1250 *(u16 *)buf = readw_relaxed(io_addr_r);
1256 /* Buf is aligned */
1257 while (len >= sizeof(u32)) {
1258 *(u32 *)buf = readl_relaxed(io_addr_r);
1263 /* Read remaining bytes */
1264 if (len >= sizeof(u16)) {
1265 *(u16 *)buf = readw_relaxed(io_addr_r);
1271 *(u8 *)buf = readb_relaxed(io_addr_r);
1273 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1274 /* Reconfigure bus width to 16-bit */
1275 stm32_fmc2_set_buswidth_16(fmc2, true);
/*
 * PIO write to the selected CS's data window; mirror image of
 * stm32_fmc2_read_data(): align head, bulk word writes, drain tail, with the
 * same optional 8-bit bus narrowing. Pointer/length advance statements are
 * elided in this view.
 */
1278 static void stm32_fmc2_write_data(struct nand_chip *chip, const void *buf,
1279 unsigned int len, bool force_8bit)
1281 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1282 void __iomem *io_addr_w = fmc2->data_base[fmc2->cs_sel];
1284 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1285 /* Reconfigure bus width to 8-bit */
1286 stm32_fmc2_set_buswidth_16(fmc2, false);
/* Head: bring buf up to 32-bit alignment with narrower accesses */
1288 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32))) {
1289 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u16)) && len) {
1290 writeb_relaxed(*(u8 *)buf, io_addr_w);
1295 if (!IS_ALIGNED((uintptr_t)buf, sizeof(u32)) &&
1296 len >= sizeof(u16)) {
1297 writew_relaxed(*(u16 *)buf, io_addr_w);
1303 /* Buf is aligned */
1304 while (len >= sizeof(u32)) {
1305 writel_relaxed(*(u32 *)buf, io_addr_w);
1310 /* Write remaining bytes */
1311 if (len >= sizeof(u16)) {
1312 writew_relaxed(*(u16 *)buf, io_addr_w);
1318 writeb_relaxed(*(u8 *)buf, io_addr_w);
1320 if (force_8bit && chip->options & NAND_BUSWIDTH_16)
1321 /* Reconfigure bus width to 16-bit */
1322 stm32_fmc2_set_buswidth_16(fmc2, true);
/*
 * nand_controller exec_op hook: select the requested CS, then replay each
 * instruction — commands to the CMD window, address bytes to the ADDR
 * window, data via the PIO helpers, and WAITRDY via nand_soft_waitrdy().
 */
1325 static int stm32_fmc2_exec_op(struct nand_chip *chip,
1326 const struct nand_operation *op,
1329 struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
1330 const struct nand_op_instr *instr = NULL;
1331 unsigned int op_id, i;
1334 ret = stm32_fmc2_select_chip(chip, op->cs);
1341 for (op_id = 0; op_id < op->ninstrs; op_id++) {
1342 instr = &op->instrs[op_id];
1344 switch (instr->type) {
1345 case NAND_OP_CMD_INSTR:
1346 writeb_relaxed(instr->ctx.cmd.opcode,
1347 fmc2->cmd_base[fmc2->cs_sel]);
1350 case NAND_OP_ADDR_INSTR:
1351 for (i = 0; i < instr->ctx.addr.naddrs; i++)
1352 writeb_relaxed(instr->ctx.addr.addrs[i],
1353 fmc2->addr_base[fmc2->cs_sel]);
1356 case NAND_OP_DATA_IN_INSTR:
1357 stm32_fmc2_read_data(chip, instr->ctx.data.buf.in,
1358 instr->ctx.data.len,
1359 instr->ctx.data.force_8bit);
1362 case NAND_OP_DATA_OUT_INSTR:
1363 stm32_fmc2_write_data(chip, instr->ctx.data.buf.out,
1364 instr->ctx.data.len,
1365 instr->ctx.data.force_8bit);
1368 case NAND_OP_WAITRDY_INSTR:
/* No R/B irq wired: poll the status via soft waitrdy */
1369 ret = nand_soft_waitrdy(chip,
1370 instr->ctx.waitrdy.timeout_ms);
/* Controller initialization */
/*
 * Program BCR1/PCR/PMEM/PATT with safe defaults: wait feature and NAND
 * bank enabled, 8-bit bus (for identification), ECC disabled, default
 * ECC sector size and tclr/tar timings, then enable the controller.
 * Called at probe time and again on resume.
 */
static void stm32_fmc2_init(struct stm32_fmc2_nfc *fmc2)
	u32 pcr = readl_relaxed(fmc2->io_base + FMC2_PCR);
	u32 bcr1 = readl_relaxed(fmc2->io_base + FMC2_BCR1);

	/* Set CS used to undefined */

	/* Enable wait feature and nand flash memory bank */
	pcr |= FMC2_PCR_PWAITEN;
	pcr |= FMC2_PCR_PBKEN;

	/* Set buswidth to 8 bits mode for identification */
	pcr &= ~FMC2_PCR_PWID_MASK;

	/* ECC logic is disabled */
	pcr &= ~FMC2_PCR_ECCEN;

	/* Clear ECC algorithm selection and write enable */
	pcr &= ~FMC2_PCR_ECCALG;
	pcr &= ~FMC2_PCR_BCHECC;
	pcr &= ~FMC2_PCR_WEN;

	/* Set default ECC sector size */
	pcr &= ~FMC2_PCR_ECCSS_MASK;
	pcr |= FMC2_PCR_ECCSS(FMC2_PCR_ECCSS_2048);

	/* Set default tclr/tar timings */
	pcr &= ~FMC2_PCR_TCLR_MASK;
	pcr |= FMC2_PCR_TCLR(FMC2_PCR_TCLR_DEFAULT);
	pcr &= ~FMC2_PCR_TAR_MASK;
	pcr |= FMC2_PCR_TAR(FMC2_PCR_TAR_DEFAULT);

	/* Enable FMC2 controller */
	bcr1 |= FMC2_BCR1_FMC2EN;

	writel_relaxed(bcr1, fmc2->io_base + FMC2_BCR1);
	writel_relaxed(pcr, fmc2->io_base + FMC2_PCR);
	writel_relaxed(FMC2_PMEM_DEFAULT, fmc2->io_base + FMC2_PMEM);
	writel_relaxed(FMC2_PATT_DEFAULT, fmc2->io_base + FMC2_PATT);
/* Controller timings */
/*
 * Translate the requested SDR timings into FMC2 register fields
 * expressed in HCLK cycles. Each derived value is rounded up to a
 * whole number of cycles and clamped to its register field width
 * (4 bits for tclr/tar, 8 bits for the PMEM/PATT fields). The
 * inequality comments below quote the constraints being honoured.
 */
static void stm32_fmc2_calc_timings(struct nand_chip *chip,
				    const struct nand_sdr_timings *sdrt)
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct stm32_fmc2_nand *nand = to_fmc2_nand(chip);
	struct stm32_fmc2_timings *tims = &nand->timings;
	unsigned long hclk = clk_get_rate(fmc2->clk);
	/* HCLK period in ps, same unit as the nand_sdr_timings fields */
	unsigned long hclkp = NSEC_PER_SEC / (hclk / 1000);
	unsigned long timing, tar, tclr, thiz, twait;
	unsigned long tset_mem, tset_att, thold_mem, thold_att;

	/* tAR: ALE to RE# delay, at least one HCLK cycle */
	tar = max_t(unsigned long, hclkp, sdrt->tAR_min);
	timing = DIV_ROUND_UP(tar, hclkp) - 1;
	tims->tar = min_t(unsigned long, timing, FMC2_PCR_TIMING_MASK);

	/* tCLR: CLE to RE# delay, at least one HCLK cycle */
	tclr = max_t(unsigned long, hclkp, sdrt->tCLR_min);
	timing = DIV_ROUND_UP(tclr, hclkp) - 1;
	tims->tclr = min_t(unsigned long, timing, FMC2_PCR_TIMING_MASK);

	tims->thiz = FMC2_THIZ;
	thiz = (tims->thiz + 1) * hclkp;

	 * tWAIT > tREA + tIO
	/* tWAIT: low phase of RE#/WE#, bounded by tRP/tWP/tREA+tIO */
	twait = max_t(unsigned long, hclkp, sdrt->tRP_min);
	twait = max_t(unsigned long, twait, sdrt->tWP_min);
	twait = max_t(unsigned long, twait, sdrt->tREA_max + FMC2_TIO);
	timing = DIV_ROUND_UP(twait, hclkp);
	tims->twait = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);

	 * tSETUP_MEM > tCS - tWAIT
	 * tSETUP_MEM > tALS - tWAIT
	 * tSETUP_MEM > tDS - (tWAIT - tHIZ)
	/* Take the largest of the three constraints above */
	if (sdrt->tCS_min > twait && (tset_mem < sdrt->tCS_min - twait))
		tset_mem = sdrt->tCS_min - twait;
	if (sdrt->tALS_min > twait && (tset_mem < sdrt->tALS_min - twait))
		tset_mem = sdrt->tALS_min - twait;
	if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
	    (tset_mem < sdrt->tDS_min - (twait - thiz)))
		tset_mem = sdrt->tDS_min - (twait - thiz);
	timing = DIV_ROUND_UP(tset_mem, hclkp);
	tims->tset_mem = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);

	 * tHOLD_MEM > tREH - tSETUP_MEM
	 * tHOLD_MEM > max(tRC, tWC) - (tSETUP_MEM + tWAIT)
	thold_mem = max_t(unsigned long, hclkp, sdrt->tCH_min);
	if (sdrt->tREH_min > tset_mem &&
	    (thold_mem < sdrt->tREH_min - tset_mem))
		thold_mem = sdrt->tREH_min - tset_mem;
	if ((sdrt->tRC_min > tset_mem + twait) &&
	    (thold_mem < sdrt->tRC_min - (tset_mem + twait)))
		thold_mem = sdrt->tRC_min - (tset_mem + twait);
	if ((sdrt->tWC_min > tset_mem + twait) &&
	    (thold_mem < sdrt->tWC_min - (tset_mem + twait)))
		thold_mem = sdrt->tWC_min - (tset_mem + twait);
	timing = DIV_ROUND_UP(thold_mem, hclkp);
	tims->thold_mem = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);

	 * tSETUP_ATT > tCS - tWAIT
	 * tSETUP_ATT > tCLS - tWAIT
	 * tSETUP_ATT > tALS - tWAIT
	 * tSETUP_ATT > tRHW - tHOLD_MEM
	 * tSETUP_ATT > tDS - (tWAIT - tHIZ)
	/* Same pattern: take the largest of the constraints above */
	if (sdrt->tCS_min > twait && (tset_att < sdrt->tCS_min - twait))
		tset_att = sdrt->tCS_min - twait;
	if (sdrt->tCLS_min > twait && (tset_att < sdrt->tCLS_min - twait))
		tset_att = sdrt->tCLS_min - twait;
	if (sdrt->tALS_min > twait && (tset_att < sdrt->tALS_min - twait))
		tset_att = sdrt->tALS_min - twait;
	if (sdrt->tRHW_min > thold_mem &&
	    (tset_att < sdrt->tRHW_min - thold_mem))
		tset_att = sdrt->tRHW_min - thold_mem;
	if (twait > thiz && (sdrt->tDS_min > twait - thiz) &&
	    (tset_att < sdrt->tDS_min - (twait - thiz)))
		tset_att = sdrt->tDS_min - (twait - thiz);
	timing = DIV_ROUND_UP(tset_att, hclkp);
	tims->tset_att = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);

	 * tHOLD_ATT > tWB + tIO + tSYNC - tSETUP_MEM
	 * tHOLD_ATT > tADL - tSETUP_MEM
	 * tHOLD_ATT > tWH - tSETUP_MEM
	 * tHOLD_ATT > tWHR - tSETUP_MEM
	 * tHOLD_ATT > tRC - (tSETUP_ATT + tWAIT)
	 * tHOLD_ATT > tWC - (tSETUP_ATT + tWAIT)
	thold_att = max_t(unsigned long, hclkp, sdrt->tALH_min);
	thold_att = max_t(unsigned long, thold_att, sdrt->tCH_min);
	thold_att = max_t(unsigned long, thold_att, sdrt->tCLH_min);
	thold_att = max_t(unsigned long, thold_att, sdrt->tCOH_min);
	thold_att = max_t(unsigned long, thold_att, sdrt->tDH_min);
	if ((sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC > tset_mem) &&
	    (thold_att < sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem))
		thold_att = sdrt->tWB_max + FMC2_TIO + FMC2_TSYNC - tset_mem;
	if (sdrt->tADL_min > tset_mem &&
	    (thold_att < sdrt->tADL_min - tset_mem))
		thold_att = sdrt->tADL_min - tset_mem;
	if (sdrt->tWH_min > tset_mem &&
	    (thold_att < sdrt->tWH_min - tset_mem))
		thold_att = sdrt->tWH_min - tset_mem;
	if (sdrt->tWHR_min > tset_mem &&
	    (thold_att < sdrt->tWHR_min - tset_mem))
		thold_att = sdrt->tWHR_min - tset_mem;
	if ((sdrt->tRC_min > tset_att + twait) &&
	    (thold_att < sdrt->tRC_min - (tset_att + twait)))
		thold_att = sdrt->tRC_min - (tset_att + twait);
	if ((sdrt->tWC_min > tset_att + twait) &&
	    (thold_att < sdrt->tWC_min - (tset_att + twait)))
		thold_att = sdrt->tWC_min - (tset_att + twait);
	timing = DIV_ROUND_UP(thold_att, hclkp);
	tims->thold_att = clamp_val(timing, 1, FMC2_PMEM_PATT_TIMING_MASK);
/*
 * ->setup_data_interface() hook: validate that the requested interface
 * provides SDR timings and, unless this is a check-only call, compute
 * and program the controller timings for this chip.
 */
static int stm32_fmc2_setup_interface(struct nand_chip *chip, int chipnr,
				      const struct nand_data_interface *conf)
	const struct nand_sdr_timings *sdrt;

	sdrt = nand_get_sdr_timings(conf);
		/* Interface does not carry SDR timings */
		return PTR_ERR(sdrt);

	/* Check-only: the core just probes whether timings are supported */
	if (chipnr == NAND_DATA_IFACE_CHECK_ONLY)

	stm32_fmc2_calc_timings(chip, sdrt);

	stm32_fmc2_timings_init(chip);
/* DMA configuration */
/*
 * Request the tx/rx/ecc DMA channels and allocate the scatterlists and
 * ECC status buffer used by the sequencer. If any channel is missing,
 * warn and fall back to polling mode (not a fatal error).
 */
static int stm32_fmc2_dma_setup(struct stm32_fmc2_nfc *fmc2)

	fmc2->dma_tx_ch = dma_request_slave_channel(fmc2->dev, "tx");
	fmc2->dma_rx_ch = dma_request_slave_channel(fmc2->dev, "rx");
	fmc2->dma_ecc_ch = dma_request_slave_channel(fmc2->dev, "ecc");

	if (!fmc2->dma_tx_ch || !fmc2->dma_rx_ch || !fmc2->dma_ecc_ch) {
		dev_warn(fmc2->dev, "DMAs not defined in the device tree, polling mode is used\n");

	/* Scatterlist used to DMA the per-sector ECC status registers */
	ret = sg_alloc_table(&fmc2->dma_ecc_sg, FMC2_MAX_SG, GFP_KERNEL);

	/* Allocate a buffer to store ECC status registers */
	fmc2->ecc_buf = devm_kzalloc(fmc2->dev, FMC2_MAX_ECC_BUF_LEN,

	/* Scatterlist used for the page data transfers */
	ret = sg_alloc_table(&fmc2->dma_data_sg, FMC2_MAX_SG, GFP_KERNEL);

	init_completion(&fmc2->dma_data_complete);
	init_completion(&fmc2->dma_ecc_complete);
/* NAND callbacks setup */
/*
 * Install the page read/write and ECC callbacks, choosing between the
 * DMA-backed sequencer mode and polling mode, and Hamming vs BCH,
 * then set ecc.bytes according to the strength and bus width.
 */
static void stm32_fmc2_nand_callbacks_setup(struct nand_chip *chip)
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);

	 * Specific callbacks to read/write a page depending on
	 * the mode (polling/sequencer) and the algo used (Hamming, BCH).
	if (fmc2->dma_tx_ch && fmc2->dma_rx_ch && fmc2->dma_ecc_ch) {
		/* DMA => use sequencer mode callbacks */
		chip->ecc.correct = stm32_fmc2_sequencer_correct;
		chip->ecc.write_page = stm32_fmc2_sequencer_write_page;
		chip->ecc.read_page = stm32_fmc2_sequencer_read_page;
		chip->ecc.write_page_raw = stm32_fmc2_sequencer_write_page_raw;
		chip->ecc.read_page_raw = stm32_fmc2_sequencer_read_page_raw;
		/* No DMA => use polling mode callbacks */
		chip->ecc.hwctl = stm32_fmc2_hwctl;
		if (chip->ecc.strength == FMC2_ECC_HAM) {
			/* Hamming is used */
			chip->ecc.calculate = stm32_fmc2_ham_calculate;
			chip->ecc.correct = stm32_fmc2_ham_correct;
			chip->ecc.options |= NAND_ECC_GENERIC_ERASED_CHECK;
			/* BCH is used */
			chip->ecc.calculate = stm32_fmc2_bch_calculate;
			chip->ecc.correct = stm32_fmc2_bch_correct;
			chip->ecc.read_page = stm32_fmc2_read_page;

	/* Specific configurations depending on the algo used */
	if (chip->ecc.strength == FMC2_ECC_HAM)
		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 4 : 3;
	else if (chip->ecc.strength == FMC2_ECC_BCH8)
		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 14 : 13;
		/* Default: BCH4 ECC byte count */
		chip->ecc.bytes = chip->options & NAND_BUSWIDTH_16 ? 8 : 7;
/*
 * OOB layout, ECC region: all ECC bytes live in one region placed
 * right after the 2-byte bad block marker.
 */
static int stm32_fmc2_nand_ooblayout_ecc(struct mtd_info *mtd, int section,
					 struct mtd_oob_region *oobregion)
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct nand_ecc_ctrl *ecc = &chip->ecc;

	oobregion->length = ecc->total;
	oobregion->offset = FMC2_BBM_LEN;
/*
 * OOB layout, free region: everything left after the bad block marker
 * and the ECC bytes.
 */
static int stm32_fmc2_nand_ooblayout_free(struct mtd_info *mtd, int section,
					  struct mtd_oob_region *oobregion)
	struct nand_chip *chip = mtd_to_nand(mtd);
	struct nand_ecc_ctrl *ecc = &chip->ecc;

	oobregion->length = mtd->oobsize - ecc->total - FMC2_BBM_LEN;
	oobregion->offset = ecc->total + FMC2_BBM_LEN;
/* OOB layout operations registered via mtd_set_ooblayout() */
static const struct mtd_ooblayout_ops stm32_fmc2_nand_ooblayout_ops = {
	.ecc = stm32_fmc2_nand_ooblayout_ecc,
	.free = stm32_fmc2_nand_ooblayout_free,
/*
 * Return the number of ECC bytes needed per @step_size-byte step for
 * the given @strength (Hamming / BCH8, with BCH4 presumably the
 * remaining fallthrough case — the returns are below each branch).
 */
static int stm32_fmc2_calc_ecc_bytes(int step_size, int strength)
	if (strength == FMC2_ECC_HAM)

	if (strength == FMC2_ECC_BCH8)
/* Single-step-size ECC capabilities: 512-byte steps, strengths 1/4/8 */
NAND_ECC_CAPS_SINGLE(stm32_fmc2_ecc_caps, stm32_fmc2_calc_ecc_bytes,
		     FMC2_ECC_HAM, FMC2_ECC_BCH4, FMC2_ECC_BCH8);
/* FMC2 controller ops */
/*
 * ->attach_chip() hook: validate the ECC configuration against what the
 * controller supports, install the page/ECC callbacks and OOB layout,
 * and switch the bus to 16-bit if the chip requires it.
 */
static int stm32_fmc2_attach_chip(struct nand_chip *chip)
	struct stm32_fmc2_nfc *fmc2 = to_stm32_nfc(chip->controller);
	struct mtd_info *mtd = nand_to_mtd(chip);

	 * Only NAND_ECC_HW mode is actually supported
	 * Hamming => ecc.strength = 1
	 * BCH4 => ecc.strength = 4
	 * BCH8 => ecc.strength = 8
	 * ECC sector size = 512
	if (chip->ecc.mode != NAND_ECC_HW) {
		dev_err(fmc2->dev, "nand_ecc_mode is not well defined in the DT\n");

	/* Pick a step size/strength combination that fits the OOB area */
	ret = nand_ecc_choose_conf(chip, &stm32_fmc2_ecc_caps,
				   mtd->oobsize - FMC2_BBM_LEN);
		dev_err(fmc2->dev, "no valid ECC settings set\n");

	/* The sequencer handles at most FMC2_MAX_SG ECC steps per page */
	if (mtd->writesize / chip->ecc.size > FMC2_MAX_SG) {
		dev_err(fmc2->dev, "nand page size is not supported\n");

	/* On-flash BBT must not use the OOB area (it is fully claimed) */
	if (chip->bbt_options & NAND_BBT_USE_FLASH)
		chip->bbt_options |= NAND_BBT_NO_OOB;

	/* NAND callbacks setup */
	stm32_fmc2_nand_callbacks_setup(chip);

	/* Define ECC layout */
	mtd_set_ooblayout(mtd, &stm32_fmc2_nand_ooblayout_ops);

	/* Configure bus width to 16-bit */
	if (chip->options & NAND_BUSWIDTH_16)
		stm32_fmc2_set_buswidth_16(fmc2, true);
/* Controller operations exposed to the raw NAND core */
static const struct nand_controller_ops stm32_fmc2_nand_controller_ops = {
	.attach_chip = stm32_fmc2_attach_chip,
	.exec_op = stm32_fmc2_exec_op,
	.setup_data_interface = stm32_fmc2_setup_interface,
/*
 * Parse one NAND child node: read its "reg" property as the list of
 * chip-selects used by the chip, checking range and double assignment,
 * and record them in nand->cs_used / fmc2->cs_assigned.
 */
static int stm32_fmc2_parse_child(struct stm32_fmc2_nfc *fmc2,
				  struct device_node *dn)
	struct stm32_fmc2_nand *nand = &fmc2->nand;

	/* "reg" length (in bytes) is returned through &nand->ncs */
	if (!of_get_property(dn, "reg", &nand->ncs))

	/* Convert byte length to number of u32 CS entries */
	nand->ncs /= sizeof(u32);
		dev_err(fmc2->dev, "invalid reg property size\n");

	for (i = 0; i < nand->ncs; i++) {
		ret = of_property_read_u32_index(dn, "reg", i, &cs);
			dev_err(fmc2->dev, "could not retrieve reg property: %d\n",

		if (cs > FMC2_MAX_CE) {
			dev_err(fmc2->dev, "invalid reg value: %d\n", cs);

		if (fmc2->cs_assigned & BIT(cs)) {
			dev_err(fmc2->dev, "cs already assigned: %d\n", cs);

		fmc2->cs_assigned |= BIT(cs);
		nand->cs_used[i] = cs;

	nand_set_flash_node(&nand->chip, dn);
/*
 * Parse the controller DT node: require at least one NAND child node
 * (and reject too many), then parse each child's chip-select list.
 */
static int stm32_fmc2_parse_dt(struct stm32_fmc2_nfc *fmc2)
	struct device_node *dn = fmc2->dev->of_node;
	struct device_node *child;
	int nchips = of_get_child_count(dn);

		dev_err(fmc2->dev, "NAND chip not defined\n");

		dev_err(fmc2->dev, "too many NAND chips defined\n");

	for_each_child_of_node(dn, child) {
		ret = stm32_fmc2_parse_child(fmc2, child);
/*
 * Probe: allocate the controller state, parse the DT, map the register
 * and per-CS data/cmd/addr regions, set up IRQ, clock, reset and DMA,
 * initialize the controller, then scan and register the NAND chip.
 */
static int stm32_fmc2_probe(struct platform_device *pdev)
	struct device *dev = &pdev->dev;
	struct reset_control *rstc;
	struct stm32_fmc2_nfc *fmc2;
	struct stm32_fmc2_nand *nand;
	struct resource *res;
	struct mtd_info *mtd;
	struct nand_chip *chip;
	int chip_cs, mem_region, ret, irq;

	fmc2 = devm_kzalloc(dev, sizeof(*fmc2), GFP_KERNEL);

	nand_controller_init(&fmc2->base);
	fmc2->base.ops = &stm32_fmc2_nand_controller_ops;

	ret = stm32_fmc2_parse_dt(fmc2);

	/* Region 0: controller registers */
	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	fmc2->io_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(fmc2->io_base))
		return PTR_ERR(fmc2->io_base);

	fmc2->io_phys_addr = res->start;

	/* Each assigned CS uses three regions: data, cmd and addr spaces */
	for (chip_cs = 0, mem_region = 1; chip_cs < FMC2_MAX_CE;
	     chip_cs++, mem_region += 3) {
		if (!(fmc2->cs_assigned & BIT(chip_cs)))

		res = platform_get_resource(pdev, IORESOURCE_MEM, mem_region);
		fmc2->data_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->data_base[chip_cs]))
			return PTR_ERR(fmc2->data_base[chip_cs]);

		fmc2->data_phys_addr[chip_cs] = res->start;

		res = platform_get_resource(pdev, IORESOURCE_MEM,
		fmc2->cmd_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->cmd_base[chip_cs]))
			return PTR_ERR(fmc2->cmd_base[chip_cs]);

		res = platform_get_resource(pdev, IORESOURCE_MEM,
		fmc2->addr_base[chip_cs] = devm_ioremap_resource(dev, res);
		if (IS_ERR(fmc2->addr_base[chip_cs]))
			return PTR_ERR(fmc2->addr_base[chip_cs]);

	irq = platform_get_irq(pdev, 0);

	ret = devm_request_irq(dev, irq, stm32_fmc2_irq, 0,
			       dev_name(dev), fmc2);
		dev_err(dev, "failed to request irq\n");

	init_completion(&fmc2->complete);

	fmc2->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(fmc2->clk))
		return PTR_ERR(fmc2->clk);

	ret = clk_prepare_enable(fmc2->clk);
		dev_err(dev, "can not enable the clock\n");

	/* Optional reset: pulse it if present (missing reset is not fatal) */
	rstc = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rstc)) {
		reset_control_assert(rstc);
		reset_control_deassert(rstc);

	/* DMA setup (falls back to polling mode if channels are missing) */
	ret = stm32_fmc2_dma_setup(fmc2);

	/* FMC2 init routine */
	stm32_fmc2_init(fmc2);

	mtd = nand_to_mtd(chip);
	mtd->dev.parent = dev;

	chip->controller = &fmc2->base;
	chip->options |= NAND_BUSWIDTH_AUTO | NAND_NO_SUBPAGE_WRITE |
			 NAND_USE_BOUNCE_BUFFER;

	/* Default ECC settings */
	chip->ecc.mode = NAND_ECC_HW;
	chip->ecc.size = FMC2_ECC_STEP_SIZE;
	chip->ecc.strength = FMC2_ECC_BCH8;

	/* Scan to find existence of the device */
	ret = nand_scan(chip, nand->ncs);

	ret = mtd_device_register(mtd, NULL, 0);
		goto err_device_register;

	platform_set_drvdata(pdev, fmc2);

/* Error path: undo DMA/sg/clock setup in reverse order */
err_device_register:

	if (fmc2->dma_ecc_ch)
		dma_release_channel(fmc2->dma_ecc_ch);
	if (fmc2->dma_tx_ch)
		dma_release_channel(fmc2->dma_tx_ch);
	if (fmc2->dma_rx_ch)
		dma_release_channel(fmc2->dma_rx_ch);

	sg_free_table(&fmc2->dma_data_sg);
	sg_free_table(&fmc2->dma_ecc_sg);

	clk_disable_unprepare(fmc2->clk);
/*
 * Remove: release the MTD/NAND device, then free DMA channels,
 * scatterlists and the clock (mirrors the probe error path).
 */
static int stm32_fmc2_remove(struct platform_device *pdev)
	struct stm32_fmc2_nfc *fmc2 = platform_get_drvdata(pdev);
	struct stm32_fmc2_nand *nand = &fmc2->nand;

	nand_release(&nand->chip);

	if (fmc2->dma_ecc_ch)
		dma_release_channel(fmc2->dma_ecc_ch);
	if (fmc2->dma_tx_ch)
		dma_release_channel(fmc2->dma_tx_ch);
	if (fmc2->dma_rx_ch)
		dma_release_channel(fmc2->dma_rx_ch);

	sg_free_table(&fmc2->dma_data_sg);
	sg_free_table(&fmc2->dma_ecc_sg);

	clk_disable_unprepare(fmc2->clk);
/* Suspend: gate the clock and move pins to their sleep state */
static int __maybe_unused stm32_fmc2_suspend(struct device *dev)
	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);

	clk_disable_unprepare(fmc2->clk);

	pinctrl_pm_select_sleep_state(dev);
/*
 * Resume: restore pins and clock, reprogram the controller defaults,
 * then reset the chip on every assigned chip-select.
 */
static int __maybe_unused stm32_fmc2_resume(struct device *dev)
	struct stm32_fmc2_nfc *fmc2 = dev_get_drvdata(dev);
	struct stm32_fmc2_nand *nand = &fmc2->nand;

	pinctrl_pm_select_default_state(dev);

	ret = clk_prepare_enable(fmc2->clk);
		dev_err(dev, "can not enable the clock\n");

	/* Registers were lost/reset across suspend: reapply defaults */
	stm32_fmc2_init(fmc2);

	for (chip_cs = 0; chip_cs < FMC2_MAX_CE; chip_cs++) {
		if (!(fmc2->cs_assigned & BIT(chip_cs)))

		nand_reset(&nand->chip, chip_cs);
/* System PM callbacks (suspend/resume) */
static SIMPLE_DEV_PM_OPS(stm32_fmc2_pm_ops, stm32_fmc2_suspend,

/* Device-tree compatible strings handled by this driver */
static const struct of_device_id stm32_fmc2_match[] = {
	{.compatible = "st,stm32mp15-fmc2"},
MODULE_DEVICE_TABLE(of, stm32_fmc2_match);
/* Platform driver registration and module metadata */
static struct platform_driver stm32_fmc2_driver = {
	.probe	= stm32_fmc2_probe,
	.remove	= stm32_fmc2_remove,
		.name = "stm32_fmc2_nand",
		.of_match_table = stm32_fmc2_match,
		.pm = &stm32_fmc2_pm_ops,
module_platform_driver(stm32_fmc2_driver);

MODULE_ALIAS("platform:stm32_fmc2_nand");
MODULE_AUTHOR("Christophe Kerello <christophe.kerello@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 FMC2 nand driver");
MODULE_LICENSE("GPL v2");