linux/drivers/spi/spi-tegra114.c
   1/*
   2 * SPI driver for NVIDIA's Tegra114 SPI Controller.
   3 *
   4 * Copyright (c) 2013, NVIDIA CORPORATION.  All rights reserved.
   5 *
   6 * This program is free software; you can redistribute it and/or modify it
   7 * under the terms and conditions of the GNU General Public License,
   8 * version 2, as published by the Free Software Foundation.
   9 *
  10 * This program is distributed in the hope it will be useful, but WITHOUT
  11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  12 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
  13 * more details.
  14 *
  15 * You should have received a copy of the GNU General Public License
  16 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
  17 */
  18
  19#include <linux/clk.h>
  20#include <linux/clk/tegra.h>
  21#include <linux/completion.h>
  22#include <linux/delay.h>
  23#include <linux/dmaengine.h>
  24#include <linux/dma-mapping.h>
  25#include <linux/dmapool.h>
  26#include <linux/err.h>
  27#include <linux/init.h>
  28#include <linux/interrupt.h>
  29#include <linux/io.h>
  30#include <linux/kernel.h>
  31#include <linux/kthread.h>
  32#include <linux/module.h>
  33#include <linux/platform_device.h>
  34#include <linux/pm_runtime.h>
  35#include <linux/of.h>
  36#include <linux/of_device.h>
  37#include <linux/spi/spi.h>
  38
  39#define SPI_COMMAND1                            0x000
  40#define SPI_BIT_LENGTH(x)                       (((x) & 0x1f) << 0)
  41#define SPI_PACKED                              (1 << 5)
  42#define SPI_TX_EN                               (1 << 11)
  43#define SPI_RX_EN                               (1 << 12)
  44#define SPI_BOTH_EN_BYTE                        (1 << 13)
  45#define SPI_BOTH_EN_BIT                         (1 << 14)
  46#define SPI_LSBYTE_FE                           (1 << 15)
  47#define SPI_LSBIT_FE                            (1 << 16)
  48#define SPI_BIDIROE                             (1 << 17)
  49#define SPI_IDLE_SDA_DRIVE_LOW                  (0 << 18)
  50#define SPI_IDLE_SDA_DRIVE_HIGH                 (1 << 18)
  51#define SPI_IDLE_SDA_PULL_LOW                   (2 << 18)
  52#define SPI_IDLE_SDA_PULL_HIGH                  (3 << 18)
  53#define SPI_IDLE_SDA_MASK                       (3 << 18)
  54#define SPI_CS_SS_VAL                           (1 << 20)
  55#define SPI_CS_SW_HW                            (1 << 21)
  56/* SPI_CS_POL_INACTIVE bits are default high */
  57#define SPI_CS_POL_INACTIVE                     22
  58#define SPI_CS_POL_INACTIVE_0                   (1 << 22)
  59#define SPI_CS_POL_INACTIVE_1                   (1 << 23)
  60#define SPI_CS_POL_INACTIVE_2                   (1 << 24)
  61#define SPI_CS_POL_INACTIVE_3                   (1 << 25)
  62#define SPI_CS_POL_INACTIVE_MASK                (0xF << 22)
  63
  64#define SPI_CS_SEL_0                            (0 << 26)
  65#define SPI_CS_SEL_1                            (1 << 26)
  66#define SPI_CS_SEL_2                            (2 << 26)
  67#define SPI_CS_SEL_3                            (3 << 26)
  68#define SPI_CS_SEL_MASK                         (3 << 26)
  69#define SPI_CS_SEL(x)                           (((x) & 0x3) << 26)
  70#define SPI_CONTROL_MODE_0                      (0 << 28)
  71#define SPI_CONTROL_MODE_1                      (1 << 28)
  72#define SPI_CONTROL_MODE_2                      (2 << 28)
  73#define SPI_CONTROL_MODE_3                      (3 << 28)
  74#define SPI_CONTROL_MODE_MASK                   (3 << 28)
  75#define SPI_MODE_SEL(x)                         (((x) & 0x3) << 28)
  76#define SPI_M_S                                 (1 << 30)
  77#define SPI_PIO                                 (1 << 31)
  78
  79#define SPI_COMMAND2                            0x004
  80#define SPI_TX_TAP_DELAY(x)                     (((x) & 0x3F) << 6)
  81#define SPI_RX_TAP_DELAY(x)                     (((x) & 0x3F) << 0)
  82
  83#define SPI_CS_TIMING1                          0x008
  84#define SPI_SETUP_HOLD(setup, hold)             (((setup) << 4) | (hold))
  85#define SPI_CS_SETUP_HOLD(reg, cs, val)                 \
  86                ((((val) & 0xFFu) << ((cs) * 8)) |      \
  87                ((reg) & ~(0xFFu << ((cs) * 8))))
  88
  89#define SPI_CS_TIMING2                          0x00C
  90#define CYCLES_BETWEEN_PACKETS_0(x)             (((x) & 0x1F) << 0)
  91#define CS_ACTIVE_BETWEEN_PACKETS_0             (1 << 5)
  92#define CYCLES_BETWEEN_PACKETS_1(x)             (((x) & 0x1F) << 8)
  93#define CS_ACTIVE_BETWEEN_PACKETS_1             (1 << 13)
  94#define CYCLES_BETWEEN_PACKETS_2(x)             (((x) & 0x1F) << 16)
  95#define CS_ACTIVE_BETWEEN_PACKETS_2             (1 << 21)
  96#define CYCLES_BETWEEN_PACKETS_3(x)             (((x) & 0x1F) << 24)
  97#define CS_ACTIVE_BETWEEN_PACKETS_3             (1 << 29)
  98#define SPI_SET_CS_ACTIVE_BETWEEN_PACKETS(reg, cs, val)         \
  99                (reg = (((val) & 0x1) << ((cs) * 8 + 5)) |      \
 100                        ((reg) & ~(1 << ((cs) * 8 + 5))))
 101#define SPI_SET_CYCLES_BETWEEN_PACKETS(reg, cs, val)            \
 102                (reg = (((val) & 0xF) << ((cs) * 8)) |          \
 103                        ((reg) & ~(0xF << ((cs) * 8))))
 104
 105#define SPI_TRANS_STATUS                        0x010
 106#define SPI_BLK_CNT(val)                        (((val) >> 0) & 0xFFFF)
 107#define SPI_SLV_IDLE_COUNT(val)                 (((val) >> 16) & 0xFF)
 108#define SPI_RDY                                 (1 << 30)
 109
 110#define SPI_FIFO_STATUS                         0x014
 111#define SPI_RX_FIFO_EMPTY                       (1 << 0)
 112#define SPI_RX_FIFO_FULL                        (1 << 1)
 113#define SPI_TX_FIFO_EMPTY                       (1 << 2)
 114#define SPI_TX_FIFO_FULL                        (1 << 3)
 115#define SPI_RX_FIFO_UNF                         (1 << 4)
 116#define SPI_RX_FIFO_OVF                         (1 << 5)
 117#define SPI_TX_FIFO_UNF                         (1 << 6)
 118#define SPI_TX_FIFO_OVF                         (1 << 7)
 119#define SPI_ERR                                 (1 << 8)
 120#define SPI_TX_FIFO_FLUSH                       (1 << 14)
 121#define SPI_RX_FIFO_FLUSH                       (1 << 15)
 122#define SPI_TX_FIFO_EMPTY_COUNT(val)            (((val) >> 16) & 0x7F)
 123#define SPI_RX_FIFO_FULL_COUNT(val)             (((val) >> 23) & 0x7F)
 124#define SPI_FRAME_END                           (1 << 30)
 125#define SPI_CS_INACTIVE                         (1 << 31)
 126
 127#define SPI_FIFO_ERROR                          (SPI_RX_FIFO_UNF | \
 128                        SPI_RX_FIFO_OVF | SPI_TX_FIFO_UNF | SPI_TX_FIFO_OVF)
 129#define SPI_FIFO_EMPTY                  (SPI_RX_FIFO_EMPTY | SPI_TX_FIFO_EMPTY)
 130
 131#define SPI_TX_DATA                             0x018
 132#define SPI_RX_DATA                             0x01C
 133
 134#define SPI_DMA_CTL                             0x020
 135#define SPI_TX_TRIG_1                           (0 << 15)
 136#define SPI_TX_TRIG_4                           (1 << 15)
 137#define SPI_TX_TRIG_8                           (2 << 15)
 138#define SPI_TX_TRIG_16                          (3 << 15)
 139#define SPI_TX_TRIG_MASK                        (3 << 15)
 140#define SPI_RX_TRIG_1                           (0 << 19)
 141#define SPI_RX_TRIG_4                           (1 << 19)
 142#define SPI_RX_TRIG_8                           (2 << 19)
 143#define SPI_RX_TRIG_16                          (3 << 19)
 144#define SPI_RX_TRIG_MASK                        (3 << 19)
 145#define SPI_IE_TX                               (1 << 28)
 146#define SPI_IE_RX                               (1 << 29)
 147#define SPI_CONT                                (1 << 30)
 148#define SPI_DMA                                 (1 << 31)
 149#define SPI_DMA_EN                              SPI_DMA
 150
 151#define SPI_DMA_BLK                             0x024
 152#define SPI_DMA_BLK_SET(x)                      (((x) & 0xFFFF) << 0)
 153
 154#define SPI_TX_FIFO                             0x108
 155#define SPI_RX_FIFO                             0x188
 156#define MAX_CHIP_SELECT                         4
 157#define SPI_FIFO_DEPTH                          64
 158#define DATA_DIR_TX                             (1 << 0)
 159#define DATA_DIR_RX                             (1 << 1)
 160
 161#define SPI_DMA_TIMEOUT                         (msecs_to_jiffies(1000))
 162#define DEFAULT_SPI_DMA_BUF_LEN                 (16*1024)
 163#define TX_FIFO_EMPTY_COUNT_MAX                 SPI_TX_FIFO_EMPTY_COUNT(0x40)
 164#define RX_FIFO_FULL_COUNT_ZERO                 SPI_RX_FIFO_FULL_COUNT(0)
 165#define MAX_HOLD_CYCLES                         16
 166#define SPI_DEFAULT_SPEED                       25000000
 167
 171struct tegra_spi_data {
 172        struct device                           *dev;
 173        struct spi_master                       *master;
 174        spinlock_t                              lock;
 175
 176        struct clk                              *clk;
 177        void __iomem                            *base;
 178        phys_addr_t                             phys;
 179        unsigned                                irq;
 180        int                                     dma_req_sel;
 181        u32                                     spi_max_frequency;
 182        u32                                     cur_speed;
 183
 184        struct spi_device                       *cur_spi;
 185        unsigned                                cur_pos;
 186        unsigned                                cur_len;
 187        unsigned                                words_per_32bit;
 188        unsigned                                bytes_per_word;
 189        unsigned                                curr_dma_words;
 190        unsigned                                cur_direction;
 191
 192        unsigned                                cur_rx_pos;
 193        unsigned                                cur_tx_pos;
 194
 195        unsigned                                dma_buf_size;
 196        unsigned                                max_buf_size;
 197        bool                                    is_curr_dma_xfer;
 198
 199        struct completion                       rx_dma_complete;
 200        struct completion                       tx_dma_complete;
 201
 202        u32                                     tx_status;
 203        u32                                     rx_status;
 204        u32                                     status_reg;
 205        bool                                    is_packed;
 206        unsigned long                           packed_size;
 207
 208        u32                                     command1_reg;
 209        u32                                     dma_control_reg;
 210        u32                                     def_command1_reg;
 211        u32                                     spi_cs_timing;
 212
 213        struct completion                       xfer_completion;
 214        struct spi_transfer                     *curr_xfer;
 215        struct dma_chan                         *rx_dma_chan;
 216        u32                                     *rx_dma_buf;
 217        dma_addr_t                              rx_dma_phys;
 218        struct dma_async_tx_descriptor          *rx_dma_desc;
 219
 220        struct dma_chan                         *tx_dma_chan;
 221        u32                                     *tx_dma_buf;
 222        dma_addr_t                              tx_dma_phys;
 223        struct dma_async_tx_descriptor          *tx_dma_desc;
 224};
 225
 226static int tegra_spi_runtime_suspend(struct device *dev);
 227static int tegra_spi_runtime_resume(struct device *dev);
 228
 229static inline unsigned long tegra_spi_readl(struct tegra_spi_data *tspi,
 230                unsigned long reg)
 231{
 232        return readl(tspi->base + reg);
 233}
 234
 235static inline void tegra_spi_writel(struct tegra_spi_data *tspi,
 236                unsigned long val, unsigned long reg)
 237{
 238        writel(val, tspi->base + reg);
 239
 240        /* Read back register to make sure that register writes completed */
 241        if (reg != SPI_TX_FIFO)
 242                readl(tspi->base + SPI_COMMAND1);
 243}
 244
 245static void tegra_spi_clear_status(struct tegra_spi_data *tspi)
 246{
 247        unsigned long val;
 248
 249        /* Write 1 to clear status register */
 250        val = tegra_spi_readl(tspi, SPI_TRANS_STATUS);
 251        tegra_spi_writel(tspi, val, SPI_TRANS_STATUS);
 252
 253        /* Clear fifo status error if any */
 254        val = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
 255        if (val & SPI_ERR)
 256                tegra_spi_writel(tspi, SPI_ERR | SPI_FIFO_ERROR,
 257                                SPI_FIFO_STATUS);
 258}
 259
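/*
 * Work out the parameters for the next chunk of the current transfer:
 * bytes per SPI word, packed vs. unpacked mode and how many words fit
 * in the DMA buffer.  Returns the number of 32-bit FIFO words needed,
 * which the caller uses to choose between CPU (PIO) and DMA mode.
 */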
 260static unsigned tegra_spi_calculate_curr_xfer_param(
 261        struct spi_device *spi, struct tegra_spi_data *tspi,
 262        struct spi_transfer *t)
 263{
 264        unsigned remain_len = t->len - tspi->cur_pos;
 265        unsigned max_word;
 266        unsigned bits_per_word = t->bits_per_word;
 267        unsigned max_len;
 268        unsigned total_fifo_words;
 269
 270        tspi->bytes_per_word = (bits_per_word - 1) / 8 + 1;
 271
 272        if (bits_per_word == 8 || bits_per_word == 16) {
 273                tspi->is_packed = 1;
 274                tspi->words_per_32bit = 32/bits_per_word;
 275        } else {
 276                tspi->is_packed = 0;
 277                tspi->words_per_32bit = 1;
 278        }
 279
 280        if (tspi->is_packed) {
 281                max_len = min(remain_len, tspi->max_buf_size);
 282                tspi->curr_dma_words = max_len/tspi->bytes_per_word;
 283                total_fifo_words = (max_len + 3) / 4;
 284        } else {
 285                max_word = (remain_len - 1) / tspi->bytes_per_word + 1;
 286                max_word = min(max_word, tspi->max_buf_size/4);
 287                tspi->curr_dma_words = max_word;
 288                total_fifo_words = max_word;
 289        }
 290        return total_fifo_words;
 291}
 292
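/*
 * Fill the TX FIFO from the client's tx_buf using PIO.  In packed mode
 * up to four bytes are packed into each 32-bit FIFO entry; in unpacked
 * mode each entry carries a single SPI word.  Advances cur_tx_pos and
 * returns the number of SPI words written.
 */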
 293static unsigned tegra_spi_fill_tx_fifo_from_client_txbuf(
 294        struct tegra_spi_data *tspi, struct spi_transfer *t)
 295{
 296        unsigned nbytes;
 297        unsigned tx_empty_count;
 298        unsigned long fifo_status;
 299        unsigned max_n_32bit;
 300        unsigned i, count;
 301        unsigned long x;
 302        unsigned int written_words;
 303        unsigned fifo_words_left;
 304        u8 *tx_buf = (u8 *)t->tx_buf + tspi->cur_tx_pos;
 305
 306        fifo_status = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
 307        tx_empty_count = SPI_TX_FIFO_EMPTY_COUNT(fifo_status);
 308
 309        if (tspi->is_packed) {
 310                fifo_words_left = tx_empty_count * tspi->words_per_32bit;
 311                written_words = min(fifo_words_left, tspi->curr_dma_words);
 312                nbytes = written_words * tspi->bytes_per_word;
 313                max_n_32bit = DIV_ROUND_UP(nbytes, 4);
 314                for (count = 0; count < max_n_32bit; count++) {
 315                        x = 0;
 316                        for (i = 0; (i < 4) && nbytes; i++, nbytes--)
 317                                x |= (*tx_buf++) << (i*8);
 318                        tegra_spi_writel(tspi, x, SPI_TX_FIFO);
 319                }
 320        } else {
 321                max_n_32bit = min(tspi->curr_dma_words,  tx_empty_count);
 322                written_words = max_n_32bit;
 323                nbytes = written_words * tspi->bytes_per_word;
 324                for (count = 0; count < max_n_32bit; count++) {
 325                        x = 0;
 326                        for (i = 0; nbytes && (i < tspi->bytes_per_word);
 327                                                        i++, nbytes--)
 328                                x |= ((*tx_buf++) << i*8);
 329                        tegra_spi_writel(tspi, x, SPI_TX_FIFO);
 330                }
 331        }
 332        tspi->cur_tx_pos += written_words * tspi->bytes_per_word;
 333        return written_words;
 334}
 335
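/*
 * Drain the RX FIFO into the client's rx_buf using PIO, handling both
 * packed and unpacked layouts.  Advances cur_rx_pos and returns the
 * number of SPI words read.
 */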
 336static unsigned int tegra_spi_read_rx_fifo_to_client_rxbuf(
 337                struct tegra_spi_data *tspi, struct spi_transfer *t)
 338{
 339        unsigned rx_full_count;
 340        unsigned long fifo_status;
 341        unsigned i, count;
 342        unsigned long x;
 343        unsigned int read_words = 0;
 344        unsigned len;
 345        u8 *rx_buf = (u8 *)t->rx_buf + tspi->cur_rx_pos;
 346
 347        fifo_status = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
 348        rx_full_count = SPI_RX_FIFO_FULL_COUNT(fifo_status);
 349        if (tspi->is_packed) {
 350                len = tspi->curr_dma_words * tspi->bytes_per_word;
 351                for (count = 0; count < rx_full_count; count++) {
 352                        x = tegra_spi_readl(tspi, SPI_RX_FIFO);
 353                        for (i = 0; len && (i < 4); i++, len--)
 354                                *rx_buf++ = (x >> i*8) & 0xFF;
 355                }
 356                tspi->cur_rx_pos += tspi->curr_dma_words * tspi->bytes_per_word;
 357                read_words += tspi->curr_dma_words;
 358        } else {
 359                unsigned int rx_mask;
 360                unsigned int bits_per_word = t->bits_per_word;
 361
 362                rx_mask = (1 << bits_per_word) - 1;
 363                for (count = 0; count < rx_full_count; count++) {
 364                        x = tegra_spi_readl(tspi, SPI_RX_FIFO);
 365                        x &= rx_mask;
 366                        for (i = 0; (i < tspi->bytes_per_word); i++)
 367                                *rx_buf++ = (x >> (i*8)) & 0xFF;
 368                }
 369                tspi->cur_rx_pos += rx_full_count * tspi->bytes_per_word;
 370                read_words += rx_full_count;
 371        }
 372        return read_words;
 373}
 374
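/*
 * Copy the client's tx_buf into the coherent TX bounce buffer (one SPI
 * word per 32-bit entry in unpacked mode) and sync it for device
 * access before the TX DMA is started.
 */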
 375static void tegra_spi_copy_client_txbuf_to_spi_txbuf(
 376                struct tegra_spi_data *tspi, struct spi_transfer *t)
 377{
 378        unsigned len;
 379
  380        /* Make the DMA buffer readable by the CPU */
 381        dma_sync_single_for_cpu(tspi->dev, tspi->tx_dma_phys,
 382                                tspi->dma_buf_size, DMA_TO_DEVICE);
 383
 384        if (tspi->is_packed) {
 385                len = tspi->curr_dma_words * tspi->bytes_per_word;
 386                memcpy(tspi->tx_dma_buf, t->tx_buf + tspi->cur_pos, len);
 387        } else {
 388                unsigned int i;
 389                unsigned int count;
 390                u8 *tx_buf = (u8 *)t->tx_buf + tspi->cur_tx_pos;
 391                unsigned consume = tspi->curr_dma_words * tspi->bytes_per_word;
 392                unsigned int x;
 393
 394                for (count = 0; count < tspi->curr_dma_words; count++) {
 395                        x = 0;
 396                        for (i = 0; consume && (i < tspi->bytes_per_word);
 397                                                        i++, consume--)
 398                                x |= ((*tx_buf++) << i * 8);
 399                        tspi->tx_dma_buf[count] = x;
 400                }
 401        }
 402        tspi->cur_tx_pos += tspi->curr_dma_words * tspi->bytes_per_word;
 403
  404        /* Make the DMA buffer visible to the DMA engine */
 405        dma_sync_single_for_device(tspi->dev, tspi->tx_dma_phys,
 406                                tspi->dma_buf_size, DMA_TO_DEVICE);
 407}
 408
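/*
 * Copy received data from the RX bounce buffer into the client's
 * rx_buf, masking each word to bits_per_word in unpacked mode, and
 * sync the buffer back for device access.
 */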
 409static void tegra_spi_copy_spi_rxbuf_to_client_rxbuf(
 410                struct tegra_spi_data *tspi, struct spi_transfer *t)
 411{
 412        unsigned len;
 413
  414        /* Make the DMA buffer readable by the CPU */
 415        dma_sync_single_for_cpu(tspi->dev, tspi->rx_dma_phys,
 416                tspi->dma_buf_size, DMA_FROM_DEVICE);
 417
 418        if (tspi->is_packed) {
 419                len = tspi->curr_dma_words * tspi->bytes_per_word;
 420                memcpy(t->rx_buf + tspi->cur_rx_pos, tspi->rx_dma_buf, len);
 421        } else {
 422                unsigned int i;
 423                unsigned int count;
 424                unsigned char *rx_buf = t->rx_buf + tspi->cur_rx_pos;
 425                unsigned int x;
 426                unsigned int rx_mask;
 427                unsigned int bits_per_word = t->bits_per_word;
 428
 429                rx_mask = (1 << bits_per_word) - 1;
 430                for (count = 0; count < tspi->curr_dma_words; count++) {
 431                        x = tspi->rx_dma_buf[count];
 432                        x &= rx_mask;
 433                        for (i = 0; (i < tspi->bytes_per_word); i++)
 434                                *rx_buf++ = (x >> (i*8)) & 0xFF;
 435                }
 436        }
 437        tspi->cur_rx_pos += tspi->curr_dma_words * tspi->bytes_per_word;
 438
  439        /* Hand the DMA buffer back to the DMA engine */
 440        dma_sync_single_for_device(tspi->dev, tspi->rx_dma_phys,
 441                tspi->dma_buf_size, DMA_FROM_DEVICE);
 442}
 443
 444static void tegra_spi_dma_complete(void *args)
 445{
 446        struct completion *dma_complete = args;
 447
 448        complete(dma_complete);
 449}
 450
 451static int tegra_spi_start_tx_dma(struct tegra_spi_data *tspi, int len)
 452{
 453        INIT_COMPLETION(tspi->tx_dma_complete);
 454        tspi->tx_dma_desc = dmaengine_prep_slave_single(tspi->tx_dma_chan,
 455                                tspi->tx_dma_phys, len, DMA_MEM_TO_DEV,
 456                                DMA_PREP_INTERRUPT |  DMA_CTRL_ACK);
 457        if (!tspi->tx_dma_desc) {
 458                dev_err(tspi->dev, "Not able to get desc for Tx\n");
 459                return -EIO;
 460        }
 461
 462        tspi->tx_dma_desc->callback = tegra_spi_dma_complete;
 463        tspi->tx_dma_desc->callback_param = &tspi->tx_dma_complete;
 464
 465        dmaengine_submit(tspi->tx_dma_desc);
 466        dma_async_issue_pending(tspi->tx_dma_chan);
 467        return 0;
 468}
 469
 470static int tegra_spi_start_rx_dma(struct tegra_spi_data *tspi, int len)
 471{
 472        INIT_COMPLETION(tspi->rx_dma_complete);
 473        tspi->rx_dma_desc = dmaengine_prep_slave_single(tspi->rx_dma_chan,
 474                                tspi->rx_dma_phys, len, DMA_DEV_TO_MEM,
 475                                DMA_PREP_INTERRUPT |  DMA_CTRL_ACK);
 476        if (!tspi->rx_dma_desc) {
 477                dev_err(tspi->dev, "Not able to get desc for Rx\n");
 478                return -EIO;
 479        }
 480
 481        tspi->rx_dma_desc->callback = tegra_spi_dma_complete;
 482        tspi->rx_dma_desc->callback_param = &tspi->rx_dma_complete;
 483
 484        dmaengine_submit(tspi->rx_dma_desc);
 485        dma_async_issue_pending(tspi->rx_dma_chan);
 486        return 0;
 487}
 488
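/*
 * Start a DMA based transfer of the current chunk: program the block
 * count and FIFO trigger levels, prepare the TX and/or RX DMA
 * descriptors and finally set SPI_DMA_EN to start the transfer.
 */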
 489static int tegra_spi_start_dma_based_transfer(
 490                struct tegra_spi_data *tspi, struct spi_transfer *t)
 491{
 492        unsigned long val;
 493        unsigned int len;
 494        int ret = 0;
 495        unsigned long status;
 496
  497        /* Make sure that the Rx and Tx FIFOs are empty */
 498        status = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
 499        if ((status & SPI_FIFO_EMPTY) != SPI_FIFO_EMPTY) {
 500                dev_err(tspi->dev,
  501                        "Rx/Tx FIFOs are not empty, status 0x%08lx\n", status);
 502                return -EIO;
 503        }
 504
 505        val = SPI_DMA_BLK_SET(tspi->curr_dma_words - 1);
 506        tegra_spi_writel(tspi, val, SPI_DMA_BLK);
 507
 508        if (tspi->is_packed)
 509                len = DIV_ROUND_UP(tspi->curr_dma_words * tspi->bytes_per_word,
 510                                        4) * 4;
 511        else
 512                len = tspi->curr_dma_words * 4;
 513
  514        /* Set the FIFO trigger level based on the transfer length */
 515        if (len & 0xF)
 516                val |= SPI_TX_TRIG_1 | SPI_RX_TRIG_1;
 517        else if (((len) >> 4) & 0x1)
 518                val |= SPI_TX_TRIG_4 | SPI_RX_TRIG_4;
 519        else
 520                val |= SPI_TX_TRIG_8 | SPI_RX_TRIG_8;
 521
 522        if (tspi->cur_direction & DATA_DIR_TX)
 523                val |= SPI_IE_TX;
 524
 525        if (tspi->cur_direction & DATA_DIR_RX)
 526                val |= SPI_IE_RX;
 527
 528        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
 529        tspi->dma_control_reg = val;
 530
 531        if (tspi->cur_direction & DATA_DIR_TX) {
 532                tegra_spi_copy_client_txbuf_to_spi_txbuf(tspi, t);
 533                ret = tegra_spi_start_tx_dma(tspi, len);
 534                if (ret < 0) {
 535                        dev_err(tspi->dev,
 536                                "Starting tx dma failed, err %d\n", ret);
 537                        return ret;
 538                }
 539        }
 540
 541        if (tspi->cur_direction & DATA_DIR_RX) {
  542                /* Make the DMA buffer visible to the DMA engine */
 543                dma_sync_single_for_device(tspi->dev, tspi->rx_dma_phys,
 544                                tspi->dma_buf_size, DMA_FROM_DEVICE);
 545
 546                ret = tegra_spi_start_rx_dma(tspi, len);
 547                if (ret < 0) {
 548                        dev_err(tspi->dev,
 549                                "Starting rx dma failed, err %d\n", ret);
 550                        if (tspi->cur_direction & DATA_DIR_TX)
 551                                dmaengine_terminate_all(tspi->tx_dma_chan);
 552                        return ret;
 553                }
 554        }
 555        tspi->is_curr_dma_xfer = true;
 556        tspi->dma_control_reg = val;
 557
 558        val |= SPI_DMA_EN;
 559        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
 560        return ret;
 561}
 562
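/*
 * Start an interrupt driven PIO transfer of the current chunk: fill
 * the TX FIFO if needed, program the block count, enable the TX/RX
 * interrupts and set SPI_DMA_EN.
 */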
 563static int tegra_spi_start_cpu_based_transfer(
 564                struct tegra_spi_data *tspi, struct spi_transfer *t)
 565{
 566        unsigned long val;
 567        unsigned cur_words;
 568
 569        if (tspi->cur_direction & DATA_DIR_TX)
 570                cur_words = tegra_spi_fill_tx_fifo_from_client_txbuf(tspi, t);
 571        else
 572                cur_words = tspi->curr_dma_words;
 573
 574        val = SPI_DMA_BLK_SET(cur_words - 1);
 575        tegra_spi_writel(tspi, val, SPI_DMA_BLK);
 576
 577        val = 0;
 578        if (tspi->cur_direction & DATA_DIR_TX)
 579                val |= SPI_IE_TX;
 580
 581        if (tspi->cur_direction & DATA_DIR_RX)
 582                val |= SPI_IE_RX;
 583
 584        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
 585        tspi->dma_control_reg = val;
 586
 587        tspi->is_curr_dma_xfer = false;
 588
 589        val |= SPI_DMA_EN;
 590        tegra_spi_writel(tspi, val, SPI_DMA_CTL);
 591        return 0;
 592}
 593
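/*
 * Request a DMA channel, allocate a coherent bounce buffer and set up
 * the slave configuration for one direction (RX when dma_to_memory is
 * true, TX otherwise).
 */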
 594static int tegra_spi_init_dma_param(struct tegra_spi_data *tspi,
 595                        bool dma_to_memory)
 596{
 597        struct dma_chan *dma_chan;
 598        u32 *dma_buf;
 599        dma_addr_t dma_phys;
 600        int ret;
 601        struct dma_slave_config dma_sconfig;
 602        dma_cap_mask_t mask;
 603
 604        dma_cap_zero(mask);
 605        dma_cap_set(DMA_SLAVE, mask);
 606        dma_chan = dma_request_channel(mask, NULL, NULL);
 607        if (!dma_chan) {
 608                dev_err(tspi->dev,
 609                        "Dma channel is not available, will try later\n");
 610                return -EPROBE_DEFER;
 611        }
 612
 613        dma_buf = dma_alloc_coherent(tspi->dev, tspi->dma_buf_size,
 614                                &dma_phys, GFP_KERNEL);
 615        if (!dma_buf) {
  616                dev_err(tspi->dev, "Not able to allocate the DMA buffer\n");
 617                dma_release_channel(dma_chan);
 618                return -ENOMEM;
 619        }
 620
 621        dma_sconfig.slave_id = tspi->dma_req_sel;
 622        if (dma_to_memory) {
 623                dma_sconfig.src_addr = tspi->phys + SPI_RX_FIFO;
 624                dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
 625                dma_sconfig.src_maxburst = 0;
 626        } else {
 627                dma_sconfig.dst_addr = tspi->phys + SPI_TX_FIFO;
 628                dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
 629                dma_sconfig.dst_maxburst = 0;
 630        }
 631
 632        ret = dmaengine_slave_config(dma_chan, &dma_sconfig);
 633        if (ret)
 634                goto scrub;
 635        if (dma_to_memory) {
 636                tspi->rx_dma_chan = dma_chan;
 637                tspi->rx_dma_buf = dma_buf;
 638                tspi->rx_dma_phys = dma_phys;
 639        } else {
 640                tspi->tx_dma_chan = dma_chan;
 641                tspi->tx_dma_buf = dma_buf;
 642                tspi->tx_dma_phys = dma_phys;
 643        }
 644        return 0;
 645
 646scrub:
 647        dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys);
 648        dma_release_channel(dma_chan);
 649        return ret;
 650}
 651
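/* Release the bounce buffer and DMA channel for one direction. */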
 652static void tegra_spi_deinit_dma_param(struct tegra_spi_data *tspi,
 653        bool dma_to_memory)
 654{
 655        u32 *dma_buf;
 656        dma_addr_t dma_phys;
 657        struct dma_chan *dma_chan;
 658
 659        if (dma_to_memory) {
 660                dma_buf = tspi->rx_dma_buf;
 661                dma_chan = tspi->rx_dma_chan;
 662                dma_phys = tspi->rx_dma_phys;
 663                tspi->rx_dma_chan = NULL;
 664                tspi->rx_dma_buf = NULL;
 665        } else {
 666                dma_buf = tspi->tx_dma_buf;
 667                dma_chan = tspi->tx_dma_chan;
 668                dma_phys = tspi->tx_dma_phys;
 669                tspi->tx_dma_buf = NULL;
 670                tspi->tx_dma_chan = NULL;
 671        }
 672        if (!dma_chan)
 673                return;
 674
 675        dma_free_coherent(tspi->dev, tspi->dma_buf_size, dma_buf, dma_phys);
 676        dma_release_channel(dma_chan);
 677}
 678
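/*
 * Program COMMAND1 (SPI mode, bit length, chip select and data
 * direction) for one spi_transfer and start it in CPU or DMA mode
 * depending on how many FIFO words it needs.
 */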
 679static int tegra_spi_start_transfer_one(struct spi_device *spi,
 680                struct spi_transfer *t, bool is_first_of_msg,
 681                bool is_single_xfer)
 682{
 683        struct tegra_spi_data *tspi = spi_master_get_devdata(spi->master);
 684        u32 speed = t->speed_hz;
 685        u8 bits_per_word = t->bits_per_word;
 686        unsigned total_fifo_words;
 687        int ret;
 688        unsigned long command1;
 689        int req_mode;
 690
 691        if (speed != tspi->cur_speed) {
 692                clk_set_rate(tspi->clk, speed);
 693                tspi->cur_speed = speed;
 694        }
 695
 696        tspi->cur_spi = spi;
 697        tspi->cur_pos = 0;
 698        tspi->cur_rx_pos = 0;
 699        tspi->cur_tx_pos = 0;
 700        tspi->curr_xfer = t;
 701        total_fifo_words = tegra_spi_calculate_curr_xfer_param(spi, tspi, t);
 702
 703        if (is_first_of_msg) {
 704                tegra_spi_clear_status(tspi);
 705
 706                command1 = tspi->def_command1_reg;
 707                command1 |= SPI_BIT_LENGTH(bits_per_word - 1);
 708
 709                command1 &= ~SPI_CONTROL_MODE_MASK;
 710                req_mode = spi->mode & 0x3;
 711                if (req_mode == SPI_MODE_0)
 712                        command1 |= SPI_CONTROL_MODE_0;
 713                else if (req_mode == SPI_MODE_1)
 714                        command1 |= SPI_CONTROL_MODE_1;
 715                else if (req_mode == SPI_MODE_2)
 716                        command1 |= SPI_CONTROL_MODE_2;
 717                else if (req_mode == SPI_MODE_3)
 718                        command1 |= SPI_CONTROL_MODE_3;
 719
 720                tegra_spi_writel(tspi, command1, SPI_COMMAND1);
 721
 722                command1 |= SPI_CS_SW_HW;
 723                if (spi->mode & SPI_CS_HIGH)
 724                        command1 |= SPI_CS_SS_VAL;
 725                else
 726                        command1 &= ~SPI_CS_SS_VAL;
 727
 728                tegra_spi_writel(tspi, 0, SPI_COMMAND2);
 729        } else {
 730                command1 = tspi->command1_reg;
 731                command1 &= ~SPI_BIT_LENGTH(~0);
 732                command1 |= SPI_BIT_LENGTH(bits_per_word - 1);
 733        }
 734
 735        if (tspi->is_packed)
 736                command1 |= SPI_PACKED;
 737
 738        command1 &= ~(SPI_CS_SEL_MASK | SPI_TX_EN | SPI_RX_EN);
 739        tspi->cur_direction = 0;
 740        if (t->rx_buf) {
 741                command1 |= SPI_RX_EN;
 742                tspi->cur_direction |= DATA_DIR_RX;
 743        }
 744        if (t->tx_buf) {
 745                command1 |= SPI_TX_EN;
 746                tspi->cur_direction |= DATA_DIR_TX;
 747        }
 748        command1 |= SPI_CS_SEL(spi->chip_select);
 749        tegra_spi_writel(tspi, command1, SPI_COMMAND1);
 750        tspi->command1_reg = command1;
 751
  752        dev_dbg(tspi->dev, "def_command1 0x%x, written command1 0x%lx\n",
 753                                tspi->def_command1_reg, command1);
 754
 755        if (total_fifo_words > SPI_FIFO_DEPTH)
 756                ret = tegra_spi_start_dma_based_transfer(tspi, t);
 757        else
 758                ret = tegra_spi_start_cpu_based_transfer(tspi, t);
 759        return ret;
 760}
 761
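/*
 * Per-device setup: record the chip-select inactive polarity in
 * def_command1_reg according to SPI_CS_HIGH and fall back to the
 * controller's maximum frequency if the device did not set one.
 */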
 762static int tegra_spi_setup(struct spi_device *spi)
 763{
 764        struct tegra_spi_data *tspi = spi_master_get_devdata(spi->master);
 765        unsigned long val;
 766        unsigned long flags;
 767        int ret;
 768        unsigned int cs_pol_bit[MAX_CHIP_SELECT] = {
 769                        SPI_CS_POL_INACTIVE_0,
 770                        SPI_CS_POL_INACTIVE_1,
 771                        SPI_CS_POL_INACTIVE_2,
 772                        SPI_CS_POL_INACTIVE_3,
 773        };
 774
 775        dev_dbg(&spi->dev, "setup %d bpw, %scpol, %scpha, %dHz\n",
 776                spi->bits_per_word,
 777                spi->mode & SPI_CPOL ? "" : "~",
 778                spi->mode & SPI_CPHA ? "" : "~",
 779                spi->max_speed_hz);
 780
 781        BUG_ON(spi->chip_select >= MAX_CHIP_SELECT);
 782
  783        /* Default to the controller's max frequency if the device has not set one */
 784        spi->max_speed_hz = spi->max_speed_hz ? : tspi->spi_max_frequency;
 785
 786        ret = pm_runtime_get_sync(tspi->dev);
 787        if (ret < 0) {
 788                dev_err(tspi->dev, "pm runtime failed, e = %d\n", ret);
 789                return ret;
 790        }
 791
 792        spin_lock_irqsave(&tspi->lock, flags);
 793        val = tspi->def_command1_reg;
 794        if (spi->mode & SPI_CS_HIGH)
 795                val &= ~cs_pol_bit[spi->chip_select];
 796        else
 797                val |= cs_pol_bit[spi->chip_select];
 798        tspi->def_command1_reg = val;
 799        tegra_spi_writel(tspi, tspi->def_command1_reg, SPI_COMMAND1);
 800        spin_unlock_irqrestore(&tspi->lock, flags);
 801
 802        pm_runtime_put(tspi->dev);
 803        return 0;
 804}
 805
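/*
 * transfer_one_message() hook: run each transfer in the message,
 * waiting for the threaded ISR to signal completion of every chunk,
 * and restore the default COMMAND1 value when the message is done.
 */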
 806static int tegra_spi_transfer_one_message(struct spi_master *master,
 807                        struct spi_message *msg)
 808{
 809        bool is_first_msg = true;
 810        int single_xfer;
 811        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
 812        struct spi_transfer *xfer;
 813        struct spi_device *spi = msg->spi;
 814        int ret;
 815
 816        msg->status = 0;
 817        msg->actual_length = 0;
 818
 819        single_xfer = list_is_singular(&msg->transfers);
 820        list_for_each_entry(xfer, &msg->transfers, transfer_list) {
 821                INIT_COMPLETION(tspi->xfer_completion);
 822                ret = tegra_spi_start_transfer_one(spi, xfer,
 823                                        is_first_msg, single_xfer);
 824                if (ret < 0) {
 825                        dev_err(tspi->dev,
 826                                "spi can not start transfer, err %d\n", ret);
 827                        goto exit;
 828                }
 829                is_first_msg = false;
 830                ret = wait_for_completion_timeout(&tspi->xfer_completion,
 831                                                SPI_DMA_TIMEOUT);
 832                if (WARN_ON(ret == 0)) {
 833                        dev_err(tspi->dev,
  834                                "spi transfer timeout, err %d\n", ret);
 835                        ret = -EIO;
 836                        goto exit;
 837                }
 838
 839                if (tspi->tx_status ||  tspi->rx_status) {
 840                        dev_err(tspi->dev, "Error in Transfer\n");
 841                        ret = -EIO;
 842                        goto exit;
 843                }
 844                msg->actual_length += xfer->len;
 845                if (xfer->cs_change && xfer->delay_usecs) {
 846                        tegra_spi_writel(tspi, tspi->def_command1_reg,
 847                                        SPI_COMMAND1);
 848                        udelay(xfer->delay_usecs);
 849                }
 850        }
 851        ret = 0;
 852exit:
 853        tegra_spi_writel(tspi, tspi->def_command1_reg, SPI_COMMAND1);
 854        msg->status = ret;
 855        spi_finalize_current_message(master);
 856        return ret;
 857}
 858
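/*
 * Completion handling for PIO transfers: on a FIFO error reset the
 * controller, otherwise drain the RX FIFO and either complete the
 * transfer or start the next chunk.
 */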
 859static irqreturn_t handle_cpu_based_xfer(struct tegra_spi_data *tspi)
 860{
 861        struct spi_transfer *t = tspi->curr_xfer;
 862        unsigned long flags;
 863
 864        spin_lock_irqsave(&tspi->lock, flags);
 865        if (tspi->tx_status ||  tspi->rx_status) {
 866                dev_err(tspi->dev, "CpuXfer ERROR bit set 0x%x\n",
 867                        tspi->status_reg);
 868                dev_err(tspi->dev, "CpuXfer 0x%08x:0x%08x\n",
 869                        tspi->command1_reg, tspi->dma_control_reg);
 870                tegra_periph_reset_assert(tspi->clk);
 871                udelay(2);
 872                tegra_periph_reset_deassert(tspi->clk);
 873                complete(&tspi->xfer_completion);
 874                goto exit;
 875        }
 876
 877        if (tspi->cur_direction & DATA_DIR_RX)
 878                tegra_spi_read_rx_fifo_to_client_rxbuf(tspi, t);
 879
 880        if (tspi->cur_direction & DATA_DIR_TX)
 881                tspi->cur_pos = tspi->cur_tx_pos;
 882        else
 883                tspi->cur_pos = tspi->cur_rx_pos;
 884
 885        if (tspi->cur_pos == t->len) {
 886                complete(&tspi->xfer_completion);
 887                goto exit;
 888        }
 889
 890        tegra_spi_calculate_curr_xfer_param(tspi->cur_spi, tspi, t);
 891        tegra_spi_start_cpu_based_transfer(tspi, t);
 892exit:
 893        spin_unlock_irqrestore(&tspi->lock, flags);
 894        return IRQ_HANDLED;
 895}
 896
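/*
 * Completion handling for DMA transfers: wait for the DMA callbacks,
 * abort and reset the controller on error, copy RX data out of the
 * bounce buffer and continue with the next chunk if more data remains.
 */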
 897static irqreturn_t handle_dma_based_xfer(struct tegra_spi_data *tspi)
 898{
 899        struct spi_transfer *t = tspi->curr_xfer;
 900        long wait_status;
 901        int err = 0;
 902        unsigned total_fifo_words;
 903        unsigned long flags;
 904
 905        /* Abort dmas if any error */
 906        if (tspi->cur_direction & DATA_DIR_TX) {
 907                if (tspi->tx_status) {
 908                        dmaengine_terminate_all(tspi->tx_dma_chan);
 909                        err += 1;
 910                } else {
 911                        wait_status = wait_for_completion_interruptible_timeout(
 912                                &tspi->tx_dma_complete, SPI_DMA_TIMEOUT);
 913                        if (wait_status <= 0) {
 914                                dmaengine_terminate_all(tspi->tx_dma_chan);
 915                                dev_err(tspi->dev, "TxDma Xfer failed\n");
 916                                err += 1;
 917                        }
 918                }
 919        }
 920
 921        if (tspi->cur_direction & DATA_DIR_RX) {
 922                if (tspi->rx_status) {
 923                        dmaengine_terminate_all(tspi->rx_dma_chan);
 924                        err += 2;
 925                } else {
 926                        wait_status = wait_for_completion_interruptible_timeout(
 927                                &tspi->rx_dma_complete, SPI_DMA_TIMEOUT);
 928                        if (wait_status <= 0) {
 929                                dmaengine_terminate_all(tspi->rx_dma_chan);
 930                                dev_err(tspi->dev, "RxDma Xfer failed\n");
 931                                err += 2;
 932                        }
 933                }
 934        }
 935
 936        spin_lock_irqsave(&tspi->lock, flags);
 937        if (err) {
 938                dev_err(tspi->dev, "DmaXfer: ERROR bit set 0x%x\n",
 939                        tspi->status_reg);
 940                dev_err(tspi->dev, "DmaXfer 0x%08x:0x%08x\n",
 941                        tspi->command1_reg, tspi->dma_control_reg);
 942                tegra_periph_reset_assert(tspi->clk);
 943                udelay(2);
 944                tegra_periph_reset_deassert(tspi->clk);
 945                complete(&tspi->xfer_completion);
 946                spin_unlock_irqrestore(&tspi->lock, flags);
 947                return IRQ_HANDLED;
 948        }
 949
 950        if (tspi->cur_direction & DATA_DIR_RX)
 951                tegra_spi_copy_spi_rxbuf_to_client_rxbuf(tspi, t);
 952
 953        if (tspi->cur_direction & DATA_DIR_TX)
 954                tspi->cur_pos = tspi->cur_tx_pos;
 955        else
 956                tspi->cur_pos = tspi->cur_rx_pos;
 957
 958        if (tspi->cur_pos == t->len) {
 959                complete(&tspi->xfer_completion);
 960                goto exit;
 961        }
 962
 963        /* Continue transfer in current message */
 964        total_fifo_words = tegra_spi_calculate_curr_xfer_param(tspi->cur_spi,
 965                                                        tspi, t);
 966        if (total_fifo_words > SPI_FIFO_DEPTH)
 967                err = tegra_spi_start_dma_based_transfer(tspi, t);
 968        else
 969                err = tegra_spi_start_cpu_based_transfer(tspi, t);
 970
 971exit:
 972        spin_unlock_irqrestore(&tspi->lock, flags);
 973        return IRQ_HANDLED;
 974}
 975
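/* Threaded IRQ handler: dispatch to the PIO or DMA completion path. */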
 976static irqreturn_t tegra_spi_isr_thread(int irq, void *context_data)
 977{
 978        struct tegra_spi_data *tspi = context_data;
 979
 980        if (!tspi->is_curr_dma_xfer)
 981                return handle_cpu_based_xfer(tspi);
 982        return handle_dma_based_xfer(tspi);
 983}
 984
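/*
 * Hard IRQ handler: latch the FIFO status and error bits, clear the
 * interrupt status and defer the real work to the threaded handler.
 */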
 985static irqreturn_t tegra_spi_isr(int irq, void *context_data)
 986{
 987        struct tegra_spi_data *tspi = context_data;
 988
 989        tspi->status_reg = tegra_spi_readl(tspi, SPI_FIFO_STATUS);
 990        if (tspi->cur_direction & DATA_DIR_TX)
 991                tspi->tx_status = tspi->status_reg &
 992                                        (SPI_TX_FIFO_UNF | SPI_TX_FIFO_OVF);
 993
 994        if (tspi->cur_direction & DATA_DIR_RX)
 995                tspi->rx_status = tspi->status_reg &
 996                                        (SPI_RX_FIFO_OVF | SPI_RX_FIFO_UNF);
 997        tegra_spi_clear_status(tspi);
 998
 999        return IRQ_WAKE_THREAD;
1000}
1001
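/*
 * Read the DMA request selector and the maximum bus frequency from the
 * device tree, defaulting to 25 MHz if no frequency is given.
 */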
1002static void tegra_spi_parse_dt(struct platform_device *pdev,
1003        struct tegra_spi_data *tspi)
1004{
1005        struct device_node *np = pdev->dev.of_node;
1006        u32 of_dma[2];
1007
1008        if (of_property_read_u32_array(np, "nvidia,dma-request-selector",
1009                                of_dma, 2) >= 0)
1010                tspi->dma_req_sel = of_dma[1];
1011
1012        if (of_property_read_u32(np, "spi-max-frequency",
1013                                &tspi->spi_max_frequency))
 1014                tspi->spi_max_frequency = SPI_DEFAULT_SPEED; /* 25 MHz */
1015}
1016
1017static struct of_device_id tegra_spi_of_match[] = {
1018        { .compatible = "nvidia,tegra114-spi", },
1019        {}
1020};
1021MODULE_DEVICE_TABLE(of, tegra_spi_of_match);
1022
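/*
 * Probe: map the controller registers, request the IRQ and clock, set
 * up the DMA channels if a request selector was given in DT, program
 * the default COMMAND1 value and register the spi_master.
 */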
1023static int tegra_spi_probe(struct platform_device *pdev)
1024{
1025        struct spi_master       *master;
1026        struct tegra_spi_data   *tspi;
1027        struct resource         *r;
1028        int ret, spi_irq;
1029
1030        master = spi_alloc_master(&pdev->dev, sizeof(*tspi));
1031        if (!master) {
1032                dev_err(&pdev->dev, "master allocation failed\n");
1033                return -ENOMEM;
1034        }
1035        platform_set_drvdata(pdev, master);
1036        tspi = spi_master_get_devdata(master);
1037
1038        /* Parse DT */
1039        tegra_spi_parse_dt(pdev, tspi);
1040
1041        /* the spi->mode bits understood by this driver: */
1042        master->mode_bits = SPI_CPOL | SPI_CPHA | SPI_CS_HIGH;
1043        master->setup = tegra_spi_setup;
1044        master->transfer_one_message = tegra_spi_transfer_one_message;
1045        master->num_chipselect = MAX_CHIP_SELECT;
1046        master->bus_num = -1;
1047        master->auto_runtime_pm = true;
1048
1049        tspi->master = master;
1050        tspi->dev = &pdev->dev;
1051        spin_lock_init(&tspi->lock);
1052
1053        r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
1054        tspi->base = devm_ioremap_resource(&pdev->dev, r);
1055        if (IS_ERR(tspi->base)) {
1056                ret = PTR_ERR(tspi->base);
1057                goto exit_free_master;
1058        }
1059        tspi->phys = r->start;
1060
1061        spi_irq = platform_get_irq(pdev, 0);
1062        tspi->irq = spi_irq;
1063        ret = request_threaded_irq(tspi->irq, tegra_spi_isr,
1064                        tegra_spi_isr_thread, IRQF_ONESHOT,
1065                        dev_name(&pdev->dev), tspi);
1066        if (ret < 0) {
1067                dev_err(&pdev->dev, "Failed to register ISR for IRQ %d\n",
1068                                        tspi->irq);
1069                goto exit_free_master;
1070        }
1071
1072        tspi->clk = devm_clk_get(&pdev->dev, "spi");
1073        if (IS_ERR(tspi->clk)) {
1074                dev_err(&pdev->dev, "can not get clock\n");
1075                ret = PTR_ERR(tspi->clk);
1076                goto exit_free_irq;
1077        }
1078
1079        tspi->max_buf_size = SPI_FIFO_DEPTH << 2;
1080        tspi->dma_buf_size = DEFAULT_SPI_DMA_BUF_LEN;
1081
1082        if (tspi->dma_req_sel) {
1083                ret = tegra_spi_init_dma_param(tspi, true);
1084                if (ret < 0) {
1085                        dev_err(&pdev->dev, "RxDma Init failed, err %d\n", ret);
1086                        goto exit_free_irq;
1087                }
1088
1089                ret = tegra_spi_init_dma_param(tspi, false);
1090                if (ret < 0) {
1091                        dev_err(&pdev->dev, "TxDma Init failed, err %d\n", ret);
1092                        goto exit_rx_dma_free;
1093                }
1094                tspi->max_buf_size = tspi->dma_buf_size;
1095                init_completion(&tspi->tx_dma_complete);
1096                init_completion(&tspi->rx_dma_complete);
1097        }
1098
1099        init_completion(&tspi->xfer_completion);
1100
1101        pm_runtime_enable(&pdev->dev);
1102        if (!pm_runtime_enabled(&pdev->dev)) {
1103                ret = tegra_spi_runtime_resume(&pdev->dev);
1104                if (ret)
1105                        goto exit_pm_disable;
1106        }
1107
1108        ret = pm_runtime_get_sync(&pdev->dev);
1109        if (ret < 0) {
1110                dev_err(&pdev->dev, "pm runtime get failed, e = %d\n", ret);
1111                goto exit_pm_disable;
1112        }
1113        tspi->def_command1_reg  = SPI_M_S;
1114        tegra_spi_writel(tspi, tspi->def_command1_reg, SPI_COMMAND1);
1115        pm_runtime_put(&pdev->dev);
1116
1117        master->dev.of_node = pdev->dev.of_node;
1118        ret = spi_register_master(master);
1119        if (ret < 0) {
 1120                dev_err(&pdev->dev, "cannot register SPI master, err %d\n", ret);
1121                goto exit_pm_disable;
1122        }
1123        return ret;
1124
1125exit_pm_disable:
1126        pm_runtime_disable(&pdev->dev);
1127        if (!pm_runtime_status_suspended(&pdev->dev))
1128                tegra_spi_runtime_suspend(&pdev->dev);
1129        tegra_spi_deinit_dma_param(tspi, false);
1130exit_rx_dma_free:
1131        tegra_spi_deinit_dma_param(tspi, true);
1132exit_free_irq:
1133        free_irq(spi_irq, tspi);
1134exit_free_master:
1135        spi_master_put(master);
1136        return ret;
1137}
1138
1139static int tegra_spi_remove(struct platform_device *pdev)
1140{
1141        struct spi_master *master = platform_get_drvdata(pdev);
1142        struct tegra_spi_data   *tspi = spi_master_get_devdata(master);
1143
1144        free_irq(tspi->irq, tspi);
1145        spi_unregister_master(master);
1146
1147        if (tspi->tx_dma_chan)
1148                tegra_spi_deinit_dma_param(tspi, false);
1149
1150        if (tspi->rx_dma_chan)
1151                tegra_spi_deinit_dma_param(tspi, true);
1152
1153        pm_runtime_disable(&pdev->dev);
1154        if (!pm_runtime_status_suspended(&pdev->dev))
1155                tegra_spi_runtime_suspend(&pdev->dev);
1156
1157        return 0;
1158}
1159
1160#ifdef CONFIG_PM_SLEEP
1161static int tegra_spi_suspend(struct device *dev)
1162{
1163        struct spi_master *master = dev_get_drvdata(dev);
1164
1165        return spi_master_suspend(master);
1166}
1167
1168static int tegra_spi_resume(struct device *dev)
1169{
1170        struct spi_master *master = dev_get_drvdata(dev);
1171        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
1172        int ret;
1173
1174        ret = pm_runtime_get_sync(dev);
1175        if (ret < 0) {
1176                dev_err(dev, "pm runtime failed, e = %d\n", ret);
1177                return ret;
1178        }
1179        tegra_spi_writel(tspi, tspi->command1_reg, SPI_COMMAND1);
1180        pm_runtime_put(dev);
1181
1182        return spi_master_resume(master);
1183}
1184#endif
1185
1186static int tegra_spi_runtime_suspend(struct device *dev)
1187{
1188        struct spi_master *master = dev_get_drvdata(dev);
1189        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
1190
 1191        /* Flush all writes pending in the PPSB queue by reading back */
1192        tegra_spi_readl(tspi, SPI_COMMAND1);
1193
1194        clk_disable_unprepare(tspi->clk);
1195        return 0;
1196}
1197
1198static int tegra_spi_runtime_resume(struct device *dev)
1199{
1200        struct spi_master *master = dev_get_drvdata(dev);
1201        struct tegra_spi_data *tspi = spi_master_get_devdata(master);
1202        int ret;
1203
1204        ret = clk_prepare_enable(tspi->clk);
1205        if (ret < 0) {
1206                dev_err(tspi->dev, "clk_prepare failed: %d\n", ret);
1207                return ret;
1208        }
1209        return 0;
1210}
1211
1212static const struct dev_pm_ops tegra_spi_pm_ops = {
1213        SET_RUNTIME_PM_OPS(tegra_spi_runtime_suspend,
1214                tegra_spi_runtime_resume, NULL)
1215        SET_SYSTEM_SLEEP_PM_OPS(tegra_spi_suspend, tegra_spi_resume)
1216};
1217static struct platform_driver tegra_spi_driver = {
1218        .driver = {
1219                .name           = "spi-tegra114",
1220                .owner          = THIS_MODULE,
1221                .pm             = &tegra_spi_pm_ops,
1222                .of_match_table = tegra_spi_of_match,
1223        },
1224        .probe =        tegra_spi_probe,
1225        .remove =       tegra_spi_remove,
1226};
1227module_platform_driver(tegra_spi_driver);
1228
1229MODULE_ALIAS("platform:spi-tegra114");
1230MODULE_DESCRIPTION("NVIDIA Tegra114 SPI Controller Driver");
1231MODULE_AUTHOR("Laxman Dewangan <ldewangan@nvidia.com>");
1232MODULE_LICENSE("GPL v2");
1233