// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
//
// AMD SPI controller driver
//
// Copyright (c) 2020, Advanced Micro Devices, Inc.
//
// Author: Sanjay R Mehta <sanju.mehta@amd.com>

#include <linux/acpi.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi-mem.h>

#define AMD_SPI_CTRL0_REG       0x00
#define AMD_SPI_EXEC_CMD        BIT(16)
#define AMD_SPI_FIFO_CLEAR      BIT(20)
#define AMD_SPI_BUSY            BIT(31)

#define AMD_SPI_OPCODE_REG      0x45
#define AMD_SPI_CMD_TRIGGER_REG 0x47
#define AMD_SPI_TRIGGER_CMD     BIT(7)

#define AMD_SPI_OPCODE_MASK     0xFF

#define AMD_SPI_ALT_CS_REG      0x1D
#define AMD_SPI_ALT_CS_MASK     0x3

#define AMD_SPI_FIFO_BASE       0x80
#define AMD_SPI_TX_COUNT_REG    0x48
#define AMD_SPI_RX_COUNT_REG    0x4B
#define AMD_SPI_STATUS_REG      0x4C
#define AMD_SPI_ADDR32CTRL_REG  0x50

#define AMD_SPI_FIFO_SIZE       70
#define AMD_SPI_MEM_SIZE        200
#define AMD_SPI_MAX_DATA        64
#define AMD_SPI_HID2_DMA_SIZE   4096

#define AMD_SPI_ENA_REG         0x20
#define AMD_SPI_ALT_SPD_SHIFT   20
#define AMD_SPI_ALT_SPD_MASK    GENMASK(23, AMD_SPI_ALT_SPD_SHIFT)
#define AMD_SPI_SPI100_SHIFT    0
#define AMD_SPI_SPI100_MASK     GENMASK(AMD_SPI_SPI100_SHIFT, AMD_SPI_SPI100_SHIFT)
#define AMD_SPI_SPEED_REG       0x6C
#define AMD_SPI_SPD7_SHIFT      8
#define AMD_SPI_SPD7_MASK       GENMASK(13, AMD_SPI_SPD7_SHIFT)

#define AMD_SPI_HID2_INPUT_RING_BUF0    0x100
#define AMD_SPI_HID2_CNTRL              0x150
#define AMD_SPI_HID2_INT_STATUS         0x154
#define AMD_SPI_HID2_CMD_START          0x156
#define AMD_SPI_HID2_INT_MASK           0x158
#define AMD_SPI_HID2_READ_CNTRL0        0x170
#define AMD_SPI_HID2_READ_CNTRL1        0x174
#define AMD_SPI_HID2_READ_CNTRL2        0x180

#define AMD_SPI_MAX_HZ          100000000
#define AMD_SPI_MIN_HZ          800000

#define AMD_SPI_IO_SLEEP_US     20
#define AMD_SPI_IO_TIMEOUT_US   2000000

/* SPI read command opcodes */
#define AMD_SPI_OP_READ           0x03  /* Read data bytes (low frequency) */
#define AMD_SPI_OP_READ_FAST      0x0b  /* Read data bytes (high frequency) */
#define AMD_SPI_OP_READ_1_1_2     0x3b  /* Read data bytes (Dual Output SPI) */
#define AMD_SPI_OP_READ_1_2_2     0xbb  /* Read data bytes (Dual I/O SPI) */
#define AMD_SPI_OP_READ_1_1_4     0x6b  /* Read data bytes (Quad Output SPI) */
#define AMD_SPI_OP_READ_1_4_4     0xeb  /* Read data bytes (Quad I/O SPI) */

/* SPI read command opcodes - 4B address */
#define AMD_SPI_OP_READ_FAST_4B   0x0c  /* Read data bytes (high frequency) */
#define AMD_SPI_OP_READ_1_1_2_4B  0x3c  /* Read data bytes (Dual Output SPI) */
#define AMD_SPI_OP_READ_1_2_2_4B  0xbc  /* Read data bytes (Dual I/O SPI) */
#define AMD_SPI_OP_READ_1_1_4_4B  0x6c  /* Read data bytes (Quad Output SPI) */
#define AMD_SPI_OP_READ_1_4_4_4B  0xec  /* Read data bytes (Quad I/O SPI) */

/**
 * enum amd_spi_versions - SPI controller versions
 * @AMD_SPI_V1: AMDI0061 hardware version
 * @AMD_SPI_V2: AMDI0062 hardware version
 * @AMD_HID2_SPI: AMDI0063 hardware version
 */
enum amd_spi_versions {
        AMD_SPI_V1 = 1,
        AMD_SPI_V2,
        AMD_HID2_SPI,
};

enum amd_spi_speed {
        F_66_66MHz,
        F_33_33MHz,
        F_22_22MHz,
        F_16_66MHz,
        F_100MHz,
        F_800KHz,
        SPI_SPD7 = 0x7,
        F_50MHz = 0x4,
        F_4MHz = 0x32,
        F_3_17MHz = 0x3F
};

/**
 * struct amd_spi_freq - Matches device speed with values to write in regs
 * @speed_hz: Device frequency
 * @enable_val: Value to be written to "enable register"
 * @spd7_val: Some frequencies require a value to be written to the SPISPEED register
 */
struct amd_spi_freq {
        u32 speed_hz;
        u32 enable_val;
        u32 spd7_val;
};

/**
 * struct amd_spi - SPI driver instance
 * @io_remap_addr: Start address of the SPI controller registers
 * @phy_dma_buf: Physical address of DMA buffer
 * @dma_virt_addr: Virtual address of DMA buffer
 * @version: SPI controller hardware version
 * @speed_hz: Device frequency
 */
struct amd_spi {
        void __iomem *io_remap_addr;
        dma_addr_t phy_dma_buf;
        void *dma_virt_addr;
        enum amd_spi_versions version;
        unsigned int speed_hz;
};

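/* MMIO register accessors; @idx is a byte offset from the remapped controller base */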
static inline u8 amd_spi_readreg8(struct amd_spi *amd_spi, int idx)
{
        return readb((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg8(struct amd_spi *amd_spi, int idx, u8 val)
{
        writeb(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static void amd_spi_setclear_reg8(struct amd_spi *amd_spi, int idx, u8 set, u8 clear)
{
        u8 tmp = amd_spi_readreg8(amd_spi, idx);

        tmp = (tmp & ~clear) | set;
        amd_spi_writereg8(amd_spi, idx, tmp);
}

static inline u16 amd_spi_readreg16(struct amd_spi *amd_spi, int idx)
{
        return readw((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg16(struct amd_spi *amd_spi, int idx, u16 val)
{
        writew(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline u32 amd_spi_readreg32(struct amd_spi *amd_spi, int idx)
{
        return readl((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg32(struct amd_spi *amd_spi, int idx, u32 val)
{
        writel(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline u64 amd_spi_readreg64(struct amd_spi *amd_spi, int idx)
{
        return readq((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg64(struct amd_spi *amd_spi, int idx, u64 val)
{
        writeq(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline void amd_spi_setclear_reg32(struct amd_spi *amd_spi, int idx, u32 set, u32 clear)
{
        u32 tmp = amd_spi_readreg32(amd_spi, idx);

        tmp = (tmp & ~clear) | set;
        amd_spi_writereg32(amd_spi, idx, tmp);
}

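/* Chip select is driven through the ALT_CS register on this controller */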
static void amd_spi_select_chip(struct amd_spi *amd_spi, u8 cs)
{
        amd_spi_setclear_reg8(amd_spi, AMD_SPI_ALT_CS_REG, cs, AMD_SPI_ALT_CS_MASK);
}

static inline void amd_spi_clear_chip(struct amd_spi *amd_spi, u8 chip_select)
{
        amd_spi_writereg8(amd_spi, AMD_SPI_ALT_CS_REG, chip_select & ~AMD_SPI_ALT_CS_MASK);
}

static void amd_spi_clear_fifo_ptr(struct amd_spi *amd_spi)
{
        amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_FIFO_CLEAR, AMD_SPI_FIFO_CLEAR);
}

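/* Program the command opcode; the opcode register layout differs between controller generations */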
static int amd_spi_set_opcode(struct amd_spi *amd_spi, u8 cmd_opcode)
{
        switch (amd_spi->version) {
        case AMD_SPI_V1:
                amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, cmd_opcode,
                                       AMD_SPI_OPCODE_MASK);
                return 0;
        case AMD_SPI_V2:
        case AMD_HID2_SPI:
                amd_spi_writereg8(amd_spi, AMD_SPI_OPCODE_REG, cmd_opcode);
                return 0;
        default:
                return -ENODEV;
        }
}

static inline void amd_spi_set_rx_count(struct amd_spi *amd_spi, u8 rx_count)
{
        amd_spi_writereg8(amd_spi, AMD_SPI_RX_COUNT_REG, rx_count);
}

static inline void amd_spi_set_tx_count(struct amd_spi *amd_spi, u8 tx_count)
{
        amd_spi_writereg8(amd_spi, AMD_SPI_TX_COUNT_REG, tx_count);
}

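/* Poll the controller busy flag; it lives in CTRL0 on v1 and in the status register on later versions */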
static int amd_spi_busy_wait(struct amd_spi *amd_spi)
{
        u32 val;
        int reg;

        switch (amd_spi->version) {
        case AMD_SPI_V1:
                reg = AMD_SPI_CTRL0_REG;
                break;
        case AMD_SPI_V2:
        case AMD_HID2_SPI:
                reg = AMD_SPI_STATUS_REG;
                break;
        default:
                return -ENODEV;
        }

        return readl_poll_timeout(amd_spi->io_remap_addr + reg, val,
                                  !(val & AMD_SPI_BUSY), AMD_SPI_IO_SLEEP_US,
                                  AMD_SPI_IO_TIMEOUT_US);
}

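/* Wait for the controller to go idle, then start execution of the programmed command */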
static int amd_spi_execute_opcode(struct amd_spi *amd_spi)
{
        int ret;

        ret = amd_spi_busy_wait(amd_spi);
        if (ret)
                return ret;

        switch (amd_spi->version) {
        case AMD_SPI_V1:
                /* Set ExecuteOpCode bit in the CTRL0 register */
                amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_EXEC_CMD,
                                       AMD_SPI_EXEC_CMD);
                return 0;
        case AMD_SPI_V2:
        case AMD_HID2_SPI:
                /* Trigger the command execution */
                amd_spi_setclear_reg8(amd_spi, AMD_SPI_CMD_TRIGGER_REG,
                                      AMD_SPI_TRIGGER_CMD, AMD_SPI_TRIGGER_CMD);
                return 0;
        default:
                return -ENODEV;
        }
}

static int amd_spi_host_setup(struct spi_device *spi)
{
        struct amd_spi *amd_spi = spi_controller_get_devdata(spi->controller);

        amd_spi_clear_fifo_ptr(amd_spi);

        return 0;
}

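/*
 * Frequency table, sorted from fastest to slowest; amd_set_spi_freq() below picks the
 * fastest entry that does not exceed the requested speed.
 */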
static const struct amd_spi_freq amd_spi_freq[] = {
        { AMD_SPI_MAX_HZ, F_100MHz, 0},
        { 66660000, F_66_66MHz, 0},
        { 50000000, SPI_SPD7, F_50MHz},
        { 33330000, F_33_33MHz, 0},
        { 22220000, F_22_22MHz, 0},
        { 16660000, F_16_66MHz, 0},
        { 4000000, SPI_SPD7, F_4MHz},
        { 3170000, SPI_SPD7, F_3_17MHz},
        { AMD_SPI_MIN_HZ, F_800KHz, 0},
};

static int amd_set_spi_freq(struct amd_spi *amd_spi, u32 speed_hz)
{
        unsigned int i, spd7_val, alt_spd;

        if (speed_hz < AMD_SPI_MIN_HZ)
                return -EINVAL;

        for (i = 0; i < ARRAY_SIZE(amd_spi_freq); i++)
                if (speed_hz >= amd_spi_freq[i].speed_hz)
                        break;

        if (amd_spi->speed_hz == amd_spi_freq[i].speed_hz)
                return 0;

        amd_spi->speed_hz = amd_spi_freq[i].speed_hz;

        alt_spd = (amd_spi_freq[i].enable_val << AMD_SPI_ALT_SPD_SHIFT)
                   & AMD_SPI_ALT_SPD_MASK;
        amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, alt_spd,
                               AMD_SPI_ALT_SPD_MASK);

        if (amd_spi->speed_hz == AMD_SPI_MAX_HZ)
                amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, 1,
                                       AMD_SPI_SPI100_MASK);

        if (amd_spi_freq[i].spd7_val) {
                spd7_val = (amd_spi_freq[i].spd7_val << AMD_SPI_SPD7_SHIFT)
                            & AMD_SPI_SPD7_MASK;
                amd_spi_setclear_reg32(amd_spi, AMD_SPI_SPEED_REG, spd7_val,
                                       AMD_SPI_SPD7_MASK);
        }

        return 0;
}

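/*
 * Half-duplex FIFO transfer: the first byte of the first TX transfer is used as the
 * command opcode, the remaining TX bytes are written into the FIFO, and any RX bytes
 * are read back from the FIFO after the command has executed.
 */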
static inline int amd_spi_fifo_xfer(struct amd_spi *amd_spi,
                                    struct spi_controller *host,
                                    struct spi_message *message)
{
        struct spi_transfer *xfer = NULL;
        struct spi_device *spi = message->spi;
        u8 cmd_opcode = 0, fifo_pos = AMD_SPI_FIFO_BASE;
        u8 *buf = NULL;
        u32 i = 0;
        u32 tx_len = 0, rx_len = 0;

        list_for_each_entry(xfer, &message->transfers,
                            transfer_list) {
                if (xfer->speed_hz)
                        amd_set_spi_freq(amd_spi, xfer->speed_hz);
                else
                        amd_set_spi_freq(amd_spi, spi->max_speed_hz);

                if (xfer->tx_buf) {
                        buf = (u8 *)xfer->tx_buf;
                        if (!tx_len) {
                                cmd_opcode = *(u8 *)xfer->tx_buf;
                                buf++;
                                xfer->len--;
                        }
                        tx_len += xfer->len;

                        /* Write data into the FIFO. */
                        for (i = 0; i < xfer->len; i++)
                                amd_spi_writereg8(amd_spi, fifo_pos + i, buf[i]);

                        fifo_pos += xfer->len;
                }

                /* Store no. of bytes to be received from FIFO */
                if (xfer->rx_buf)
                        rx_len += xfer->len;
        }

        if (!buf) {
                message->status = -EINVAL;
                goto fin_msg;
        }

        amd_spi_set_opcode(amd_spi, cmd_opcode);
        amd_spi_set_tx_count(amd_spi, tx_len);
        amd_spi_set_rx_count(amd_spi, rx_len);

        /* Execute command */
        message->status = amd_spi_execute_opcode(amd_spi);
        if (message->status)
                goto fin_msg;

        if (rx_len) {
                message->status = amd_spi_busy_wait(amd_spi);
                if (message->status)
                        goto fin_msg;

                list_for_each_entry(xfer, &message->transfers, transfer_list)
                        if (xfer->rx_buf) {
                                buf = (u8 *)xfer->rx_buf;
                                /* Read data from FIFO to receive buffer */
                                for (i = 0; i < xfer->len; i++)
                                        buf[i] = amd_spi_readreg8(amd_spi, fifo_pos + i);
                                fifo_pos += xfer->len;
                        }
        }

        /* Update statistics */
        message->actual_length = tx_len + rx_len + 1;

fin_msg:
        switch (amd_spi->version) {
        case AMD_SPI_V1:
                break;
        case AMD_SPI_V2:
        case AMD_HID2_SPI:
                amd_spi_clear_chip(amd_spi, spi_get_chipselect(message->spi, 0));
                break;
        default:
                return -ENODEV;
        }

        spi_finalize_current_message(host);

        return message->status;
}

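/* Helpers to classify read opcodes (3-byte and 4-byte address variants) */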
static inline bool amd_is_spi_read_cmd_4b(const u16 op)
{
        switch (op) {
        case AMD_SPI_OP_READ_FAST_4B:
        case AMD_SPI_OP_READ_1_1_2_4B:
        case AMD_SPI_OP_READ_1_2_2_4B:
        case AMD_SPI_OP_READ_1_1_4_4B:
        case AMD_SPI_OP_READ_1_4_4_4B:
                return true;
        default:
                return false;
        }
}

static inline bool amd_is_spi_read_cmd(const u16 op)
{
        switch (op) {
        case AMD_SPI_OP_READ:
        case AMD_SPI_OP_READ_FAST:
        case AMD_SPI_OP_READ_1_1_2:
        case AMD_SPI_OP_READ_1_2_2:
        case AMD_SPI_OP_READ_1_1_4:
        case AMD_SPI_OP_READ_1_4_4:
                return true;
        default:
                return amd_is_spi_read_cmd_4b(op);
        }
}

static bool amd_spi_supports_op(struct spi_mem *mem,
                                const struct spi_mem_op *op)
{
        struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);

        /* bus width is number of IO lines used to transmit */
        if (op->cmd.buswidth > 1 || op->addr.buswidth > 4)
                return false;

        /* AMD SPI controllers support quad mode only for read operations */
        if (amd_is_spi_read_cmd(op->cmd.opcode)) {
                if (op->data.buswidth > 4)
                        return false;

                /*
                 * HID2 SPI controller supports DMA read up to 4K bytes and
                 * doesn't support 4-byte address commands.
                 */
                if (amd_spi->version == AMD_HID2_SPI) {
                        if (amd_is_spi_read_cmd_4b(op->cmd.opcode) ||
                            op->data.nbytes > AMD_SPI_HID2_DMA_SIZE)
                                return false;
                } else if (op->data.nbytes > AMD_SPI_MAX_DATA) {
                        return false;
                }
        } else if (op->data.buswidth > 1 || op->data.nbytes > AMD_SPI_MAX_DATA) {
                return false;
        }

        return spi_mem_default_supports_op(mem, op);
}

static int amd_spi_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
{
        struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);

        /*
         * The HID2 SPI controller's DMA read mode supports reading up to 4K
         * bytes in a single transaction, whereas SPI0 and the HID2 controller's
         * index mode support a maximum of 64 bytes per transaction.
         */
        if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode))
                op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_HID2_DMA_SIZE);
        else
                op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_MAX_DATA);

        return 0;
}

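/* Write the op address into the FIFO, most significant byte first */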
static void amd_spi_set_addr(struct amd_spi *amd_spi,
                             const struct spi_mem_op *op)
{
        u8 nbytes = op->addr.nbytes;
        u64 addr_val = op->addr.val;
        int base_addr, i;

        base_addr = AMD_SPI_FIFO_BASE + nbytes;

        for (i = 0; i < nbytes; i++) {
                amd_spi_writereg8(amd_spi, base_addr - i - 1, addr_val &
                                  GENMASK(7, 0));
                addr_val >>= 8;
        }
}

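/* spi-mem write path: program opcode and address, copy data to the FIFO (64-bit chunks plus a byte-wise tail) and execute */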
static void amd_spi_mem_data_out(struct amd_spi *amd_spi,
                                 const struct spi_mem_op *op)
{
        int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
        u64 *buf_64 = (u64 *)op->data.buf.out;
        u32 nbytes = op->data.nbytes;
        u32 left_data = nbytes;
        u8 *buf;
        int i;

        amd_spi_set_opcode(amd_spi, op->cmd.opcode);
        amd_spi_set_addr(amd_spi, op);

        for (i = 0; left_data >= 8; i++, left_data -= 8)
                amd_spi_writereg64(amd_spi, base_addr + op->dummy.nbytes + (i * 8), *buf_64++);

        buf = (u8 *)buf_64;
        for (i = 0; i < left_data; i++) {
                amd_spi_writereg8(amd_spi, base_addr + op->dummy.nbytes + nbytes + i - left_data,
                                  buf[i]);
        }

        amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->data.nbytes);
        amd_spi_set_rx_count(amd_spi, 0);
        amd_spi_clear_fifo_ptr(amd_spi);
        amd_spi_execute_opcode(amd_spi);
}

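/* Issue a read through the HID2 DMA engine; the data is deposited in the DMA buffer allocated in amd_spi_setup_hiddma() */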
static void amd_spi_hiddma_read(struct amd_spi *amd_spi, const struct spi_mem_op *op)
{
        u16 hid_cmd_start, val;
        u32 hid_regval;

        /* Set the opcode in hid2_read_control0 register */
        hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0);
        hid_regval = (hid_regval & ~GENMASK(7, 0)) | op->cmd.opcode;

        /*
         * Program the address in the hid2_read_control0 register [8:31]. The address should
         * be written starting from the 8th bit of the register, requiring an 8-bit shift.
         * Additionally, to convert a 2-byte spinand address to a 3-byte address, another
         * 8-bit shift is needed. Therefore, a total shift of 16 bits is required.
         */
        hid_regval = (hid_regval & ~GENMASK(31, 8)) | (op->addr.val << 16);
        amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0, hid_regval);

        /* Configure dummy clock cycles for fast read, dual, quad I/O commands */
        hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2);
        /* Fast read dummy cycle */
        hid_regval &= ~GENMASK(4, 0);

        /* Fast read Dual I/O dummy cycle */
        hid_regval &= ~GENMASK(12, 8);

        /* Fast read Quad I/O dummy cycle */
        hid_regval = (hid_regval & ~GENMASK(20, 16)) | BIT(17);

        /* Set no of preamble bytecount */
        hid_regval &= ~GENMASK(27, 24);
        amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2, hid_regval);

        /*
         * Program the HID2 Input Ring Buffer0. 4k aligned buf_memory_addr[31:12],
         * buf_size[4:0], end_input_ring[5].
         */
        hid_regval = amd_spi->phy_dma_buf | BIT(5) | BIT(0);
        amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INPUT_RING_BUF0, hid_regval);

        /* Program max read length(no of DWs) in hid2_read_control1 register */
        hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1);
        hid_regval = (hid_regval & ~GENMASK(15, 0)) | ((op->data.nbytes / 4) - 1);
        amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1, hid_regval);

        /* Set cmd start bit in hid2_cmd_start register to trigger HID basic read operation */
        hid_cmd_start = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_CMD_START);
        amd_spi_writereg16(amd_spi, AMD_SPI_HID2_CMD_START, (hid_cmd_start | BIT(3)));

        /* Check interrupt status of HIDDMA basic read operation in hid2_int_status register */
        readw_poll_timeout(amd_spi->io_remap_addr + AMD_SPI_HID2_INT_STATUS, val,
                           (val & BIT(3)), AMD_SPI_IO_SLEEP_US, AMD_SPI_IO_TIMEOUT_US);

        /* Clear the interrupts by writing to hid2_int_status register */
        val = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_INT_STATUS);
        amd_spi_writereg16(amd_spi, AMD_SPI_HID2_INT_STATUS, val);
}

static void amd_spi_mem_data_in(struct amd_spi *amd_spi,
                                const struct spi_mem_op *op)
{
        int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
        u64 *buf_64 = (u64 *)op->data.buf.in;
        u32 nbytes = op->data.nbytes;
        u32 left_data = nbytes;
        u32 data;
        u8 *buf;
        int i;

        /*
         * Use HID read mode only for reading complete page data;
         * fall back to index mode otherwise.
         */
        if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode)) {
                amd_spi_hiddma_read(amd_spi, op);

                for (i = 0; left_data >= 8; i++, left_data -= 8)
                        *buf_64++ = readq((u8 __iomem *)amd_spi->dma_virt_addr + (i * 8));

                buf = (u8 *)buf_64;
                for (i = 0; i < left_data; i++)
                        buf[i] = readb((u8 __iomem *)amd_spi->dma_virt_addr +
                                       (nbytes - left_data + i));

                /* Reset HID RX memory logic */
                data = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
                amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, data | BIT(5));
        } else {
                /* Index mode */
                amd_spi_set_opcode(amd_spi, op->cmd.opcode);
                amd_spi_set_addr(amd_spi, op);
                amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->dummy.nbytes);

                for (i = 0; i < op->dummy.nbytes; i++)
                        amd_spi_writereg8(amd_spi, (base_addr + i), 0xff);

                amd_spi_set_rx_count(amd_spi, op->data.nbytes);
                amd_spi_clear_fifo_ptr(amd_spi);
                amd_spi_execute_opcode(amd_spi);
                amd_spi_busy_wait(amd_spi);

                for (i = 0; left_data >= 8; i++, left_data -= 8)
                        *buf_64++ = amd_spi_readreg64(amd_spi, base_addr + op->dummy.nbytes +
                                                      (i * 8));

                buf = (u8 *)buf_64;
                for (i = 0; i < left_data; i++)
                        buf[i] = amd_spi_readreg8(amd_spi, base_addr + op->dummy.nbytes +
                                                  nbytes + i - left_data);
        }
}

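/* Select 3-byte or 4-byte addressing in the ADDR32CTRL register based on the opcode */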
static void amd_set_spi_addr_mode(struct amd_spi *amd_spi,
                                  const struct spi_mem_op *op)
{
        u32 val = amd_spi_readreg32(amd_spi, AMD_SPI_ADDR32CTRL_REG);

        if (amd_is_spi_read_cmd_4b(op->cmd.opcode))
                amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val | BIT(0));
        else
                amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val & ~BIT(0));
}

static int amd_spi_exec_mem_op(struct spi_mem *mem,
                               const struct spi_mem_op *op)
{
        struct amd_spi *amd_spi;
        int ret;

        amd_spi = spi_controller_get_devdata(mem->spi->controller);

        ret = amd_set_spi_freq(amd_spi, mem->spi->max_speed_hz);
        if (ret)
                return ret;

        if (amd_spi->version == AMD_SPI_V2)
                amd_set_spi_addr_mode(amd_spi, op);

        switch (op->data.dir) {
        case SPI_MEM_DATA_IN:
                amd_spi_mem_data_in(amd_spi, op);
                break;
        case SPI_MEM_DATA_OUT:
                fallthrough;
        case SPI_MEM_NO_DATA:
                amd_spi_mem_data_out(amd_spi, op);
                break;
        default:
                ret = -EOPNOTSUPP;
        }

        return ret;
}

static const struct spi_controller_mem_ops amd_spi_mem_ops = {
        .exec_op = amd_spi_exec_mem_op,
        .adjust_op_size = amd_spi_adjust_op_size,
        .supports_op = amd_spi_supports_op,
};

static int amd_spi_host_transfer(struct spi_controller *host,
                                 struct spi_message *msg)
{
        struct amd_spi *amd_spi = spi_controller_get_devdata(host);
        struct spi_device *spi = msg->spi;

        amd_spi_select_chip(amd_spi, spi_get_chipselect(spi, 0));

        /*
         * Extract spi_transfers from the spi message and
         * program the controller.
         */
        return amd_spi_fifo_xfer(amd_spi, host, msg);
}

static size_t amd_spi_max_transfer_size(struct spi_device *spi)
{
        return AMD_SPI_FIFO_SIZE;
}

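/* One-time HID2 DMA setup: allocate the DMA buffer and configure the interrupt mask and buffer unit used for basic read */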
static int amd_spi_setup_hiddma(struct amd_spi *amd_spi, struct device *dev)
{
        u32 hid_regval;

        /* Allocate DMA buffer to use for HID basic read operation */
        amd_spi->dma_virt_addr = dma_alloc_coherent(dev, AMD_SPI_HID2_DMA_SIZE,
                                                    &amd_spi->phy_dma_buf, GFP_KERNEL);
        if (!amd_spi->dma_virt_addr)
                return -ENOMEM;

        /*
         * Enable interrupts and set mask bits in the hid2_int_mask register so that
         * interrupts are generated properly for HIDDMA basic read operations.
         */
        hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_INT_MASK);
        hid_regval = (hid_regval & GENMASK(31, 8)) | BIT(19);
        amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INT_MASK, hid_regval);

        /* Configure buffer unit(4k) in hid2_control register */
        hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
        amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, hid_regval & ~BIT(3));

        return 0;
}

static int amd_spi_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct spi_controller *host;
        struct amd_spi *amd_spi;
        int err;

        /* Allocate storage for host and driver private data */
        host = devm_spi_alloc_host(dev, sizeof(struct amd_spi));
        if (!host)
                return dev_err_probe(dev, -ENOMEM, "Error allocating SPI host\n");

        amd_spi = spi_controller_get_devdata(host);
        amd_spi->io_remap_addr = devm_platform_ioremap_resource(pdev, 0);
        if (IS_ERR(amd_spi->io_remap_addr))
                return dev_err_probe(dev, PTR_ERR(amd_spi->io_remap_addr),
                                     "ioremap of SPI registers failed\n");

        dev_dbg(dev, "io_remap_address: %p\n", amd_spi->io_remap_addr);

        amd_spi->version = (uintptr_t) device_get_match_data(dev);

        /* Initialize the spi_controller fields */
        host->bus_num = (amd_spi->version == AMD_HID2_SPI) ? 2 : 0;
        host->num_chipselect = 4;
        host->mode_bits = SPI_TX_DUAL | SPI_TX_QUAD | SPI_RX_DUAL | SPI_RX_QUAD;
        host->flags = SPI_CONTROLLER_HALF_DUPLEX;
        host->max_speed_hz = AMD_SPI_MAX_HZ;
        host->min_speed_hz = AMD_SPI_MIN_HZ;
        host->setup = amd_spi_host_setup;
        host->transfer_one_message = amd_spi_host_transfer;
        host->mem_ops = &amd_spi_mem_ops;
        host->max_transfer_size = amd_spi_max_transfer_size;
        host->max_message_size = amd_spi_max_transfer_size;

        /* Register the controller with SPI framework */
        err = devm_spi_register_controller(dev, host);
        if (err)
                return dev_err_probe(dev, err, "error registering SPI controller\n");

        if (amd_spi->version == AMD_HID2_SPI)
                err = amd_spi_setup_hiddma(amd_spi, dev);

        return err;
}

#ifdef CONFIG_ACPI
static const struct acpi_device_id spi_acpi_match[] = {
        { "AMDI0061", AMD_SPI_V1 },
        { "AMDI0062", AMD_SPI_V2 },
        { "AMDI0063", AMD_HID2_SPI },
        {},
};
MODULE_DEVICE_TABLE(acpi, spi_acpi_match);
#endif

static struct platform_driver amd_spi_driver = {
        .driver = {
                .name = "amd_spi",
                .acpi_match_table = ACPI_PTR(spi_acpi_match),
        },
        .probe = amd_spi_probe,
};

module_platform_driver(amd_spi_driver);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Sanjay Mehta <sanju.mehta@amd.com>");
MODULE_DESCRIPTION("AMD SPI Master Controller Driver");