// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <catalin.marinas@arm.com>
 */

#include <linux/gfp.h>
#include <linux/cache.h>
#include <linux/dma-noncoherent.h>
#include <linux/dma-iommu.h>
#include <xen/xen.h>
#include <xen/swiotlb-xen.h>

#include <asm/cacheflush.h>

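/*
 * Make a CPU-owned buffer visible to the device: clean or invalidate the
 * data cache lines covering the region, depending on the transfer direction.
 */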
void arch_sync_dma_for_device(struct device *dev, phys_addr_t paddr,
		size_t size, enum dma_data_direction dir)
{
	__dma_map_area(phys_to_virt(paddr), size, dir);
}

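/*
 * Hand the buffer back to the CPU: invalidate cache lines that may be stale
 * after the device has written to memory (no-op for DMA_TO_DEVICE).
 */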
void arch_sync_dma_for_cpu(struct device *dev, phys_addr_t paddr,
		size_t size, enum dma_data_direction dir)
{
	__dma_unmap_area(phys_to_virt(paddr), size, dir);
}

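/*
 * Flush (clean + invalidate) the pages backing a coherent allocation before
 * they are handed out or remapped as non-cacheable.
 */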
void arch_dma_prep_coherent(struct page *page, size_t size)
{
	__dma_flush_area(page_address(page), size);
}

#ifdef CONFIG_IOMMU_DMA
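/* Drop any per-device DMA ops installed via arch_setup_dma_ops(). */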
void arch_teardown_dma_ops(struct device *dev)
{
	dev->dma_ops = NULL;
}
#endif

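/*
 * Per-device DMA setup: record whether the device is cache-coherent, warn if
 * the cache writeback granule (CTR_EL0.CWG) exceeds ARCH_DMA_MINALIGN for a
 * non-coherent device, and install IOMMU or Xen swiotlb DMA ops as needed.
 */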
void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size,
			const struct iommu_ops *iommu, bool coherent)
{
	int cls = cache_line_size_of_cpu();

	WARN_TAINT(!coherent && cls > ARCH_DMA_MINALIGN,
		   TAINT_CPU_OUT_OF_SPEC,
		   "%s %s: ARCH_DMA_MINALIGN smaller than CTR_EL0.CWG (%d < %d)",
		   dev_driver_string(dev), dev_name(dev),
		   ARCH_DMA_MINALIGN, cls);

	dev->dma_coherent = coherent;
	if (iommu)
		iommu_setup_dma_ops(dev, dma_base, size);

#ifdef CONFIG_XEN
	if (xen_initial_domain())
		dev->dma_ops = &xen_swiotlb_dma_ops;
#endif
}