Revision d86a77f8 dma.h
b/dma.h | ||
---|---|---|
14 | 14 |
#include "hw/hw.h" |
15 | 15 |
#include "block.h" |
16 | 16 |
|
17 |
typedef struct DMAContext DMAContext; |
|
17 | 18 |
typedef struct ScatterGatherEntry ScatterGatherEntry; |
18 | 19 |
|
19 | 20 |
typedef enum { |
... | ... | |
34 | 35 |
#define DMA_ADDR_BITS TARGET_PHYS_ADDR_BITS |
35 | 36 |
#define DMA_ADDR_FMT TARGET_FMT_plx |
36 | 37 |
|
38 |
/* Checks that the given range of addresses is valid for DMA. This is |
|
39 |
* useful for certain cases, but usually you should just use |
|
40 |
* dma_memory_{read,write}() and check for errors */ |
|
41 |
static inline bool dma_memory_valid(DMAContext *dma, dma_addr_t addr, |
|
42 |
dma_addr_t len, DMADirection dir) |
|
43 |
{ |
|
44 |
/* Stub version, with no iommu we assume all bus addresses are valid */ |
|
45 |
return true; |
|
46 |
} |
|
47 |
|
|
48 |
/* Transfer bytes between a device buffer and guest memory.
 * Stub version for targets without IOMMU support: the bus address is
 * used directly as a guest physical address.  Always succeeds (0). */
static inline int dma_memory_rw(DMAContext *dma, dma_addr_t addr,
                                void *buf, dma_addr_t len, DMADirection dir)
{
    int is_write = (dir == DMA_DIRECTION_FROM_DEVICE);

    cpu_physical_memory_rw(addr, buf, (target_phys_addr_t)len, is_write);
    return 0;
}
|
56 |
|
|
57 |
/* DMA read: copy bytes from guest memory into the device buffer.
 * Thin convenience wrapper around dma_memory_rw(). */
static inline int dma_memory_read(DMAContext *dma, dma_addr_t addr,
                                  void *buf, dma_addr_t len)
{
    return dma_memory_rw(dma, addr, buf, len, DMA_DIRECTION_TO_DEVICE);
}
|
62 |
|
|
63 |
/* DMA write: copy bytes from the device buffer into guest memory.
 * Thin convenience wrapper around dma_memory_rw(). */
static inline int dma_memory_write(DMAContext *dma, dma_addr_t addr,
                                   const void *buf, dma_addr_t len)
{
    /* Casting away const is safe here: a FROM_DEVICE transfer only
     * reads from buf. */
    return dma_memory_rw(dma, addr, (void *)buf, len,
                         DMA_DIRECTION_FROM_DEVICE);
}
|
69 |
|
|
70 |
int dma_memory_set(DMAContext *dma, dma_addr_t addr, uint8_t c, dma_addr_t len); |
|
71 |
|
|
72 |
static inline void *dma_memory_map(DMAContext *dma, |
|
73 |
dma_addr_t addr, dma_addr_t *len, |
|
74 |
DMADirection dir) |
|
75 |
{ |
|
76 |
target_phys_addr_t xlen = *len; |
|
77 |
void *p; |
|
78 |
|
|
79 |
p = cpu_physical_memory_map(addr, &xlen, |
|
80 |
dir == DMA_DIRECTION_FROM_DEVICE); |
|
81 |
*len = xlen; |
|
82 |
return p; |
|
83 |
} |
|
84 |
|
|
85 |
static inline void dma_memory_unmap(DMAContext *dma, |
|
86 |
void *buffer, dma_addr_t len, |
|
87 |
DMADirection dir, dma_addr_t access_len) |
|
88 |
{ |
|
89 |
return cpu_physical_memory_unmap(buffer, (target_phys_addr_t)len, |
|
90 |
dir == DMA_DIRECTION_FROM_DEVICE, |
|
91 |
access_len); |
|
92 |
} |
|
93 |
|
|
94 |
/* Generate a matched pair of typed, endian-aware DMA accessors:
 *   ld<_lname>_<_end>_dma(dma, addr) -- read a _bits-wide value at
 *     addr and convert it from _end (le/be) byte order to host order;
 *   st<_sname>_<_end>_dma(dma, addr, val) -- convert val from host
 *     order to _end byte order and write it at addr.
 * Errors from dma_memory_{read,write}() are ignored by design here
 * (the stub versions always succeed). */
#define DEFINE_LDST_DMA(_lname, _sname, _bits, _end)                    \
    static inline uint##_bits##_t ld##_lname##_##_end##_dma(DMAContext *dma, \
                                                            dma_addr_t addr) \
    {                                                                   \
        uint##_bits##_t val;                                            \
        dma_memory_read(dma, addr, &val, (_bits) / 8);                  \
        return _end##_bits##_to_cpu(val);                               \
    }                                                                   \
    static inline void st##_sname##_##_end##_dma(DMAContext *dma,       \
                                                 dma_addr_t addr,       \
                                                 uint##_bits##_t val)   \
    {                                                                   \
        val = cpu_to_##_end##_bits(val);                                \
        dma_memory_write(dma, addr, &val, (_bits) / 8);                 \
    }
|
109 |
|
|
110 |
/* Load one byte from the given DMA address (no endianness involved). */
static inline uint8_t ldub_dma(DMAContext *dma, dma_addr_t addr)
{
    uint8_t byte;

    dma_memory_read(dma, addr, &byte, sizeof(byte));
    return byte;
}
|
117 |
|
|
118 |
/* Store one byte at the given DMA address (no endianness involved). */
static inline void stb_dma(DMAContext *dma, dma_addr_t addr, uint8_t val)
{
    dma_memory_write(dma, addr, &val, sizeof(val));
}
|
122 |
|
|
123 |
/* Instantiate the 16/32/64-bit accessors in both byte orders:
 *   lduw_le_dma/stw_le_dma, ldl_le_dma/stl_le_dma, ldq_le_dma/stq_le_dma
 * and the corresponding *_be_dma variants. */
DEFINE_LDST_DMA(uw, w, 16, le);
DEFINE_LDST_DMA(l, l, 32, le);
DEFINE_LDST_DMA(q, q, 64, le);
DEFINE_LDST_DMA(uw, w, 16, be);
DEFINE_LDST_DMA(l, l, 32, be);
DEFINE_LDST_DMA(q, q, 64, be);

/* The generator macro is internal to this header; retire it. */
#undef DEFINE_LDST_DMA
|
131 |
|
|
37 | 132 |
struct ScatterGatherEntry { |
38 | 133 |
dma_addr_t base; |
39 | 134 |
dma_addr_t len; |
Also available in: Unified diff