/*
 * ARM GICv3 support - internal interfaces
 *
 * Copyright (c) 2012 Linaro Limited
 * Copyright (c) 2015 Huawei.
 * Copyright (c) 2015 Samsung Electronics Co., Ltd.
 * Written by Peter Maydell
 * Reworked for GICv3 by Shlomo Pongratz and Pavel Fedin
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, see <http://www.gnu.org/licenses/>.
 */

#ifndef QEMU_ARM_GICV3_INTERNAL_H
#define QEMU_ARM_GICV3_INTERNAL_H

#include "hw/intc/arm_gicv3_common.h"

/* Distributor registers, as offsets from the distributor base address */
#define GICD_CTLR            0x0000
#define GICD_TYPER           0x0004
#define GICD_IIDR            0x0008
#define GICD_STATUSR         0x0010
#define GICD_SETSPI_NSR      0x0040
#define GICD_CLRSPI_NSR      0x0048
#define GICD_SETSPI_SR       0x0050
#define GICD_CLRSPI_SR       0x0058
#define GICD_SEIR            0x0068
#define GICD_IGROUPR         0x0080
#define GICD_ISENABLER       0x0100
#define GICD_ICENABLER       0x0180
#define GICD_ISPENDR         0x0200
#define GICD_ICPENDR         0x0280
#define GICD_ISACTIVER       0x0300
#define GICD_ICACTIVER       0x0380
#define GICD_IPRIORITYR      0x0400
#define GICD_ITARGETSR       0x0800
#define GICD_ICFGR           0x0C00
#define GICD_IGRPMODR        0x0D00
#define GICD_NSACR           0x0E00
#define GICD_SGIR            0x0F00
#define GICD_CPENDSGIR       0x0F10
#define GICD_SPENDSGIR       0x0F20
#define GICD_IROUTER         0x6000
#define GICD_IDREGS          0xFFD0

/* GICD_CTLR fields */
#define GICD_CTLR_EN_GRP0           (1U << 0)
#define GICD_CTLR_EN_GRP1NS         (1U << 1) /* GICv3 5.3.20 */
#define GICD_CTLR_EN_GRP1S          (1U << 2)
#define GICD_CTLR_EN_GRP1_ALL       (GICD_CTLR_EN_GRP1NS | GICD_CTLR_EN_GRP1S)
/* Bit 4 is ARE if the system doesn't support TrustZone, ARE_S otherwise */
#define GICD_CTLR_ARE               (1U << 4)
#define GICD_CTLR_ARE_S             (1U << 4)
#define GICD_CTLR_ARE_NS            (1U << 5)
#define GICD_CTLR_DS                (1U << 6)
#define GICD_CTLR_E1NWF             (1U << 7)
#define GICD_CTLR_RWP               (1U << 31)
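
/*
 * Illustrative sketch, not part of the original header: with GICD_CTLR.DS
 * set there is a single ARE bit (bit 4); otherwise bit 4 is ARE_S and
 * bit 5 is ARE_NS. A hypothetical helper asking "is affinity routing
 * enabled for Non-secure accesses?" might look like this.
 */
static inline bool gicv3_dist_are_ns_example(GICv3State *s)
{
    if (s->gicd_ctlr & GICD_CTLR_DS) {
        /* Security support disabled: one ARE bit covers everything */
        return s->gicd_ctlr & GICD_CTLR_ARE;
    }
    return s->gicd_ctlr & GICD_CTLR_ARE_NS;
}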

/*
 * Redistributor frame offsets from RD_base
 */
#define GICR_SGI_OFFSET 0x10000

/*
 * Redistributor registers, offsets from RD_base
 */
#define GICR_CTLR             0x0000
#define GICR_IIDR             0x0004
#define GICR_TYPER            0x0008
#define GICR_STATUSR          0x0010
#define GICR_WAKER            0x0014
#define GICR_SETLPIR          0x0040
#define GICR_CLRLPIR          0x0048
#define GICR_PROPBASER        0x0070
#define GICR_PENDBASER        0x0078
#define GICR_INVLPIR          0x00A0
#define GICR_INVALLR          0x00B0
#define GICR_SYNCR            0x00C0
#define GICR_IDREGS           0xFFD0

/* SGI and PPI Redistributor registers, offsets from RD_base */
#define GICR_IGROUPR0         (GICR_SGI_OFFSET + 0x0080)
#define GICR_ISENABLER0       (GICR_SGI_OFFSET + 0x0100)
#define GICR_ICENABLER0       (GICR_SGI_OFFSET + 0x0180)
#define GICR_ISPENDR0         (GICR_SGI_OFFSET + 0x0200)
#define GICR_ICPENDR0         (GICR_SGI_OFFSET + 0x0280)
#define GICR_ISACTIVER0       (GICR_SGI_OFFSET + 0x0300)
#define GICR_ICACTIVER0       (GICR_SGI_OFFSET + 0x0380)
#define GICR_IPRIORITYR       (GICR_SGI_OFFSET + 0x0400)
#define GICR_ICFGR0           (GICR_SGI_OFFSET + 0x0C00)
#define GICR_ICFGR1           (GICR_SGI_OFFSET + 0x0C04)
#define GICR_IGRPMODR0        (GICR_SGI_OFFSET + 0x0D00)
#define GICR_NSACR            (GICR_SGI_OFFSET + 0x0E00)

#define GICR_CTLR_ENABLE_LPIS        (1U << 0)
#define GICR_CTLR_RWP                (1U << 3)
#define GICR_CTLR_DPG0               (1U << 24)
#define GICR_CTLR_DPG1NS             (1U << 25)
#define GICR_CTLR_DPG1S              (1U << 26)
#define GICR_CTLR_UWP                (1U << 31)

#define GICR_TYPER_PLPIS             (1U << 0)
#define GICR_TYPER_VLPIS             (1U << 1)
#define GICR_TYPER_DIRECTLPI         (1U << 3)
#define GICR_TYPER_LAST              (1U << 4)
#define GICR_TYPER_DPGS              (1U << 5)
#define GICR_TYPER_PROCNUM           (0xFFFFU << 8)
#define GICR_TYPER_COMMONLPIAFF      (0x3 << 24)
#define GICR_TYPER_AFFINITYVALUE     (0xFFFFFFFFULL << 32)

#define GICR_WAKER_ProcessorSleep    (1U << 1)
#define GICR_WAKER_ChildrenAsleep    (1U << 2)

#define GICR_PROPBASER_OUTER_CACHEABILITY_MASK (7ULL << 56)
#define GICR_PROPBASER_ADDR_MASK               (0xfffffffffULL << 12)
#define GICR_PROPBASER_SHAREABILITY_MASK       (3U << 10)
#define GICR_PROPBASER_CACHEABILITY_MASK       (7U << 7)
#define GICR_PROPBASER_IDBITS_MASK             (0x1f)

#define GICR_PENDBASER_PTZ                     (1ULL << 62)
#define GICR_PENDBASER_OUTER_CACHEABILITY_MASK (7ULL << 56)
#define GICR_PENDBASER_ADDR_MASK               (0xffffffffULL << 16)
#define GICR_PENDBASER_SHAREABILITY_MASK       (3U << 10)
#define GICR_PENDBASER_CACHEABILITY_MASK       (7U << 7)

#define ICC_CTLR_EL1_CBPR           (1U << 0)
#define ICC_CTLR_EL1_EOIMODE        (1U << 1)
#define ICC_CTLR_EL1_PMHE           (1U << 6)
#define ICC_CTLR_EL1_PRIBITS_SHIFT  8
#define ICC_CTLR_EL1_PRIBITS_MASK   (7U << ICC_CTLR_EL1_PRIBITS_SHIFT)
#define ICC_CTLR_EL1_IDBITS_SHIFT   11
#define ICC_CTLR_EL1_SEIS           (1U << 14)
#define ICC_CTLR_EL1_A3V            (1U << 15)

#define ICC_PMR_PRIORITY_MASK    0xff
#define ICC_BPR_BINARYPOINT_MASK 0x07
#define ICC_IGRPEN_ENABLE        0x01

#define ICC_CTLR_EL3_CBPR_EL1S      (1U << 0)
#define ICC_CTLR_EL3_CBPR_EL1NS     (1U << 1)
#define ICC_CTLR_EL3_EOIMODE_EL3    (1U << 2)
#define ICC_CTLR_EL3_EOIMODE_EL1S   (1U << 3)
#define ICC_CTLR_EL3_EOIMODE_EL1NS  (1U << 4)
#define ICC_CTLR_EL3_RM             (1U << 5)
#define ICC_CTLR_EL3_PMHE           (1U << 6)
#define ICC_CTLR_EL3_PRIBITS_SHIFT  8
#define ICC_CTLR_EL3_IDBITS_SHIFT   11
#define ICC_CTLR_EL3_SEIS           (1U << 14)
#define ICC_CTLR_EL3_A3V            (1U << 15)
#define ICC_CTLR_EL3_NDS            (1U << 17)

#define ICH_VMCR_EL2_VENG0_SHIFT 0
#define ICH_VMCR_EL2_VENG0       (1U << ICH_VMCR_EL2_VENG0_SHIFT)
#define ICH_VMCR_EL2_VENG1_SHIFT 1
#define ICH_VMCR_EL2_VENG1       (1U << ICH_VMCR_EL2_VENG1_SHIFT)
#define ICH_VMCR_EL2_VACKCTL     (1U << 2)
#define ICH_VMCR_EL2_VFIQEN      (1U << 3)
#define ICH_VMCR_EL2_VCBPR_SHIFT 4
#define ICH_VMCR_EL2_VCBPR       (1U << ICH_VMCR_EL2_VCBPR_SHIFT)
#define ICH_VMCR_EL2_VEOIM_SHIFT 9
#define ICH_VMCR_EL2_VEOIM       (1U << ICH_VMCR_EL2_VEOIM_SHIFT)
#define ICH_VMCR_EL2_VBPR1_SHIFT 18
#define ICH_VMCR_EL2_VBPR1_LENGTH 3
#define ICH_VMCR_EL2_VBPR1_MASK (0x7U << ICH_VMCR_EL2_VBPR1_SHIFT)
#define ICH_VMCR_EL2_VBPR0_SHIFT 21
#define ICH_VMCR_EL2_VBPR0_LENGTH 3
#define ICH_VMCR_EL2_VBPR0_MASK (0x7U << ICH_VMCR_EL2_VBPR0_SHIFT)
#define ICH_VMCR_EL2_VPMR_SHIFT 24
#define ICH_VMCR_EL2_VPMR_LENGTH 8
#define ICH_VMCR_EL2_VPMR_MASK (0xffU << ICH_VMCR_EL2_VPMR_SHIFT)

#define ICH_HCR_EL2_EN (1U << 0)
#define ICH_HCR_EL2_UIE (1U << 1)
#define ICH_HCR_EL2_LRENPIE (1U << 2)
#define ICH_HCR_EL2_NPIE (1U << 3)
#define ICH_HCR_EL2_VGRP0EIE (1U << 4)
#define ICH_HCR_EL2_VGRP0DIE (1U << 5)
#define ICH_HCR_EL2_VGRP1EIE (1U << 6)
#define ICH_HCR_EL2_VGRP1DIE (1U << 7)
#define ICH_HCR_EL2_TC (1U << 10)
#define ICH_HCR_EL2_TALL0 (1U << 11)
#define ICH_HCR_EL2_TALL1 (1U << 12)
#define ICH_HCR_EL2_TSEI (1U << 13)
#define ICH_HCR_EL2_TDIR (1U << 14)
#define ICH_HCR_EL2_EOICOUNT_SHIFT 27
#define ICH_HCR_EL2_EOICOUNT_LENGTH 5
#define ICH_HCR_EL2_EOICOUNT_MASK (0x1fU << ICH_HCR_EL2_EOICOUNT_SHIFT)

#define ICH_LR_EL2_VINTID_SHIFT 0
#define ICH_LR_EL2_VINTID_LENGTH 32
#define ICH_LR_EL2_VINTID_MASK (0xffffffffULL << ICH_LR_EL2_VINTID_SHIFT)
#define ICH_LR_EL2_PINTID_SHIFT 32
#define ICH_LR_EL2_PINTID_LENGTH 10
#define ICH_LR_EL2_PINTID_MASK (0x3ffULL << ICH_LR_EL2_PINTID_SHIFT)
/* Note that EOI shares with the top bit of the pINTID field */
#define ICH_LR_EL2_EOI (1ULL << 41)
#define ICH_LR_EL2_PRIORITY_SHIFT 48
#define ICH_LR_EL2_PRIORITY_LENGTH 8
#define ICH_LR_EL2_PRIORITY_MASK (0xffULL << ICH_LR_EL2_PRIORITY_SHIFT)
#define ICH_LR_EL2_GROUP (1ULL << 60)
#define ICH_LR_EL2_HW (1ULL << 61)
#define ICH_LR_EL2_STATE_SHIFT 62
#define ICH_LR_EL2_STATE_LENGTH 2
#define ICH_LR_EL2_STATE_MASK (3ULL << ICH_LR_EL2_STATE_SHIFT)
/* values for the state field: */
#define ICH_LR_EL2_STATE_INVALID 0
#define ICH_LR_EL2_STATE_PENDING 1
#define ICH_LR_EL2_STATE_ACTIVE 2
#define ICH_LR_EL2_STATE_ACTIVE_PENDING 3
#define ICH_LR_EL2_STATE_PENDING_BIT (1ULL << ICH_LR_EL2_STATE_SHIFT)
#define ICH_LR_EL2_STATE_ACTIVE_BIT (2ULL << ICH_LR_EL2_STATE_SHIFT)
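
/*
 * Minimal sketch, not part of the original header: testing a list register
 * value against the state encodings above. The pending and active "bits"
 * are the two halves of the 2-bit state field, so ACTIVE_PENDING (3)
 * satisfies both tests.
 */
static inline bool ich_lr_state_pending_example(uint64_t lr)
{
    return lr & ICH_LR_EL2_STATE_PENDING_BIT;
}

static inline bool ich_lr_state_active_example(uint64_t lr)
{
    return lr & ICH_LR_EL2_STATE_ACTIVE_BIT;
}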

#define ICH_MISR_EL2_EOI (1U << 0)
#define ICH_MISR_EL2_U (1U << 1)
#define ICH_MISR_EL2_LRENP (1U << 2)
#define ICH_MISR_EL2_NP (1U << 3)
#define ICH_MISR_EL2_VGRP0E (1U << 4)
#define ICH_MISR_EL2_VGRP0D (1U << 5)
#define ICH_MISR_EL2_VGRP1E (1U << 6)
#define ICH_MISR_EL2_VGRP1D (1U << 7)

#define ICH_VTR_EL2_LISTREGS_SHIFT 0
#define ICH_VTR_EL2_TDS (1U << 19)
#define ICH_VTR_EL2_NV4 (1U << 20)
#define ICH_VTR_EL2_A3V (1U << 21)
#define ICH_VTR_EL2_SEIS (1U << 22)
#define ICH_VTR_EL2_IDBITS_SHIFT 23
#define ICH_VTR_EL2_PREBITS_SHIFT 26
#define ICH_VTR_EL2_PRIBITS_SHIFT 29

/* Special interrupt IDs */
#define INTID_SECURE 1020
#define INTID_NONSECURE 1021
#define INTID_SPURIOUS 1023

/* Functions internal to the emulated GICv3 */

/**
 * gicv3_redist_update:
 * @cs: GICv3CPUState for this redistributor
 *
 * Recalculate the highest priority pending interrupt after a
 * change to redistributor state, and inform the CPU accordingly.
 */
void gicv3_redist_update(GICv3CPUState *cs);

/**
 * gicv3_update:
 * @s: GICv3State
 * @start: first interrupt whose state changed
 * @len: length of the range of interrupts whose state changed
 *
 * Recalculate the highest priority pending interrupts after a
 * change to the distributor state affecting @len interrupts
 * starting at @start, and inform the CPUs accordingly.
 */
void gicv3_update(GICv3State *s, int start, int len);
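
/*
 * Minimal usage sketch (hypothetical helper, not in the original header):
 * a write to a 1-bit-per-interrupt distributor register such as
 * GICD_ISENABLER<n> touches the 32 interrupts covered by that word, so a
 * register write handler would typically follow it with a recalculation
 * over exactly that range.
 */
static inline void gicv3_update_bitmap_word_example(GICv3State *s, int wordno)
{
    gicv3_update(s, wordno * 32, 32);
}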

/**
 * gicv3_full_update_noirqset:
 * @s: GICv3State
 *
 * Recalculate the cached information about highest priority
 * pending interrupts, but don't inform the CPUs. This should be
 * called after an incoming migration has loaded new state.
 */
void gicv3_full_update_noirqset(GICv3State *s);

/**
 * gicv3_full_update:
 * @s: GICv3State
 *
 * Recalculate the highest priority pending interrupts after
 * a change that could affect the status of all interrupts,
 * and inform the CPUs accordingly.
 */
void gicv3_full_update(GICv3State *s);
MemTxResult gicv3_dist_read(void *opaque, hwaddr offset, uint64_t *data,
                            unsigned size, MemTxAttrs attrs);
MemTxResult gicv3_dist_write(void *opaque, hwaddr addr, uint64_t data,
                             unsigned size, MemTxAttrs attrs);
MemTxResult gicv3_redist_read(void *opaque, hwaddr offset, uint64_t *data,
                              unsigned size, MemTxAttrs attrs);
MemTxResult gicv3_redist_write(void *opaque, hwaddr offset, uint64_t data,
                               unsigned size, MemTxAttrs attrs);
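
/*
 * Minimal sketch of how these accessors are expected to be wired up
 * (an assumption about the caller; the variable name is illustrative,
 * not part of the original header). The *_read/*_write functions above
 * have the read_with_attrs/write_with_attrs signatures, so they can be
 * plugged straight into a MemoryRegionOps.
 */
static const MemoryRegionOps gicv3_dist_ops_example = {
    .read_with_attrs = gicv3_dist_read,
    .write_with_attrs = gicv3_dist_write,
    .endianness = DEVICE_NATIVE_ENDIAN,
};
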
void gicv3_dist_set_irq(GICv3State *s, int irq, int level);
void gicv3_redist_set_irq(GICv3CPUState *cs, int irq, int level);
void gicv3_redist_send_sgi(GICv3CPUState *cs, int grp, int irq, bool ns);
void gicv3_init_cpuif(GICv3State *s);

/**
 * gicv3_cpuif_update:
 * @cs: GICv3CPUState for the CPU to update
 *
 * Recalculate whether to assert the IRQ or FIQ lines after a change
 * to the current highest priority pending interrupt, the CPU's
 * current running priority or the CPU's current exception level or
 * security state.
 */
void gicv3_cpuif_update(GICv3CPUState *cs);

static inline uint32_t gicv3_iidr(void)
{
    /* Return the Implementer Identification Register value
     * for the emulated GICv3, as reported in GICD_IIDR and GICR_IIDR.
     *
     * We claim to be an ARM r0p0 with a zero ProductID.
     * This is the same as an r0p0 GIC-500.
     */
    return 0x43b;
}

static inline uint32_t gicv3_idreg(int regoffset)
{
    /* Return the value of the CoreSight ID register at the specified
     * offset from the first ID register (as found in the distributor
     * and redistributor register banks).
     * These values indicate an ARM implementation of a GICv3.
     */
    static const uint8_t gicd_ids[] = {
        0x44, 0x00, 0x00, 0x00, 0x92, 0xB4, 0x3B, 0x00, 0x0D, 0xF0, 0x05, 0xB1
    };
    return gicd_ids[regoffset / 4];
}
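
/*
 * Minimal usage sketch (an assumption about the caller, not part of the
 * original header): an MMIO read that lands in the ID register window
 * starting at GICD_IDREGS would be satisfied like this.
 */
static inline uint32_t gicv3_dist_idreg_read_example(hwaddr offset)
{
    return gicv3_idreg(offset - GICD_IDREGS);
}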

/**
 * gicv3_irq_group:
 *
 * Return the group which this interrupt is configured as (GICV3_G0,
 * GICV3_G1 or GICV3_G1NS).
 */
static inline int gicv3_irq_group(GICv3State *s, GICv3CPUState *cs, int irq)
{
    bool grpbit, grpmodbit;

    if (irq < GIC_INTERNAL) {
        grpbit = extract32(cs->gicr_igroupr0, irq, 1);
        grpmodbit = extract32(cs->gicr_igrpmodr0, irq, 1);
    } else {
        grpbit = gicv3_gicd_group_test(s, irq);
        grpmodbit = gicv3_gicd_grpmod_test(s, irq);
    }
    if (grpbit) {
        return GICV3_G1NS;
    }
    if (s->gicd_ctlr & GICD_CTLR_DS) {
        return GICV3_G0;
    }
    return grpmodbit ? GICV3_G1 : GICV3_G0;
}

/**
 * gicv3_redist_affid:
 *
 * Return the 32-bit affinity ID of the CPU connected to this redistributor
 */
static inline uint32_t gicv3_redist_affid(GICv3CPUState *cs)
{
    return cs->gicr_typer >> 32;
}
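
/*
 * Minimal sketch (hypothetical helper, not in the original header): the
 * 32-bit affinity ID packs Aff0..Aff3 one byte each, lowest level in the
 * lowest byte, so a single affinity level can be pulled out like this.
 */
static inline uint8_t gicv3_affid_level_example(uint32_t affid, int level)
{
    return extract32(affid, level * 8, 8);
}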

/**
 * gicv3_cache_target_cpustate:
 *
 * Update the cached CPU state corresponding to the target for this interrupt
 * (which is kept in s->gicd_irouter_target[]).
 */
static inline void gicv3_cache_target_cpustate(GICv3State *s, int irq)
{
    GICv3CPUState *cs = NULL;
    int i;
    uint32_t tgtaff = extract64(s->gicd_irouter[irq], 0, 24) |
        extract64(s->gicd_irouter[irq], 32, 8) << 24;

    for (i = 0; i < s->num_cpu; i++) {
        if (s->cpu[i].gicr_typer >> 32 == tgtaff) {
            cs = &s->cpu[i];
            break;
        }
    }

    s->gicd_irouter_target[irq] = cs;
}
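
/*
 * Minimal sketch of the expected caller (illustrative only; a real register
 * write handler also has to deal with access size and security checks):
 * a write to GICD_IROUTER<n> is followed by a refresh of the cached target
 * CPU for that interrupt.
 */
static inline void gicv3_irouter_write_example(GICv3State *s, int irq,
                                               uint64_t value)
{
    s->gicd_irouter[irq] = value;
    gicv3_cache_target_cpustate(s, irq);
}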

/**
 * gicv3_cache_all_target_cpustates:
 *
 * Populate the entire cache of CPU state pointers for interrupt targets
 * (e.g. after inbound migration or CPU reset)
 */
static inline void gicv3_cache_all_target_cpustates(GICv3State *s)
{
    int irq;

    for (irq = GIC_INTERNAL; irq < GICV3_MAXIRQ; irq++) {
        gicv3_cache_target_cpustate(s, irq);
    }
}

void gicv3_set_gicv3state(CPUState *cpu, GICv3CPUState *s);

#endif /* QEMU_ARM_GICV3_INTERNAL_H */