/*
 * hw/block/nvme: Use QEMU_PACKED on hardware/packet structures
 *
 * qemu.git: include/block/nvme.h
 */
1 #ifndef BLOCK_NVME_H
2 #define BLOCK_NVME_H
3
4 typedef struct QEMU_PACKED NvmeBar {
5 uint64_t cap;
6 uint32_t vs;
7 uint32_t intms;
8 uint32_t intmc;
9 uint32_t cc;
10 uint32_t rsvd1;
11 uint32_t csts;
12 uint32_t nssrc;
13 uint32_t aqa;
14 uint64_t asq;
15 uint64_t acq;
16 uint32_t cmbloc;
17 uint32_t cmbsz;
18 uint8_t padding[3520]; /* not used by QEMU */
19 uint32_t pmrcap;
20 uint32_t pmrctl;
21 uint32_t pmrsts;
22 uint32_t pmrebs;
23 uint32_t pmrswtp;
24 uint32_t pmrmsc;
25 } NvmeBar;
26
/* Bit positions of the fields within the 64-bit CAP register. */
enum NvmeCapShift {
    CAP_MQES_SHIFT     = 0,
    CAP_CQR_SHIFT      = 16,
    CAP_AMS_SHIFT      = 17,
    CAP_TO_SHIFT       = 24,
    CAP_DSTRD_SHIFT    = 32,
    CAP_NSSRS_SHIFT    = 36,
    CAP_CSS_SHIFT      = 37,
    CAP_MPSMIN_SHIFT   = 48,
    CAP_MPSMAX_SHIFT   = 52,
    CAP_PMR_SHIFT      = 56,
};

/* Field widths for CAP, expressed as masks applied after shifting. */
enum NvmeCapMask {
    CAP_MQES_MASK      = 0xffff,
    CAP_CQR_MASK       = 0x1,
    CAP_AMS_MASK       = 0x3,
    CAP_TO_MASK        = 0xff,
    CAP_DSTRD_MASK     = 0xf,
    CAP_NSSRS_MASK     = 0x1,
    CAP_CSS_MASK       = 0xff,
    CAP_MPSMIN_MASK    = 0xf,
    CAP_MPSMAX_MASK    = 0xf,
    CAP_PMR_MASK       = 0x1,
};

/* Extract a CAP field from a register value. */
#define NVME_CAP_MQES(cap)  (((cap) >> CAP_MQES_SHIFT)   & CAP_MQES_MASK)
#define NVME_CAP_CQR(cap)   (((cap) >> CAP_CQR_SHIFT)    & CAP_CQR_MASK)
#define NVME_CAP_AMS(cap)   (((cap) >> CAP_AMS_SHIFT)    & CAP_AMS_MASK)
#define NVME_CAP_TO(cap)    (((cap) >> CAP_TO_SHIFT)     & CAP_TO_MASK)
#define NVME_CAP_DSTRD(cap) (((cap) >> CAP_DSTRD_SHIFT)  & CAP_DSTRD_MASK)
#define NVME_CAP_NSSRS(cap) (((cap) >> CAP_NSSRS_SHIFT)  & CAP_NSSRS_MASK)
#define NVME_CAP_CSS(cap)   (((cap) >> CAP_CSS_SHIFT)    & CAP_CSS_MASK)
#define NVME_CAP_MPSMIN(cap) (((cap) >> CAP_MPSMIN_SHIFT) & CAP_MPSMIN_MASK)
#define NVME_CAP_MPSMAX(cap) (((cap) >> CAP_MPSMAX_SHIFT) & CAP_MPSMAX_MASK)

/* OR a CAP field into an accumulating register value. */
#define NVME_CAP_SET_MQES(cap, val)   (cap |= (uint64_t)(val & CAP_MQES_MASK)  \
                                                           << CAP_MQES_SHIFT)
#define NVME_CAP_SET_CQR(cap, val)    (cap |= (uint64_t)(val & CAP_CQR_MASK)   \
                                                           << CAP_CQR_SHIFT)
#define NVME_CAP_SET_AMS(cap, val)    (cap |= (uint64_t)(val & CAP_AMS_MASK)   \
                                                           << CAP_AMS_SHIFT)
#define NVME_CAP_SET_TO(cap, val)     (cap |= (uint64_t)(val & CAP_TO_MASK)    \
                                                           << CAP_TO_SHIFT)
#define NVME_CAP_SET_DSTRD(cap, val)  (cap |= (uint64_t)(val & CAP_DSTRD_MASK) \
                                                           << CAP_DSTRD_SHIFT)
#define NVME_CAP_SET_NSSRS(cap, val)  (cap |= (uint64_t)(val & CAP_NSSRS_MASK) \
                                                           << CAP_NSSRS_SHIFT)
#define NVME_CAP_SET_CSS(cap, val)    (cap |= (uint64_t)(val & CAP_CSS_MASK)   \
                                                           << CAP_CSS_SHIFT)
#define NVME_CAP_SET_MPSMIN(cap, val) (cap |= (uint64_t)(val & CAP_MPSMIN_MASK)\
                                                           << CAP_MPSMIN_SHIFT)
#define NVME_CAP_SET_MPSMAX(cap, val) (cap |= (uint64_t)(val & CAP_MPSMAX_MASK)\
                                                           << CAP_MPSMAX_SHIFT)
#define NVME_CAP_SET_PMRS(cap, val)   (cap |= (uint64_t)(val & CAP_PMR_MASK)   \
                                                           << CAP_PMR_SHIFT)
83
/* Bit positions of the fields within the CC (Controller Configuration) register. */
enum NvmeCcShift {
    CC_EN_SHIFT     = 0,
    CC_CSS_SHIFT    = 4,
    CC_MPS_SHIFT    = 7,
    CC_AMS_SHIFT    = 11,
    CC_SHN_SHIFT    = 14,
    CC_IOSQES_SHIFT = 16,
    CC_IOCQES_SHIFT = 20,
};

/* Field widths for CC, as masks applied after shifting. */
enum NvmeCcMask {
    CC_EN_MASK      = 0x1,
    CC_CSS_MASK     = 0x7,
    CC_MPS_MASK     = 0xf,
    CC_AMS_MASK     = 0x7,
    CC_SHN_MASK     = 0x3,
    CC_IOSQES_MASK  = 0xf,
    CC_IOCQES_MASK  = 0xf,
};

/* Extract a CC field from a register value. */
#define NVME_CC_EN(cc)     ((cc >> CC_EN_SHIFT)     & CC_EN_MASK)
#define NVME_CC_CSS(cc)    ((cc >> CC_CSS_SHIFT)    & CC_CSS_MASK)
#define NVME_CC_MPS(cc)    ((cc >> CC_MPS_SHIFT)    & CC_MPS_MASK)
#define NVME_CC_AMS(cc)    ((cc >> CC_AMS_SHIFT)    & CC_AMS_MASK)
#define NVME_CC_SHN(cc)    ((cc >> CC_SHN_SHIFT)    & CC_SHN_MASK)
#define NVME_CC_IOSQES(cc) ((cc >> CC_IOSQES_SHIFT) & CC_IOSQES_MASK)
#define NVME_CC_IOCQES(cc) ((cc >> CC_IOCQES_SHIFT) & CC_IOCQES_MASK)
111
/* Bit positions of the fields within the CSTS (Controller Status) register. */
enum NvmeCstsShift {
    CSTS_RDY_SHIFT   = 0,
    CSTS_CFS_SHIFT   = 1,
    CSTS_SHST_SHIFT  = 2,
    CSTS_NSSRO_SHIFT = 4,
};

/* Field widths for CSTS, as masks applied after shifting. */
enum NvmeCstsMask {
    CSTS_RDY_MASK   = 0x1,
    CSTS_CFS_MASK   = 0x1,
    CSTS_SHST_MASK  = 0x3,
    CSTS_NSSRO_MASK = 0x1,
};

/* Pre-shifted CSTS values as written by the device model. */
enum NvmeCsts {
    NVME_CSTS_READY         = 1 << CSTS_RDY_SHIFT,
    NVME_CSTS_FAILED        = 1 << CSTS_CFS_SHIFT,
    NVME_CSTS_SHST_NORMAL   = 0 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_PROGRESS = 1 << CSTS_SHST_SHIFT,
    NVME_CSTS_SHST_COMPLETE = 2 << CSTS_SHST_SHIFT,
    NVME_CSTS_NSSRO         = 1 << CSTS_NSSRO_SHIFT,
};

/* Extract a CSTS field from a register value. */
#define NVME_CSTS_RDY(csts)   ((csts >> CSTS_RDY_SHIFT)   & CSTS_RDY_MASK)
#define NVME_CSTS_CFS(csts)   ((csts >> CSTS_CFS_SHIFT)   & CSTS_CFS_MASK)
#define NVME_CSTS_SHST(csts)  ((csts >> CSTS_SHST_SHIFT)  & CSTS_SHST_MASK)
#define NVME_CSTS_NSSRO(csts) ((csts >> CSTS_NSSRO_SHIFT) & CSTS_NSSRO_MASK)
139
/* AQA (Admin Queue Attributes) register: 12-bit queue sizes, 0's based. */
enum NvmeAqaShift {
    AQA_ASQS_SHIFT = 0,
    AQA_ACQS_SHIFT = 16,
};

enum NvmeAqaMask {
    AQA_ASQS_MASK = 0xfff,
    AQA_ACQS_MASK = 0xfff,
};

/* Extract the admin submission/completion queue sizes from AQA. */
#define NVME_AQA_ASQS(aqa) ((aqa >> AQA_ASQS_SHIFT) & AQA_ASQS_MASK)
#define NVME_AQA_ACQS(aqa) ((aqa >> AQA_ACQS_SHIFT) & AQA_ACQS_MASK)
152
/* CMBLOC (Controller Memory Buffer Location) register fields. */
enum NvmeCmblocShift {
    CMBLOC_BIR_SHIFT  = 0,
    CMBLOC_OFST_SHIFT = 12,
};

enum NvmeCmblocMask {
    CMBLOC_BIR_MASK  = 0x7,
    CMBLOC_OFST_MASK = 0xfffff,
};

/* Extract the BAR indicator / offset fields from CMBLOC. */
#define NVME_CMBLOC_BIR(cmbloc)  ((cmbloc >> CMBLOC_BIR_SHIFT)  & \
                                  CMBLOC_BIR_MASK)
#define NVME_CMBLOC_OFST(cmbloc) ((cmbloc >> CMBLOC_OFST_SHIFT) & \
                                  CMBLOC_OFST_MASK)

/* OR a field into an accumulating CMBLOC value. */
#define NVME_CMBLOC_SET_BIR(cmbloc, val)  \
    (cmbloc |= (uint64_t)(val & CMBLOC_BIR_MASK) << CMBLOC_BIR_SHIFT)
#define NVME_CMBLOC_SET_OFST(cmbloc, val) \
    (cmbloc |= (uint64_t)(val & CMBLOC_OFST_MASK) << CMBLOC_OFST_SHIFT)
172
/* CMBSZ (Controller Memory Buffer Size) register fields. */
enum NvmeCmbszShift {
    CMBSZ_SQS_SHIFT   = 0,
    CMBSZ_CQS_SHIFT   = 1,
    CMBSZ_LISTS_SHIFT = 2,
    CMBSZ_RDS_SHIFT   = 3,
    CMBSZ_WDS_SHIFT   = 4,
    CMBSZ_SZU_SHIFT   = 8,
    CMBSZ_SZ_SHIFT    = 12,
};

enum NvmeCmbszMask {
    CMBSZ_SQS_MASK   = 0x1,
    CMBSZ_CQS_MASK   = 0x1,
    CMBSZ_LISTS_MASK = 0x1,
    CMBSZ_RDS_MASK   = 0x1,
    CMBSZ_WDS_MASK   = 0x1,
    CMBSZ_SZU_MASK   = 0xf,
    CMBSZ_SZ_MASK    = 0xfffff,
};

/* Extract a CMBSZ field from a register value. */
#define NVME_CMBSZ_SQS(cmbsz)   ((cmbsz >> CMBSZ_SQS_SHIFT)   & CMBSZ_SQS_MASK)
#define NVME_CMBSZ_CQS(cmbsz)   ((cmbsz >> CMBSZ_CQS_SHIFT)   & CMBSZ_CQS_MASK)
#define NVME_CMBSZ_LISTS(cmbsz) ((cmbsz >> CMBSZ_LISTS_SHIFT) & CMBSZ_LISTS_MASK)
#define NVME_CMBSZ_RDS(cmbsz)   ((cmbsz >> CMBSZ_RDS_SHIFT)   & CMBSZ_RDS_MASK)
#define NVME_CMBSZ_WDS(cmbsz)   ((cmbsz >> CMBSZ_WDS_SHIFT)   & CMBSZ_WDS_MASK)
#define NVME_CMBSZ_SZU(cmbsz)   ((cmbsz >> CMBSZ_SZU_SHIFT)   & CMBSZ_SZU_MASK)
#define NVME_CMBSZ_SZ(cmbsz)    ((cmbsz >> CMBSZ_SZ_SHIFT)    & CMBSZ_SZ_MASK)

/* OR a field into an accumulating CMBSZ value. */
#define NVME_CMBSZ_SET_SQS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_SQS_MASK) << CMBSZ_SQS_SHIFT)
#define NVME_CMBSZ_SET_CQS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_CQS_MASK) << CMBSZ_CQS_SHIFT)
#define NVME_CMBSZ_SET_LISTS(cmbsz, val) \
    (cmbsz |= (uint64_t)(val & CMBSZ_LISTS_MASK) << CMBSZ_LISTS_SHIFT)
#define NVME_CMBSZ_SET_RDS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_RDS_MASK) << CMBSZ_RDS_SHIFT)
#define NVME_CMBSZ_SET_WDS(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_WDS_MASK) << CMBSZ_WDS_SHIFT)
#define NVME_CMBSZ_SET_SZU(cmbsz, val)   \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZU_MASK) << CMBSZ_SZU_SHIFT)
#define NVME_CMBSZ_SET_SZ(cmbsz, val)    \
    (cmbsz |= (uint64_t)(val & CMBSZ_SZ_MASK) << CMBSZ_SZ_SHIFT)

/* CMB size in bytes: SZ units of 4 KiB * 16^SZU. */
#define NVME_CMBSZ_GETSIZE(cmbsz) \
    (NVME_CMBSZ_SZ(cmbsz) * (1 << (12 + 4 * NVME_CMBSZ_SZU(cmbsz))))
218
/* PMRCAP (Persistent Memory Region Capabilities) register fields. */
enum NvmePmrcapShift {
    PMRCAP_RDS_SHIFT    = 3,
    PMRCAP_WDS_SHIFT    = 4,
    PMRCAP_BIR_SHIFT    = 5,
    PMRCAP_PMRTU_SHIFT  = 8,
    PMRCAP_PMRWBM_SHIFT = 10,
    PMRCAP_PMRTO_SHIFT  = 16,
    PMRCAP_CMSS_SHIFT   = 24,
};

enum NvmePmrcapMask {
    PMRCAP_RDS_MASK    = 0x1,
    PMRCAP_WDS_MASK    = 0x1,
    PMRCAP_BIR_MASK    = 0x7,
    PMRCAP_PMRTU_MASK  = 0x3,
    PMRCAP_PMRWBM_MASK = 0xf,
    PMRCAP_PMRTO_MASK  = 0xff,
    PMRCAP_CMSS_MASK   = 0x1,
};

/* Extract a PMRCAP field from a register value. */
#define NVME_PMRCAP_RDS(pmrcap)    \
    ((pmrcap >> PMRCAP_RDS_SHIFT)    & PMRCAP_RDS_MASK)
#define NVME_PMRCAP_WDS(pmrcap)    \
    ((pmrcap >> PMRCAP_WDS_SHIFT)    & PMRCAP_WDS_MASK)
#define NVME_PMRCAP_BIR(pmrcap)    \
    ((pmrcap >> PMRCAP_BIR_SHIFT)    & PMRCAP_BIR_MASK)
#define NVME_PMRCAP_PMRTU(pmrcap)  \
    ((pmrcap >> PMRCAP_PMRTU_SHIFT)  & PMRCAP_PMRTU_MASK)
#define NVME_PMRCAP_PMRWBM(pmrcap) \
    ((pmrcap >> PMRCAP_PMRWBM_SHIFT) & PMRCAP_PMRWBM_MASK)
#define NVME_PMRCAP_PMRTO(pmrcap)  \
    ((pmrcap >> PMRCAP_PMRTO_SHIFT)  & PMRCAP_PMRTO_MASK)
#define NVME_PMRCAP_CMSS(pmrcap)   \
    ((pmrcap >> PMRCAP_CMSS_SHIFT)   & PMRCAP_CMSS_MASK)

/* OR a field into an accumulating PMRCAP value. */
#define NVME_PMRCAP_SET_RDS(pmrcap, val)    \
    (pmrcap |= (uint64_t)(val & PMRCAP_RDS_MASK) << PMRCAP_RDS_SHIFT)
#define NVME_PMRCAP_SET_WDS(pmrcap, val)    \
    (pmrcap |= (uint64_t)(val & PMRCAP_WDS_MASK) << PMRCAP_WDS_SHIFT)
#define NVME_PMRCAP_SET_BIR(pmrcap, val)    \
    (pmrcap |= (uint64_t)(val & PMRCAP_BIR_MASK) << PMRCAP_BIR_SHIFT)
#define NVME_PMRCAP_SET_PMRTU(pmrcap, val)  \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTU_MASK) << PMRCAP_PMRTU_SHIFT)
#define NVME_PMRCAP_SET_PMRWBM(pmrcap, val) \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRWBM_MASK) << PMRCAP_PMRWBM_SHIFT)
#define NVME_PMRCAP_SET_PMRTO(pmrcap, val)  \
    (pmrcap |= (uint64_t)(val & PMRCAP_PMRTO_MASK) << PMRCAP_PMRTO_SHIFT)
#define NVME_PMRCAP_SET_CMSS(pmrcap, val)   \
    (pmrcap |= (uint64_t)(val & PMRCAP_CMSS_MASK) << PMRCAP_CMSS_SHIFT)
268
/* PMRCTL (Persistent Memory Region Control) register: single enable bit. */
enum NvmePmrctlShift {
    PMRCTL_EN_SHIFT = 0,
};

enum NvmePmrctlMask {
    PMRCTL_EN_MASK = 0x1,
};

#define NVME_PMRCTL_EN(pmrctl)  ((pmrctl >> PMRCTL_EN_SHIFT) & PMRCTL_EN_MASK)

#define NVME_PMRCTL_SET_EN(pmrctl, val) \
    (pmrctl |= (uint64_t)(val & PMRCTL_EN_MASK) << PMRCTL_EN_SHIFT)
281
/* PMRSTS (Persistent Memory Region Status) register fields. */
enum NvmePmrstsShift {
    PMRSTS_ERR_SHIFT  = 0,
    PMRSTS_NRDY_SHIFT = 8,
    PMRSTS_HSTS_SHIFT = 9,
    PMRSTS_CBAI_SHIFT = 12,
};

enum NvmePmrstsMask {
    PMRSTS_ERR_MASK  = 0xff,
    PMRSTS_NRDY_MASK = 0x1,
    PMRSTS_HSTS_MASK = 0x7,
    PMRSTS_CBAI_MASK = 0x1,
};

/* Extract a PMRSTS field from a register value. */
#define NVME_PMRSTS_ERR(pmrsts)  \
    ((pmrsts >> PMRSTS_ERR_SHIFT)  & PMRSTS_ERR_MASK)
#define NVME_PMRSTS_NRDY(pmrsts) \
    ((pmrsts >> PMRSTS_NRDY_SHIFT) & PMRSTS_NRDY_MASK)
#define NVME_PMRSTS_HSTS(pmrsts) \
    ((pmrsts >> PMRSTS_HSTS_SHIFT) & PMRSTS_HSTS_MASK)
#define NVME_PMRSTS_CBAI(pmrsts) \
    ((pmrsts >> PMRSTS_CBAI_SHIFT) & PMRSTS_CBAI_MASK)

/* OR a field into an accumulating PMRSTS value. */
#define NVME_PMRSTS_SET_ERR(pmrsts, val)  \
    (pmrsts |= (uint64_t)(val & PMRSTS_ERR_MASK) << PMRSTS_ERR_SHIFT)
#define NVME_PMRSTS_SET_NRDY(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_NRDY_MASK) << PMRSTS_NRDY_SHIFT)
#define NVME_PMRSTS_SET_HSTS(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_HSTS_MASK) << PMRSTS_HSTS_SHIFT)
#define NVME_PMRSTS_SET_CBAI(pmrsts, val) \
    (pmrsts |= (uint64_t)(val & PMRSTS_CBAI_MASK) << PMRSTS_CBAI_SHIFT)
313
/* PMREBS (PMR Elasticity Buffer Size) register fields. */
enum NvmePmrebsShift {
    PMREBS_PMRSZU_SHIFT = 0,
    PMREBS_RBB_SHIFT    = 4,
    PMREBS_PMRWBZ_SHIFT = 8,
};

enum NvmePmrebsMask {
    PMREBS_PMRSZU_MASK = 0xf,
    PMREBS_RBB_MASK    = 0x1,
    PMREBS_PMRWBZ_MASK = 0xffffff,
};

/* Extract a PMREBS field from a register value. */
#define NVME_PMREBS_PMRSZU(pmrebs) \
    ((pmrebs >> PMREBS_PMRSZU_SHIFT) & PMREBS_PMRSZU_MASK)
#define NVME_PMREBS_RBB(pmrebs)    \
    ((pmrebs >> PMREBS_RBB_SHIFT)    & PMREBS_RBB_MASK)
#define NVME_PMREBS_PMRWBZ(pmrebs) \
    ((pmrebs >> PMREBS_PMRWBZ_SHIFT) & PMREBS_PMRWBZ_MASK)

/* OR a field into an accumulating PMREBS value. */
#define NVME_PMREBS_SET_PMRSZU(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRSZU_MASK) << PMREBS_PMRSZU_SHIFT)
#define NVME_PMREBS_SET_RBB(pmrebs, val)    \
    (pmrebs |= (uint64_t)(val & PMREBS_RBB_MASK) << PMREBS_RBB_SHIFT)
#define NVME_PMREBS_SET_PMRWBZ(pmrebs, val) \
    (pmrebs |= (uint64_t)(val & PMREBS_PMRWBZ_MASK) << PMREBS_PMRWBZ_SHIFT)
339
/* PMRSWTP (PMR Sustained Write Throughput) register fields. */
enum NvmePmrswtpShift {
    PMRSWTP_PMRSWTU_SHIFT = 0,
    PMRSWTP_PMRSWTV_SHIFT = 8,
};

enum NvmePmrswtpMask {
    PMRSWTP_PMRSWTU_MASK = 0xf,
    PMRSWTP_PMRSWTV_MASK = 0xffffff,
};

/* Extract the throughput units / value fields from PMRSWTP. */
#define NVME_PMRSWTP_PMRSWTU(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTU_SHIFT) & PMRSWTP_PMRSWTU_MASK)
#define NVME_PMRSWTP_PMRSWTV(pmrswtp) \
    ((pmrswtp >> PMRSWTP_PMRSWTV_SHIFT) & PMRSWTP_PMRSWTV_MASK)

/* OR a field into an accumulating PMRSWTP value. */
#define NVME_PMRSWTP_SET_PMRSWTU(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTU_MASK) << PMRSWTP_PMRSWTU_SHIFT)
#define NVME_PMRSWTP_SET_PMRSWTV(pmrswtp, val) \
    (pmrswtp |= (uint64_t)(val & PMRSWTP_PMRSWTV_MASK) << PMRSWTP_PMRSWTV_SHIFT)
359
/* PMRMSC (PMR Memory Space Control) register fields. */
enum NvmePmrmscShift {
    PMRMSC_CMSE_SHIFT = 1,
    PMRMSC_CBA_SHIFT  = 12,
};

enum NvmePmrmscMask {
    PMRMSC_CMSE_MASK = 0x1,
    PMRMSC_CBA_MASK  = 0xfffffffffffff, /* 52-bit controller base address */
};

/* Extract the memory-space-enable / base-address fields from PMRMSC. */
#define NVME_PMRMSC_CMSE(pmrmsc) \
    ((pmrmsc >> PMRMSC_CMSE_SHIFT) & PMRMSC_CMSE_MASK)
#define NVME_PMRMSC_CBA(pmrmsc)  \
    ((pmrmsc >> PMRMSC_CBA_SHIFT)  & PMRMSC_CBA_MASK)

/* OR a field into an accumulating PMRMSC value. */
#define NVME_PMRMSC_SET_CMSE(pmrmsc, val) \
    (pmrmsc |= (uint64_t)(val & PMRMSC_CMSE_MASK) << PMRMSC_CMSE_SHIFT)
#define NVME_PMRMSC_SET_CBA(pmrmsc, val)  \
    (pmrmsc |= (uint64_t)(val & PMRMSC_CBA_MASK) << PMRMSC_CBA_SHIFT)
379
380 typedef struct QEMU_PACKED NvmeCmd {
381 uint8_t opcode;
382 uint8_t fuse;
383 uint16_t cid;
384 uint32_t nsid;
385 uint64_t res1;
386 uint64_t mptr;
387 uint64_t prp1;
388 uint64_t prp2;
389 uint32_t cdw10;
390 uint32_t cdw11;
391 uint32_t cdw12;
392 uint32_t cdw13;
393 uint32_t cdw14;
394 uint32_t cdw15;
395 } NvmeCmd;
396
/* Admin command set opcodes. */
enum NvmeAdminCommands {
    NVME_ADM_CMD_DELETE_SQ      = 0x00,
    NVME_ADM_CMD_CREATE_SQ      = 0x01,
    NVME_ADM_CMD_GET_LOG_PAGE   = 0x02,
    NVME_ADM_CMD_DELETE_CQ      = 0x04,
    NVME_ADM_CMD_CREATE_CQ      = 0x05,
    NVME_ADM_CMD_IDENTIFY       = 0x06,
    NVME_ADM_CMD_ABORT          = 0x08,
    NVME_ADM_CMD_SET_FEATURES   = 0x09,
    NVME_ADM_CMD_GET_FEATURES   = 0x0a,
    NVME_ADM_CMD_ASYNC_EV_REQ   = 0x0c,
    NVME_ADM_CMD_ACTIVATE_FW    = 0x10,
    NVME_ADM_CMD_DOWNLOAD_FW    = 0x11,
    NVME_ADM_CMD_FORMAT_NVM     = 0x80,
    NVME_ADM_CMD_SECURITY_SEND  = 0x81,
    NVME_ADM_CMD_SECURITY_RECV  = 0x82,
};

/* NVM (I/O) command set opcodes. */
enum NvmeIoCommands {
    NVME_CMD_FLUSH        = 0x00,
    NVME_CMD_WRITE        = 0x01,
    NVME_CMD_READ         = 0x02,
    NVME_CMD_WRITE_UNCOR  = 0x04,
    NVME_CMD_COMPARE      = 0x05,
    NVME_CMD_WRITE_ZEROS  = 0x08,
    NVME_CMD_DSM          = 0x09,
};
424
425 typedef struct QEMU_PACKED NvmeDeleteQ {
426 uint8_t opcode;
427 uint8_t flags;
428 uint16_t cid;
429 uint32_t rsvd1[9];
430 uint16_t qid;
431 uint16_t rsvd10;
432 uint32_t rsvd11[5];
433 } NvmeDeleteQ;
434
435 typedef struct QEMU_PACKED NvmeCreateCq {
436 uint8_t opcode;
437 uint8_t flags;
438 uint16_t cid;
439 uint32_t rsvd1[5];
440 uint64_t prp1;
441 uint64_t rsvd8;
442 uint16_t cqid;
443 uint16_t qsize;
444 uint16_t cq_flags;
445 uint16_t irq_vector;
446 uint32_t rsvd12[4];
447 } NvmeCreateCq;
448
/* cq_flags decoding: bit 0 = Physically Contiguous, bit 1 = Interrupts Enabled. */
#define NVME_CQ_FLAGS_PC(cq_flags)  (cq_flags & 0x1)
#define NVME_CQ_FLAGS_IEN(cq_flags) ((cq_flags >> 1) & 0x1)
451
452 typedef struct QEMU_PACKED NvmeCreateSq {
453 uint8_t opcode;
454 uint8_t flags;
455 uint16_t cid;
456 uint32_t rsvd1[5];
457 uint64_t prp1;
458 uint64_t rsvd8;
459 uint16_t sqid;
460 uint16_t qsize;
461 uint16_t sq_flags;
462 uint16_t cqid;
463 uint32_t rsvd12[4];
464 } NvmeCreateSq;
465
/* sq_flags decoding: bit 0 = Physically Contiguous, bits 2:1 = Queue Priority. */
#define NVME_SQ_FLAGS_PC(sq_flags)    (sq_flags & 0x1)
#define NVME_SQ_FLAGS_QPRIO(sq_flags) ((sq_flags >> 1) & 0x3)

/* Queue flag values and priority classes (for weighted round robin). */
enum NvmeQueueFlags {
    NVME_Q_PC          = 1,
    NVME_Q_PRIO_URGENT = 0,
    NVME_Q_PRIO_HIGH   = 1,
    NVME_Q_PRIO_NORMAL = 2,
    NVME_Q_PRIO_LOW    = 3,
};
476
477 typedef struct QEMU_PACKED NvmeIdentify {
478 uint8_t opcode;
479 uint8_t flags;
480 uint16_t cid;
481 uint32_t nsid;
482 uint64_t rsvd2[2];
483 uint64_t prp1;
484 uint64_t prp2;
485 uint32_t cns;
486 uint32_t rsvd11[5];
487 } NvmeIdentify;
488
489 typedef struct QEMU_PACKED NvmeRwCmd {
490 uint8_t opcode;
491 uint8_t flags;
492 uint16_t cid;
493 uint32_t nsid;
494 uint64_t rsvd2;
495 uint64_t mptr;
496 uint64_t prp1;
497 uint64_t prp2;
498 uint64_t slba;
499 uint16_t nlb;
500 uint16_t control;
501 uint32_t dsmgmt;
502 uint32_t reftag;
503 uint16_t apptag;
504 uint16_t appmask;
505 } NvmeRwCmd;
506
/* Read/Write command "control" bits and "dsmgmt" hint values. */
enum {
    NVME_RW_LR                  = 1 << 15, /* limited retry */
    NVME_RW_FUA                 = 1 << 14, /* force unit access */
    NVME_RW_DSM_FREQ_UNSPEC     = 0,
    NVME_RW_DSM_FREQ_TYPICAL    = 1,
    NVME_RW_DSM_FREQ_RARE       = 2,
    NVME_RW_DSM_FREQ_READS      = 3,
    NVME_RW_DSM_FREQ_WRITES     = 4,
    NVME_RW_DSM_FREQ_RW         = 5,
    NVME_RW_DSM_FREQ_ONCE       = 6,
    NVME_RW_DSM_FREQ_PREFETCH   = 7,
    NVME_RW_DSM_FREQ_TEMP       = 8,
    NVME_RW_DSM_LATENCY_NONE    = 0 << 4,
    NVME_RW_DSM_LATENCY_IDLE    = 1 << 4,
    NVME_RW_DSM_LATENCY_NORM    = 2 << 4,
    NVME_RW_DSM_LATENCY_LOW     = 3 << 4,
    NVME_RW_DSM_SEQ_REQ         = 1 << 6,
    NVME_RW_DSM_COMPRESSED      = 1 << 7,
    NVME_RW_PRINFO_PRACT        = 1 << 13, /* protection information action */
    NVME_RW_PRINFO_PRCHK_GUARD  = 1 << 12,
    NVME_RW_PRINFO_PRCHK_APP    = 1 << 11,
    NVME_RW_PRINFO_PRCHK_REF    = 1 << 10,
};
530
531 typedef struct QEMU_PACKED NvmeDsmCmd {
532 uint8_t opcode;
533 uint8_t flags;
534 uint16_t cid;
535 uint32_t nsid;
536 uint64_t rsvd2[2];
537 uint64_t prp1;
538 uint64_t prp2;
539 uint32_t nr;
540 uint32_t attributes;
541 uint32_t rsvd12[4];
542 } NvmeDsmCmd;
543
/* Dataset Management attribute bits: integral read/write, deallocate. */
enum {
    NVME_DSMGMT_IDR = 1 << 0,
    NVME_DSMGMT_IDW = 1 << 1,
    NVME_DSMGMT_AD  = 1 << 2,
};
549
550 typedef struct QEMU_PACKED NvmeDsmRange {
551 uint32_t cattr;
552 uint32_t nlb;
553 uint64_t slba;
554 } NvmeDsmRange;
555
/* Asynchronous Event Request result: event types and information codes. */
enum NvmeAsyncEventRequest {
    NVME_AER_TYPE_ERROR                     = 0,
    NVME_AER_TYPE_SMART                     = 1,
    NVME_AER_TYPE_IO_SPECIFIC               = 6,
    NVME_AER_TYPE_VENDOR_SPECIFIC           = 7,
    NVME_AER_INFO_ERR_INVALID_SQ            = 0,
    NVME_AER_INFO_ERR_INVALID_DB            = 1,
    NVME_AER_INFO_ERR_DIAG_FAIL             = 2,
    NVME_AER_INFO_ERR_PERS_INTERNAL_ERR     = 3,
    NVME_AER_INFO_ERR_TRANS_INTERNAL_ERR    = 4,
    NVME_AER_INFO_ERR_FW_IMG_LOAD_ERR       = 5,
    NVME_AER_INFO_SMART_RELIABILITY         = 0,
    NVME_AER_INFO_SMART_TEMP_THRESH         = 1,
    NVME_AER_INFO_SMART_SPARE_THRESH        = 2,
};
571
572 typedef struct QEMU_PACKED NvmeAerResult {
573 uint8_t event_type;
574 uint8_t event_info;
575 uint8_t log_page;
576 uint8_t resv;
577 } NvmeAerResult;
578
579 typedef struct QEMU_PACKED NvmeCqe {
580 uint32_t result;
581 uint32_t rsvd;
582 uint16_t sq_head;
583 uint16_t sq_id;
584 uint16_t cid;
585 uint16_t status;
586 } NvmeCqe;
587
/*
 * Completion status codes (status code type folded into the value:
 * 0x0xxx generic, 0x01xx command-specific, 0x02xx media/data integrity).
 * NVME_MORE / NVME_DNR are modifier bits; NVME_NO_COMPLETE is a QEMU
 * internal sentinel meaning "no CQE should be posted".
 */
enum NvmeStatusCodes {
    NVME_SUCCESS                = 0x0000,
    NVME_INVALID_OPCODE         = 0x0001,
    NVME_INVALID_FIELD          = 0x0002,
    NVME_CID_CONFLICT           = 0x0003,
    NVME_DATA_TRAS_ERROR        = 0x0004,
    NVME_POWER_LOSS_ABORT       = 0x0005,
    NVME_INTERNAL_DEV_ERROR     = 0x0006,
    NVME_CMD_ABORT_REQ          = 0x0007,
    NVME_CMD_ABORT_SQ_DEL       = 0x0008,
    NVME_CMD_ABORT_FAILED_FUSE  = 0x0009,
    NVME_CMD_ABORT_MISSING_FUSE = 0x000a,
    NVME_INVALID_NSID           = 0x000b,
    NVME_CMD_SEQ_ERROR          = 0x000c,
    NVME_LBA_RANGE              = 0x0080,
    NVME_CAP_EXCEEDED           = 0x0081,
    NVME_NS_NOT_READY           = 0x0082,
    NVME_NS_RESV_CONFLICT       = 0x0083,
    NVME_INVALID_CQID           = 0x0100,
    NVME_INVALID_QID            = 0x0101,
    NVME_MAX_QSIZE_EXCEEDED     = 0x0102,
    NVME_ACL_EXCEEDED           = 0x0103,
    NVME_RESERVED               = 0x0104,
    NVME_AER_LIMIT_EXCEEDED     = 0x0105,
    NVME_INVALID_FW_SLOT        = 0x0106,
    NVME_INVALID_FW_IMAGE       = 0x0107,
    NVME_INVALID_IRQ_VECTOR     = 0x0108,
    NVME_INVALID_LOG_ID         = 0x0109,
    NVME_INVALID_FORMAT         = 0x010a,
    NVME_FW_REQ_RESET           = 0x010b,
    NVME_INVALID_QUEUE_DEL      = 0x010c,
    NVME_FID_NOT_SAVEABLE       = 0x010d,
    NVME_FID_NOT_NSID_SPEC      = 0x010f,
    NVME_FW_REQ_SUSYSTEM_RESET  = 0x0110,
    NVME_CONFLICTING_ATTRS      = 0x0180,
    NVME_INVALID_PROT_INFO      = 0x0181,
    NVME_WRITE_TO_RO            = 0x0182,
    NVME_WRITE_FAULT            = 0x0280,
    NVME_UNRECOVERED_READ       = 0x0281,
    NVME_E2E_GUARD_ERROR        = 0x0282,
    NVME_E2E_APP_ERROR          = 0x0283,
    NVME_E2E_REF_ERROR          = 0x0284,
    NVME_CMP_FAILURE            = 0x0285,
    NVME_ACCESS_DENIED          = 0x0286,
    NVME_MORE                   = 0x2000,
    NVME_DNR                    = 0x4000,
    NVME_NO_COMPLETE            = 0xffff,
};
636
637 typedef struct QEMU_PACKED NvmeFwSlotInfoLog {
638 uint8_t afi;
639 uint8_t reserved1[7];
640 uint8_t frs1[8];
641 uint8_t frs2[8];
642 uint8_t frs3[8];
643 uint8_t frs4[8];
644 uint8_t frs5[8];
645 uint8_t frs6[8];
646 uint8_t frs7[8];
647 uint8_t reserved2[448];
648 } NvmeFwSlotInfoLog;
649
650 typedef struct QEMU_PACKED NvmeErrorLog {
651 uint64_t error_count;
652 uint16_t sqid;
653 uint16_t cid;
654 uint16_t status_field;
655 uint16_t param_error_location;
656 uint64_t lba;
657 uint32_t nsid;
658 uint8_t vs;
659 uint8_t resv[35];
660 } NvmeErrorLog;
661
662 typedef struct QEMU_PACKED NvmeSmartLog {
663 uint8_t critical_warning;
664 uint8_t temperature[2];
665 uint8_t available_spare;
666 uint8_t available_spare_threshold;
667 uint8_t percentage_used;
668 uint8_t reserved1[26];
669 uint64_t data_units_read[2];
670 uint64_t data_units_written[2];
671 uint64_t host_read_commands[2];
672 uint64_t host_write_commands[2];
673 uint64_t controller_busy_time[2];
674 uint64_t power_cycles[2];
675 uint64_t power_on_hours[2];
676 uint64_t unsafe_shutdowns[2];
677 uint64_t media_errors[2];
678 uint64_t number_of_error_log_entries[2];
679 uint8_t reserved2[320];
680 } NvmeSmartLog;
681
/* Bits of NvmeSmartLog.critical_warning. */
enum NvmeSmartWarn {
    NVME_SMART_SPARE                  = 1 << 0,
    NVME_SMART_TEMPERATURE            = 1 << 1,
    NVME_SMART_RELIABILITY            = 1 << 2,
    NVME_SMART_MEDIA_READ_ONLY        = 1 << 3,
    NVME_SMART_FAILED_VOLATILE_MEDIA  = 1 << 4,
};

/* Log page identifiers accepted by Get Log Page. */
enum LogIdentifier {
    NVME_LOG_ERROR_INFO     = 0x01,
    NVME_LOG_SMART_INFO     = 0x02,
    NVME_LOG_FW_SLOT_INFO   = 0x03,
};
695
696 typedef struct QEMU_PACKED NvmePSD {
697 uint16_t mp;
698 uint16_t reserved;
699 uint32_t enlat;
700 uint32_t exlat;
701 uint8_t rrt;
702 uint8_t rrl;
703 uint8_t rwt;
704 uint8_t rwl;
705 uint8_t resv[16];
706 } NvmePSD;
707
/* All Identify data structures are exactly 4 KiB on the wire. */
#define NVME_IDENTIFY_DATA_SIZE 4096

/* CNS (Controller or Namespace Structure) values for the Identify command. */
enum {
    NVME_ID_CNS_NS             = 0x0,
    NVME_ID_CNS_CTRL           = 0x1,
    NVME_ID_CNS_NS_ACTIVE_LIST = 0x2,
};
715
716 typedef struct QEMU_PACKED NvmeIdCtrl {
717 uint16_t vid;
718 uint16_t ssvid;
719 uint8_t sn[20];
720 uint8_t mn[40];
721 uint8_t fr[8];
722 uint8_t rab;
723 uint8_t ieee[3];
724 uint8_t cmic;
725 uint8_t mdts;
726 uint8_t rsvd255[178];
727 uint16_t oacs;
728 uint8_t acl;
729 uint8_t aerl;
730 uint8_t frmw;
731 uint8_t lpa;
732 uint8_t elpe;
733 uint8_t npss;
734 uint8_t rsvd511[248];
735 uint8_t sqes;
736 uint8_t cqes;
737 uint16_t rsvd515;
738 uint32_t nn;
739 uint16_t oncs;
740 uint16_t fuses;
741 uint8_t fna;
742 uint8_t vwc;
743 uint16_t awun;
744 uint16_t awupf;
745 uint8_t rsvd703[174];
746 uint8_t rsvd2047[1344];
747 NvmePSD psd[32];
748 uint8_t vs[1024];
749 } NvmeIdCtrl;
750
/* NvmeIdCtrl.oacs bits: optional admin commands supported. */
enum NvmeIdCtrlOacs {
    NVME_OACS_SECURITY  = 1 << 0,
    NVME_OACS_FORMAT    = 1 << 1,
    NVME_OACS_FW        = 1 << 2,
};

/* NvmeIdCtrl.oncs bits: optional NVM commands supported. */
enum NvmeIdCtrlOncs {
    NVME_ONCS_COMPARE       = 1 << 0,
    NVME_ONCS_WRITE_UNCORR  = 1 << 1,
    NVME_ONCS_DSM           = 1 << 2,
    NVME_ONCS_WRITE_ZEROS   = 1 << 3,
    NVME_ONCS_FEATURES      = 1 << 4,
    NVME_ONCS_RESRVATIONS   = 1 << 5,
    NVME_ONCS_TIMESTAMP     = 1 << 6,
};

/* Required/maximum queue entry sizes from sqes/cqes (log2 values). */
#define NVME_CTRL_SQES_MIN(sqes) ((sqes) & 0xf)
#define NVME_CTRL_SQES_MAX(sqes) (((sqes) >> 4) & 0xf)
#define NVME_CTRL_CQES_MIN(cqes) ((cqes) & 0xf)
#define NVME_CTRL_CQES_MAX(cqes) (((cqes) >> 4) & 0xf)
771
/*
 * Current values of the Get/Set Features settings held by the device
 * model.  Host-visible state only through the Features commands, so this
 * struct is not packed; int_vector_config points to a per-vector array.
 */
typedef struct NvmeFeatureVal {
    uint32_t    arbitration;
    uint32_t    power_mgmt;
    uint32_t    temp_thresh;
    uint32_t    err_rec;
    uint32_t    volatile_wc;
    uint32_t    num_queues;
    uint32_t    int_coalescing;
    uint32_t    *int_vector_config;
    uint32_t    write_atomicity;
    uint32_t    async_config;
    uint32_t    sw_prog_marker;
} NvmeFeatureVal;
785
/* Arbitration feature fields: burst and low/medium/high priority weights. */
#define NVME_ARB_AB(arb)    (arb & 0x7)
#define NVME_ARB_LPW(arb)   ((arb >> 8) & 0xff)
#define NVME_ARB_MPW(arb)   ((arb >> 16) & 0xff)
#define NVME_ARB_HPW(arb)   ((arb >> 24) & 0xff)

/* Interrupt coalescing feature fields: aggregation threshold and time. */
#define NVME_INTC_THR(intc)  (intc & 0xff)
#define NVME_INTC_TIME(intc) ((intc >> 8) & 0xff)
793
/* Feature identifiers for Get/Set Features. */
enum NvmeFeatureIds {
    NVME_ARBITRATION                = 0x1,
    NVME_POWER_MANAGEMENT           = 0x2,
    NVME_LBA_RANGE_TYPE             = 0x3,
    NVME_TEMPERATURE_THRESHOLD      = 0x4,
    NVME_ERROR_RECOVERY             = 0x5,
    NVME_VOLATILE_WRITE_CACHE       = 0x6,
    NVME_NUMBER_OF_QUEUES           = 0x7,
    NVME_INTERRUPT_COALESCING       = 0x8,
    NVME_INTERRUPT_VECTOR_CONF      = 0x9,
    NVME_WRITE_ATOMICITY            = 0xa,
    NVME_ASYNCHRONOUS_EVENT_CONF    = 0xb,
    NVME_TIMESTAMP                  = 0xe,
    NVME_SOFTWARE_PROGRESS_MARKER   = 0x80
};
809
810 typedef struct QEMU_PACKED NvmeRangeType {
811 uint8_t type;
812 uint8_t attributes;
813 uint8_t rsvd2[14];
814 uint64_t slba;
815 uint64_t nlb;
816 uint8_t guid[16];
817 uint8_t rsvd48[16];
818 } NvmeRangeType;
819
820 typedef struct QEMU_PACKED NvmeLBAF {
821 uint16_t ms;
822 uint8_t ds;
823 uint8_t rp;
824 } NvmeLBAF;
825
826 typedef struct QEMU_PACKED NvmeIdNs {
827 uint64_t nsze;
828 uint64_t ncap;
829 uint64_t nuse;
830 uint8_t nsfeat;
831 uint8_t nlbaf;
832 uint8_t flbas;
833 uint8_t mc;
834 uint8_t dpc;
835 uint8_t dps;
836
837 uint8_t nmic;
838 uint8_t rescap;
839 uint8_t fpi;
840 uint8_t dlfeat;
841
842 uint8_t res34[94];
843 NvmeLBAF lbaf[16];
844 uint8_t res192[192];
845 uint8_t vs[3712];
846 } NvmeIdNs;
847
848
/* Deallocate Logical Block Features (NvmeIdNs.dlfeat) */
#define NVME_ID_NS_DLFEAT_GUARD_CRC(dlfeat)       ((dlfeat) & 0x10)
#define NVME_ID_NS_DLFEAT_WRITE_ZEROES(dlfeat)    ((dlfeat) & 0x08)

/* Bits 2:0 describe what deallocated blocks read back as. */
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR(dlfeat)     ((dlfeat) & 0x7)
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_UNDEFINED   0
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ZEROES      1
#define NVME_ID_NS_DLFEAT_READ_BEHAVIOR_ONES        2


/* Field extractors for NvmeIdNs.nsfeat/flbas/mc/dpc. */
#define NVME_ID_NS_NSFEAT_THIN(nsfeat)      ((nsfeat & 0x1))
#define NVME_ID_NS_FLBAS_EXTENDED(flbas)    ((flbas >> 4) & 0x1)
#define NVME_ID_NS_FLBAS_INDEX(flbas)       ((flbas & 0xf))
#define NVME_ID_NS_MC_SEPARATE(mc)          ((mc >> 1) & 0x1)
#define NVME_ID_NS_MC_EXTENDED(mc)          ((mc & 0x1))
#define NVME_ID_NS_DPC_LAST_EIGHT(dpc)      ((dpc >> 4) & 0x1)
#define NVME_ID_NS_DPC_FIRST_EIGHT(dpc)     ((dpc >> 3) & 0x1)
#define NVME_ID_NS_DPC_TYPE_3(dpc)          ((dpc >> 2) & 0x1)
#define NVME_ID_NS_DPC_TYPE_2(dpc)          ((dpc >> 1) & 0x1)
#define NVME_ID_NS_DPC_TYPE_1(dpc)          ((dpc & 0x1))
#define NVME_ID_NS_DPC_TYPE_MASK            0x7
870
/* NvmeIdNs.dps values: protection type in bits 2:0, PI location in bit 3. */
enum NvmeIdNsDps {
    DPS_TYPE_NONE   = 0,
    DPS_TYPE_1      = 1,
    DPS_TYPE_2      = 2,
    DPS_TYPE_3      = 3,
    DPS_TYPE_MASK   = 0x7,
    DPS_FIRST_EIGHT = 8,
};
879
880 static inline void _nvme_check_size(void)
881 {
882 QEMU_BUILD_BUG_ON(sizeof(NvmeAerResult) != 4);
883 QEMU_BUILD_BUG_ON(sizeof(NvmeCqe) != 16);
884 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmRange) != 16);
885 QEMU_BUILD_BUG_ON(sizeof(NvmeCmd) != 64);
886 QEMU_BUILD_BUG_ON(sizeof(NvmeDeleteQ) != 64);
887 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateCq) != 64);
888 QEMU_BUILD_BUG_ON(sizeof(NvmeCreateSq) != 64);
889 QEMU_BUILD_BUG_ON(sizeof(NvmeIdentify) != 64);
890 QEMU_BUILD_BUG_ON(sizeof(NvmeRwCmd) != 64);
891 QEMU_BUILD_BUG_ON(sizeof(NvmeDsmCmd) != 64);
892 QEMU_BUILD_BUG_ON(sizeof(NvmeRangeType) != 64);
893 QEMU_BUILD_BUG_ON(sizeof(NvmeErrorLog) != 64);
894 QEMU_BUILD_BUG_ON(sizeof(NvmeFwSlotInfoLog) != 512);
895 QEMU_BUILD_BUG_ON(sizeof(NvmeSmartLog) != 512);
896 QEMU_BUILD_BUG_ON(sizeof(NvmeIdCtrl) != 4096);
897 QEMU_BUILD_BUG_ON(sizeof(NvmeIdNs) != 4096);
898 }
899 #endif