/*
 * This file is auto-generated. Modifications will be lost.
 *
 * See https://android.googlesource.com/platform/bionic/+/master/libc/kernel/
 * for more information.
 */
#ifndef _UAPI_XE_DRM_H_
#define _UAPI_XE_DRM_H_
#include "drm.h"
#ifdef __cplusplus
extern "C" {
#endif
#define DRM_XE_DEVICE_QUERY 0x00
#define DRM_XE_GEM_CREATE 0x01
#define DRM_XE_GEM_MMAP_OFFSET 0x02
#define DRM_XE_VM_CREATE 0x03
#define DRM_XE_VM_DESTROY 0x04
#define DRM_XE_VM_BIND 0x05
#define DRM_XE_EXEC_QUEUE_CREATE 0x06
#define DRM_XE_EXEC_QUEUE_DESTROY 0x07
#define DRM_XE_EXEC_QUEUE_GET_PROPERTY 0x08
#define DRM_XE_EXEC 0x09
#define DRM_XE_WAIT_USER_FENCE 0x0a
#define DRM_IOCTL_XE_DEVICE_QUERY DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_DEVICE_QUERY, struct drm_xe_device_query)
#define DRM_IOCTL_XE_GEM_CREATE DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_GEM_CREATE, struct drm_xe_gem_create)
#define DRM_IOCTL_XE_GEM_MMAP_OFFSET DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_GEM_MMAP_OFFSET, struct drm_xe_gem_mmap_offset)
#define DRM_IOCTL_XE_VM_CREATE DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_VM_CREATE, struct drm_xe_vm_create)
#define DRM_IOCTL_XE_VM_DESTROY DRM_IOW(DRM_COMMAND_BASE + DRM_XE_VM_DESTROY, struct drm_xe_vm_destroy)
#define DRM_IOCTL_XE_VM_BIND DRM_IOW(DRM_COMMAND_BASE + DRM_XE_VM_BIND, struct drm_xe_vm_bind)
#define DRM_IOCTL_XE_EXEC_QUEUE_CREATE DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_EXEC_QUEUE_CREATE, struct drm_xe_exec_queue_create)
#define DRM_IOCTL_XE_EXEC_QUEUE_DESTROY DRM_IOW(DRM_COMMAND_BASE + DRM_XE_EXEC_QUEUE_DESTROY, struct drm_xe_exec_queue_destroy)
#define DRM_IOCTL_XE_EXEC_QUEUE_GET_PROPERTY DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_EXEC_QUEUE_GET_PROPERTY, struct drm_xe_exec_queue_get_property)
#define DRM_IOCTL_XE_EXEC DRM_IOW(DRM_COMMAND_BASE + DRM_XE_EXEC, struct drm_xe_exec)
#define DRM_IOCTL_XE_WAIT_USER_FENCE DRM_IOWR(DRM_COMMAND_BASE + DRM_XE_WAIT_USER_FENCE, struct drm_xe_wait_user_fence)
struct drm_xe_user_extension {
  __u64 next_extension;
  __u32 name;
  __u32 pad;
};
struct drm_xe_ext_set_property {
  struct drm_xe_user_extension base;
  __u32 property;
  __u32 pad;
  __u64 value;
  __u64 reserved[2];
};
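/*
 * Extension chaining, a minimal sketch: extensions form a singly linked
 * list, with a struct drm_xe_user_extension embedded at the start of each
 * extension struct and next_extension holding the user-space address of the
 * next node (0 terminates the chain). For example, a priority set-property
 * extension could be attached to an exec queue create call like this; the
 * priority value 0 is only an illustrative placeholder:
 *
 *     struct drm_xe_ext_set_property prio = {
 *         .base.name = DRM_XE_EXEC_QUEUE_EXTENSION_SET_PROPERTY,
 *         .property = DRM_XE_EXEC_QUEUE_SET_PROPERTY_PRIORITY,
 *         .value = 0,
 *     };
 *     struct drm_xe_exec_queue_create qc = {
 *         .extensions = (__u64)(uintptr_t)&prio,
 *         // remaining fields as in the exec queue sketch further down
 *     };
 */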
struct drm_xe_engine_class_instance {
#define DRM_XE_ENGINE_CLASS_RENDER 0
#define DRM_XE_ENGINE_CLASS_COPY 1
#define DRM_XE_ENGINE_CLASS_VIDEO_DECODE 2
#define DRM_XE_ENGINE_CLASS_VIDEO_ENHANCE 3
#define DRM_XE_ENGINE_CLASS_COMPUTE 4
#define DRM_XE_ENGINE_CLASS_VM_BIND 5
  __u16 engine_class;
  __u16 engine_instance;
  __u16 gt_id;
  __u16 pad;
};
struct drm_xe_engine {
  struct drm_xe_engine_class_instance instance;
  __u64 reserved[3];
};
struct drm_xe_query_engines {
  __u32 num_engines;
  __u32 pad;
  struct drm_xe_engine engines[];
};
enum drm_xe_memory_class {
  DRM_XE_MEM_REGION_CLASS_SYSMEM = 0,
  DRM_XE_MEM_REGION_CLASS_VRAM
};
struct drm_xe_mem_region {
  __u16 mem_class;
  __u16 instance;
  __u32 min_page_size;
  __u64 total_size;
  __u64 used;
  __u64 cpu_visible_size;
  __u64 cpu_visible_used;
  __u64 reserved[6];
};
struct drm_xe_query_mem_regions {
  __u32 num_mem_regions;
  __u32 pad;
  struct drm_xe_mem_region mem_regions[];
};
struct drm_xe_query_config {
  __u32 num_params;
  __u32 pad;
#define DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID 0
#define DRM_XE_QUERY_CONFIG_FLAGS 1
#define DRM_XE_QUERY_CONFIG_FLAG_HAS_VRAM (1 << 0)
#define DRM_XE_QUERY_CONFIG_MIN_ALIGNMENT 2
#define DRM_XE_QUERY_CONFIG_VA_BITS 3
#define DRM_XE_QUERY_CONFIG_MAX_EXEC_QUEUE_PRIORITY 4
  __u64 info[];
};
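/*
 * Reading the config array, a minimal sketch: the DRM_XE_QUERY_CONFIG_*
 * values above are indices into info[], so after a
 * DRM_XE_DEVICE_QUERY_CONFIG query the buffer can be read directly (buf is
 * assumed to be the buffer returned by that query):
 *
 *     struct drm_xe_query_config *config = buf;
 *     __u64 va_bits = config->info[DRM_XE_QUERY_CONFIG_VA_BITS];
 *     __u64 min_align = config->info[DRM_XE_QUERY_CONFIG_MIN_ALIGNMENT];
 *     int has_vram = !!(config->info[DRM_XE_QUERY_CONFIG_FLAGS] &
 *                       DRM_XE_QUERY_CONFIG_FLAG_HAS_VRAM);
 */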
struct drm_xe_gt {
#define DRM_XE_QUERY_GT_TYPE_MAIN 0
#define DRM_XE_QUERY_GT_TYPE_MEDIA 1
  __u16 type;
  __u16 tile_id;
  __u16 gt_id;
  __u16 pad[3];
  __u32 reference_clock;
  __u64 near_mem_regions;
  __u64 far_mem_regions;
  __u64 reserved[8];
};
struct drm_xe_query_gt_list {
  __u32 num_gt;
  __u32 pad;
  struct drm_xe_gt gt_list[];
};
struct drm_xe_query_topology_mask {
  __u16 gt_id;
#define DRM_XE_TOPO_DSS_GEOMETRY (1 << 0)
#define DRM_XE_TOPO_DSS_COMPUTE (1 << 1)
#define DRM_XE_TOPO_EU_PER_DSS (1 << 2)
  __u16 type;
  __u32 num_bytes;
  __u8 mask[];
};
struct drm_xe_query_engine_cycles {
  struct drm_xe_engine_class_instance eci;
  __s32 clockid;
  __u32 width;
  __u64 engine_cycles;
  __u64 cpu_timestamp;
  __u64 cpu_delta;
};
struct drm_xe_query_uc_fw_version {
#define XE_QUERY_UC_TYPE_GUC_SUBMISSION 0
  __u16 uc_type;
  __u16 pad;
  __u32 branch_ver;
  __u32 major_ver;
  __u32 minor_ver;
  __u32 patch_ver;
  __u32 pad2;
  __u64 reserved;
};
struct drm_xe_device_query {
  __u64 extensions;
#define DRM_XE_DEVICE_QUERY_ENGINES 0
#define DRM_XE_DEVICE_QUERY_MEM_REGIONS 1
#define DRM_XE_DEVICE_QUERY_CONFIG 2
#define DRM_XE_DEVICE_QUERY_GT_LIST 3
#define DRM_XE_DEVICE_QUERY_HWCONFIG 4
#define DRM_XE_DEVICE_QUERY_GT_TOPOLOGY 5
#define DRM_XE_DEVICE_QUERY_ENGINE_CYCLES 6
#define DRM_XE_DEVICE_QUERY_UC_FW_VERSION 7
  __u32 query;
  __u32 size;
  __u64 data;
  __u64 reserved[2];
};
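/*
 * Device query usage, a minimal sketch assuming fd is an open Xe render
 * node: the ioctl follows the usual two-call convention, where a first call
 * with size == 0 reports the required buffer size and a second call fills
 * the caller-provided buffer (error handling omitted):
 *
 *     struct drm_xe_device_query query = {
 *         .query = DRM_XE_DEVICE_QUERY_ENGINES,
 *     };
 *     ioctl(fd, DRM_IOCTL_XE_DEVICE_QUERY, &query);  // sets query.size
 *     struct drm_xe_query_engines *engines = malloc(query.size);
 *     query.data = (__u64)(uintptr_t)engines;
 *     ioctl(fd, DRM_IOCTL_XE_DEVICE_QUERY, &query);  // fills the buffer
 *     for (__u32 i = 0; i < engines->num_engines; i++) {
 *         // engines->engines[i].instance describes one engine
 *     }
 */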
struct drm_xe_gem_create {
  __u64 extensions;
  __u64 size;
  __u32 placement;
#define DRM_XE_GEM_CREATE_FLAG_DEFER_BACKING (1 << 0)
#define DRM_XE_GEM_CREATE_FLAG_SCANOUT (1 << 1)
#define DRM_XE_GEM_CREATE_FLAG_NEEDS_VISIBLE_VRAM (1 << 2)
  __u32 flags;
  __u32 vm_id;
  __u32 handle;
#define DRM_XE_GEM_CPU_CACHING_WB 1
#define DRM_XE_GEM_CPU_CACHING_WC 2
  __u16 cpu_caching;
  __u16 pad[3];
  __u64 reserved[2];
};
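/*
 * Buffer object creation, a minimal sketch: placement is a bitmask of
 * memory region instances reported by DRM_XE_DEVICE_QUERY_MEM_REGIONS, and
 * cpu_caching must be set (WB is generally only valid for system-memory
 * placements). The 64 KiB size and instance 0 below are illustrative
 * placeholders:
 *
 *     struct drm_xe_gem_create create = {
 *         .size = 64 * 1024,
 *         .placement = 1 << 0,  // system memory, instance 0
 *         .cpu_caching = DRM_XE_GEM_CPU_CACHING_WB,
 *     };
 *     ioctl(fd, DRM_IOCTL_XE_GEM_CREATE, &create);
 *     // create.handle now holds the GEM handle
 */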
struct drm_xe_gem_mmap_offset {
  __u64 extensions;
  __u32 handle;
  __u32 flags;
  __u64 offset;
  __u64 reserved[2];
};
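/*
 * CPU mapping, a minimal sketch: the ioctl turns a GEM handle into a fake
 * offset that is then passed to mmap() on the same device fd (handle and
 * size taken from the create sketch above, error handling omitted):
 *
 *     struct drm_xe_gem_mmap_offset mmo = { .handle = create.handle };
 *     ioctl(fd, DRM_IOCTL_XE_GEM_MMAP_OFFSET, &mmo);
 *     void *ptr = mmap(NULL, create.size, PROT_READ | PROT_WRITE,
 *                      MAP_SHARED, fd, mmo.offset);
 */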
struct drm_xe_vm_create {
  __u64 extensions;
#define DRM_XE_VM_CREATE_FLAG_SCRATCH_PAGE (1 << 0)
#define DRM_XE_VM_CREATE_FLAG_LR_MODE (1 << 1)
#define DRM_XE_VM_CREATE_FLAG_FAULT_MODE (1 << 2)
  __u32 flags;
  __u32 vm_id;
  __u64 reserved[2];
};
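/*
 * VM creation, a minimal sketch: a plain VM needs no flags;
 * DRM_XE_VM_CREATE_FLAG_LR_MODE and DRM_XE_VM_CREATE_FLAG_FAULT_MODE opt
 * into long-running / page-fault semantics where the device supports them:
 *
 *     struct drm_xe_vm_create vm = { .flags = 0 };
 *     ioctl(fd, DRM_IOCTL_XE_VM_CREATE, &vm);
 *     // vm.vm_id identifies the new VM in later calls
 */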
struct drm_xe_vm_destroy {
  __u32 vm_id;
  __u32 pad;
  __u64 reserved[2];
};
struct drm_xe_vm_bind_op {
  __u64 extensions;
  __u32 obj;
  __u16 pat_index;
  __u16 pad;
  union {
    __u64 obj_offset;
    __u64 userptr;
  };
  __u64 range;
  __u64 addr;
#define DRM_XE_VM_BIND_OP_MAP 0x0
#define DRM_XE_VM_BIND_OP_UNMAP 0x1
#define DRM_XE_VM_BIND_OP_MAP_USERPTR 0x2
#define DRM_XE_VM_BIND_OP_UNMAP_ALL 0x3
#define DRM_XE_VM_BIND_OP_PREFETCH 0x4
  __u32 op;
#define DRM_XE_VM_BIND_FLAG_NULL (1 << 2)
#define DRM_XE_VM_BIND_FLAG_DUMPABLE (1 << 3)
  __u32 flags;
  __u32 prefetch_mem_region_instance;
  __u32 pad2;
  __u64 reserved[3];
};
struct drm_xe_vm_bind {
  __u64 extensions;
  __u32 vm_id;
  __u32 exec_queue_id;
  __u32 pad;
  __u32 num_binds;
  union {
    struct drm_xe_vm_bind_op bind;
    __u64 vector_of_binds;
  };
  __u32 pad2;
  __u32 num_syncs;
  __u64 syncs;
  __u64 reserved[2];
};
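/*
 * VM bind usage, a minimal sketch: when num_binds == 1 the single op is
 * passed inline in bind, otherwise vector_of_binds points at an array of
 * struct drm_xe_vm_bind_op. The GPU address 0x100000 and pat_index 0 below
 * are illustrative placeholders (valid PAT indices are device-specific),
 * and completion would normally be tracked through a sync entry, omitted
 * here:
 *
 *     struct drm_xe_vm_bind bind = {
 *         .vm_id = vm.vm_id,
 *         .num_binds = 1,
 *         .bind = {
 *             .obj = create.handle,
 *             .obj_offset = 0,
 *             .range = create.size,
 *             .addr = 0x100000,
 *             .op = DRM_XE_VM_BIND_OP_MAP,
 *             .pat_index = 0,
 *         },
 *     };
 *     ioctl(fd, DRM_IOCTL_XE_VM_BIND, &bind);
 */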
struct drm_xe_exec_queue_create {
#define DRM_XE_EXEC_QUEUE_EXTENSION_SET_PROPERTY 0
#define DRM_XE_EXEC_QUEUE_SET_PROPERTY_PRIORITY 0
#define DRM_XE_EXEC_QUEUE_SET_PROPERTY_TIMESLICE 1
  __u64 extensions;
  __u16 width;
  __u16 num_placements;
  __u32 vm_id;
  __u32 flags;
  __u32 exec_queue_id;
  __u64 instances;
  __u64 reserved[2];
};
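/*
 * Exec queue creation, a minimal sketch: instances points at a
 * width x num_placements array of struct drm_xe_engine_class_instance; the
 * common single-engine case uses 1 x 1. The render engine below is an
 * illustrative placeholder, normally taken from a
 * DRM_XE_DEVICE_QUERY_ENGINES result:
 *
 *     struct drm_xe_engine_class_instance eci = {
 *         .engine_class = DRM_XE_ENGINE_CLASS_RENDER,
 *         .engine_instance = 0,
 *         .gt_id = 0,
 *     };
 *     struct drm_xe_exec_queue_create qc = {
 *         .width = 1,
 *         .num_placements = 1,
 *         .vm_id = vm.vm_id,
 *         .instances = (__u64)(uintptr_t)&eci,
 *     };
 *     ioctl(fd, DRM_IOCTL_XE_EXEC_QUEUE_CREATE, &qc);
 */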
struct drm_xe_exec_queue_destroy {
  __u32 exec_queue_id;
  __u32 pad;
  __u64 reserved[2];
};
struct drm_xe_exec_queue_get_property {
  __u64 extensions;
  __u32 exec_queue_id;
#define DRM_XE_EXEC_QUEUE_GET_PROPERTY_BAN 0
  __u32 property;
  __u64 value;
  __u64 reserved[2];
};
struct drm_xe_sync {
  __u64 extensions;
#define DRM_XE_SYNC_TYPE_SYNCOBJ 0x0
#define DRM_XE_SYNC_TYPE_TIMELINE_SYNCOBJ 0x1
#define DRM_XE_SYNC_TYPE_USER_FENCE 0x2
  __u32 type;
#define DRM_XE_SYNC_FLAG_SIGNAL (1 << 0)
  __u32 flags;
  union {
    __u32 handle;
    __u64 addr;
  };
  __u64 timeline_value;
  __u64 reserved[2];
};
struct drm_xe_exec {
  __u64 extensions;
  __u32 exec_queue_id;
  __u32 num_syncs;
  __u64 syncs;
  __u64 address;
  __u16 num_batch_buffer;
  __u16 pad[3];
  __u64 reserved[2];
};
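/*
 * Submission, a minimal sketch: address is the GPU virtual address of the
 * batch buffer inside the VM the exec queue was created against (or an
 * array of addresses when num_batch_buffer matches a parallel queue's
 * width). The syncobj handle is assumed to have been created separately,
 * e.g. with DRM_IOCTL_SYNCOBJ_CREATE, and the batch address reuses the bind
 * sketch above:
 *
 *     struct drm_xe_sync sync = {
 *         .type = DRM_XE_SYNC_TYPE_SYNCOBJ,
 *         .flags = DRM_XE_SYNC_FLAG_SIGNAL,
 *         .handle = syncobj_handle,
 *     };
 *     struct drm_xe_exec exec = {
 *         .exec_queue_id = qc.exec_queue_id,
 *         .num_syncs = 1,
 *         .syncs = (__u64)(uintptr_t)&sync,
 *         .address = 0x100000,
 *         .num_batch_buffer = 1,
 *     };
 *     ioctl(fd, DRM_IOCTL_XE_EXEC, &exec);
 */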
struct drm_xe_wait_user_fence {
  __u64 extensions;
  __u64 addr;
#define DRM_XE_UFENCE_WAIT_OP_EQ 0x0
#define DRM_XE_UFENCE_WAIT_OP_NEQ 0x1
#define DRM_XE_UFENCE_WAIT_OP_GT 0x2
#define DRM_XE_UFENCE_WAIT_OP_GTE 0x3
#define DRM_XE_UFENCE_WAIT_OP_LT 0x4
#define DRM_XE_UFENCE_WAIT_OP_LTE 0x5
  __u16 op;
#define DRM_XE_UFENCE_WAIT_FLAG_ABSTIME (1 << 0)
  __u16 flags;
  __u32 pad;
  __u64 value;
  __u64 mask;
  __s64 timeout;
  __u32 exec_queue_id;
  __u32 pad2;
  __u64 reserved[2];
};
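/*
 * User fence wait, a minimal sketch: the kernel compares the 64-bit value
 * at addr against value under mask using op, returning once the condition
 * holds or the timeout (nanoseconds, relative unless
 * DRM_XE_UFENCE_WAIT_FLAG_ABSTIME is set) expires. fence_addr below is a
 * placeholder for an address previously attached to a submission through a
 * DRM_XE_SYNC_TYPE_USER_FENCE sync entry:
 *
 *     struct drm_xe_wait_user_fence wait = {
 *         .addr = fence_addr,
 *         .op = DRM_XE_UFENCE_WAIT_OP_EQ,
 *         .value = 1,
 *         .mask = ~0ull,
 *         .timeout = 1000000000,  // 1 second
 *         .exec_queue_id = qc.exec_queue_id,
 *     };
 *     ioctl(fd, DRM_IOCTL_XE_WAIT_USER_FENCE, &wait);
 */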
#ifdef __cplusplus
}
#endif
#endif