/*
 * Copyright (C) 2023-2025 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 *
 */
# include "shared/source/os_interface/linux/xe/ioctl_helper_xe.h"
2024-01-09 13:40:40 +00:00
# include "shared/source/debugger/debugger.h"
# include "shared/source/execution_environment/execution_environment.h"
2023-01-09 12:07:23 +00:00
# include "shared/source/execution_environment/root_device_environment.h"
2025-07-16 18:26:34 +00:00
# include "shared/source/gmm_helper/client_context/gmm_client_context.h"
2023-08-07 12:32:02 +00:00
# include "shared/source/gmm_helper/gmm_helper.h"
2024-07-25 10:47:37 +00:00
# include "shared/source/helpers/aligned_memory.h"
2023-01-09 12:07:23 +00:00
# include "shared/source/helpers/basic_math.h"
# include "shared/source/helpers/common_types.h"
# include "shared/source/helpers/constants.h"
2023-09-18 10:49:16 +00:00
# include "shared/source/helpers/engine_control.h"
2024-01-09 09:54:33 +00:00
# include "shared/source/helpers/gfx_core_helper.h"
2023-01-09 12:07:23 +00:00
# include "shared/source/helpers/hw_info.h"
# include "shared/source/helpers/ptr_math.h"
# include "shared/source/helpers/string.h"
2025-08-05 22:33:38 +00:00
# include "shared/source/helpers/topology.h"
2024-03-18 23:41:10 +00:00
# include "shared/source/os_interface/linux/drm_buffer_object.h"
2023-01-09 12:07:23 +00:00
# include "shared/source/os_interface/linux/drm_neo.h"
2023-05-02 11:05:31 +00:00
# include "shared/source/os_interface/linux/engine_info.h"
2025-04-08 22:05:33 +00:00
# include "shared/source/os_interface/linux/file_descriptor.h"
2023-05-04 10:13:08 +00:00
# include "shared/source/os_interface/linux/memory_info.h"
2023-01-09 12:07:23 +00:00
# include "shared/source/os_interface/linux/os_context_linux.h"
2024-03-09 01:22:47 +00:00
# include "shared/source/os_interface/linux/sys_calls.h"
2024-08-01 09:59:15 +00:00
# include "shared/source/os_interface/linux/xe/xedrm.h"
2023-09-18 10:49:16 +00:00
# include "shared/source/os_interface/os_time.h"
2025-04-08 22:05:33 +00:00
# include "shared/source/utilities/directory.h"
2023-01-09 12:07:23 +00:00
2023-06-20 07:34:12 +00:00
# include <algorithm>
2023-01-09 12:07:23 +00:00
# include <iostream>
2024-04-15 11:09:00 +00:00
# include <limits>
2023-12-27 13:27:51 +00:00
# include <sstream>
2023-01-09 12:07:23 +00:00
# define STRINGIFY_ME(X) return #X
# define RETURN_ME(X) return X
namespace NEO {
2025-04-03 16:10:10 +00:00
const char * IoctlHelperXe : : xeGetClassName ( int className ) const {
2023-01-09 12:07:23 +00:00
switch ( className ) {
case DRM_XE_ENGINE_CLASS_RENDER :
return " rcs " ;
case DRM_XE_ENGINE_CLASS_COPY :
return " bcs " ;
case DRM_XE_ENGINE_CLASS_VIDEO_DECODE :
return " vcs " ;
case DRM_XE_ENGINE_CLASS_VIDEO_ENHANCE :
return " vecs " ;
case DRM_XE_ENGINE_CLASS_COMPUTE :
return " ccs " ;
}
2024-03-12 16:44:45 +00:00
return " Unknown class name " ;
2023-01-09 12:07:23 +00:00
}
2023-09-18 10:49:16 +00:00
const char * IoctlHelperXe : : xeGetBindOperationName ( int bindOperation ) {
switch ( bindOperation ) {
2023-11-27 13:16:31 +00:00
case DRM_XE_VM_BIND_OP_MAP :
2023-01-09 12:07:23 +00:00
return " MAP " ;
2023-11-27 13:16:31 +00:00
case DRM_XE_VM_BIND_OP_UNMAP :
2023-01-09 12:07:23 +00:00
return " UNMAP " ;
2023-11-27 13:16:31 +00:00
case DRM_XE_VM_BIND_OP_MAP_USERPTR :
2023-01-09 12:07:23 +00:00
return " MAP_USERPTR " ;
2023-11-27 13:16:31 +00:00
case DRM_XE_VM_BIND_OP_UNMAP_ALL :
2023-09-18 10:49:16 +00:00
return " UNMAP ALL " ;
2023-11-27 13:16:31 +00:00
case DRM_XE_VM_BIND_OP_PREFETCH :
2023-09-18 10:49:16 +00:00
return " PREFETCH " ;
2023-01-09 12:07:23 +00:00
}
2023-09-18 10:49:16 +00:00
return " Unknown operation " ;
}
2025-02-13 22:26:10 +00:00
const char * IoctlHelperXe : : xeGetAdviseOperationName ( int adviseOperation ) {
return " Unknown operation " ;
}
2024-07-29 14:22:01 +00:00
std : : string IoctlHelperXe : : xeGetBindFlagNames ( int bindFlags ) {
if ( bindFlags = = 0 ) {
return " " ;
2023-09-18 10:49:16 +00:00
}
2024-07-29 14:22:01 +00:00
std : : string flags ;
if ( bindFlags & DRM_XE_VM_BIND_FLAG_READONLY ) {
bindFlags & = ~ DRM_XE_VM_BIND_FLAG_READONLY ;
flags + = " READONLY " ;
}
if ( bindFlags & DRM_XE_VM_BIND_FLAG_IMMEDIATE ) {
bindFlags & = ~ DRM_XE_VM_BIND_FLAG_IMMEDIATE ;
flags + = " IMMEDIATE " ;
}
if ( bindFlags & DRM_XE_VM_BIND_FLAG_NULL ) {
bindFlags & = ~ DRM_XE_VM_BIND_FLAG_NULL ;
flags + = " NULL " ;
}
if ( bindFlags & DRM_XE_VM_BIND_FLAG_DUMPABLE ) {
bindFlags & = ~ DRM_XE_VM_BIND_FLAG_DUMPABLE ;
flags + = " DUMPABLE " ;
}
if ( bindFlags ! = 0 ) {
flags + = " Unknown flag " ;
}
// Remove the trailing space
if ( ! flags . empty ( ) & & flags . back ( ) = = ' ' ) {
flags . pop_back ( ) ;
}
return flags ;
2023-01-09 12:07:23 +00:00
}
const char * IoctlHelperXe : : xeGetengineClassName ( uint32_t engineClass ) {
switch ( engineClass ) {
case DRM_XE_ENGINE_CLASS_RENDER :
return " DRM_XE_ENGINE_CLASS_RENDER " ;
case DRM_XE_ENGINE_CLASS_COPY :
return " DRM_XE_ENGINE_CLASS_COPY " ;
case DRM_XE_ENGINE_CLASS_VIDEO_DECODE :
return " DRM_XE_ENGINE_CLASS_VIDEO_DECODE " ;
case DRM_XE_ENGINE_CLASS_VIDEO_ENHANCE :
return " DRM_XE_ENGINE_CLASS_VIDEO_ENHANCE " ;
case DRM_XE_ENGINE_CLASS_COMPUTE :
return " DRM_XE_ENGINE_CLASS_COMPUTE " ;
default :
2024-03-12 16:44:45 +00:00
return " Unknown engine class " ;
2023-01-09 12:07:23 +00:00
}
}
// Constructs the Xe ioctl helper bound to the given Drm instance.
IoctlHelperXe::IoctlHelperXe(Drm &drmArg) : IoctlHelper(drmArg) {
    xeLog("IoctlHelperXe::IoctlHelperXe\n", "");
}
2025-01-24 03:59:30 +00:00
bool IoctlHelperXe : : queryDeviceIdAndRevision ( Drm & drm ) {
2024-12-17 15:19:39 +00:00
auto fileDescriptor = drm . getFileDescriptor ( ) ;
drm_xe_device_query queryConfig = { } ;
queryConfig . query = DRM_XE_DEVICE_QUERY_CONFIG ;
int ret = SysCalls : : ioctl ( fileDescriptor , DRM_IOCTL_XE_DEVICE_QUERY , & queryConfig ) ;
if ( ret | | queryConfig . size = = 0 ) {
printDebugString ( debugManager . flags . PrintDebugMessages . get ( ) , stderr , " %s " , " FATAL: Cannot query size for device config! \n " ) ;
return false ;
}
auto data = std : : vector < uint64_t > ( Math : : divideAndRoundUp ( sizeof ( drm_xe_query_config ) + sizeof ( uint64_t ) * queryConfig . size , sizeof ( uint64_t ) ) , 0 ) ;
struct drm_xe_query_config * config = reinterpret_cast < struct drm_xe_query_config * > ( data . data ( ) ) ;
queryConfig . data = castToUint64 ( config ) ;
ret = SysCalls : : ioctl ( fileDescriptor , DRM_IOCTL_XE_DEVICE_QUERY , & queryConfig ) ;
if ( ret ) {
printDebugString ( debugManager . flags . PrintDebugMessages . get ( ) , stderr , " %s " , " FATAL: Cannot query device ID and revision! \n " ) ;
return false ;
}
auto hwInfo = drm . getRootDeviceEnvironment ( ) . getMutableHardwareInfo ( ) ;
hwInfo - > platform . usDeviceID = config - > info [ DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID ] & 0xffff ;
hwInfo - > platform . usRevId = static_cast < int > ( ( config - > info [ DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID ] > > 16 ) & 0xff ) ;
2025-01-24 03:59:30 +00:00
2025-05-12 15:27:55 +00:00
if ( ( debugManager . flags . EnableRecoverablePageFaults . get ( ) ! = 0 ) & & ( debugManager . flags . EnableSharedSystemUsmSupport . get ( ) = = 1 ) & & ( config - > info [ DRM_XE_QUERY_CONFIG_FLAGS ] & DRM_XE_QUERY_CONFIG_FLAG_HAS_CPU_ADDR_MIRROR ) ) {
2025-01-24 03:59:30 +00:00
drm . setSharedSystemAllocEnable ( true ) ;
2025-08-01 06:14:40 +00:00
} else {
printDebugString ( debugManager . flags . PrintDebugMessages . get ( ) , stderr , " %s " , " Shared System USM NOT allowed: KMD does not support \n " ) ;
2025-01-24 03:59:30 +00:00
}
2024-12-17 15:19:39 +00:00
return true ;
}
2025-05-05 12:45:29 +00:00
// Translates a tile id into a GT id. Video decode/enhance engines live on the
// media GT; every other engine class uses the main GT for that tile.
uint32_t IoctlHelperXe::getGtIdFromTileId(uint32_t tileId, uint16_t engineClass) const {
    const bool usesMediaGt = (engineClass == DRM_XE_ENGINE_CLASS_VIDEO_DECODE) ||
                             (engineClass == DRM_XE_ENGINE_CLASS_VIDEO_ENHANCE);
    const auto &mapping = usesMediaGt ? tileIdToMediaGtId : tileIdToGtId;
    return static_cast<uint32_t>(mapping[tileId]);
}
2023-01-09 12:07:23 +00:00
bool IoctlHelperXe : : initialize ( ) {
xeLog ( " IoctlHelperXe::initialize \n " , " " ) ;
2024-12-16 18:22:18 +00:00
euDebugInterface = EuDebugInterface : : create ( drm . getSysFsPciPath ( ) ) ;
2024-12-17 15:19:39 +00:00
2023-09-18 10:49:16 +00:00
drm_xe_device_query queryConfig = { } ;
2023-05-02 11:05:31 +00:00
queryConfig . query = DRM_XE_DEVICE_QUERY_CONFIG ;
2023-01-09 12:07:23 +00:00
2023-12-12 08:48:32 +00:00
auto retVal = IoctlHelper : : ioctl ( DrmIoctl : : query , & queryConfig ) ;
2023-05-02 11:05:31 +00:00
if ( retVal ! = 0 | | queryConfig . size = = 0 ) {
return false ;
}
2023-10-02 14:26:33 +00:00
auto data = std : : vector < uint64_t > ( Math : : divideAndRoundUp ( sizeof ( drm_xe_query_config ) + sizeof ( uint64_t ) * queryConfig . size , sizeof ( uint64_t ) ) , 0 ) ;
2023-05-02 11:05:31 +00:00
struct drm_xe_query_config * config = reinterpret_cast < struct drm_xe_query_config * > ( data . data ( ) ) ;
queryConfig . data = castToUint64 ( config ) ;
2023-12-12 08:48:32 +00:00
IoctlHelper : : ioctl ( DrmIoctl : : query , & queryConfig ) ;
2023-11-27 13:16:31 +00:00
xeLog ( " DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID \t %#llx \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID ] ) ;
2023-05-02 11:05:31 +00:00
xeLog ( " REV_ID \t \t \t \t %#llx \n " ,
2023-11-27 13:16:31 +00:00
( config - > info [ DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID ] > > 16 ) & 0xff ) ;
2023-05-02 11:05:31 +00:00
xeLog ( " DEVICE_ID \t \t \t \t %#llx \n " ,
2023-11-27 13:16:31 +00:00
config - > info [ DRM_XE_QUERY_CONFIG_REV_AND_DEVICE_ID ] & 0xffff ) ;
xeLog ( " DRM_XE_QUERY_CONFIG_FLAGS \t \t \t %#llx \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_FLAGS ] ) ;
xeLog ( " DRM_XE_QUERY_CONFIG_FLAG_HAS_VRAM \t %s \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_FLAGS ] &
DRM_XE_QUERY_CONFIG_FLAG_HAS_VRAM
2023-05-02 11:05:31 +00:00
? " ON "
: " OFF " ) ;
2023-11-27 13:16:31 +00:00
xeLog ( " DRM_XE_QUERY_CONFIG_MIN_ALIGNMENT \t \t %#llx \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_MIN_ALIGNMENT ] ) ;
xeLog ( " DRM_XE_QUERY_CONFIG_VA_BITS \t \t %#llx \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_VA_BITS ] ) ;
2023-05-02 11:05:31 +00:00
2024-03-25 15:44:38 +00:00
xeLog ( " DRM_XE_QUERY_CONFIG_MAX_EXEC_QUEUE_PRIORITY \t \t %#llx \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_MAX_EXEC_QUEUE_PRIORITY ] ) ;
2025-08-21 09:53:34 +00:00
xeLog ( " DRM_XE_QUERY_CONFIG_FLAG_HAS_LOW_LATENCY \t %s \n " ,
config - > info [ DRM_XE_QUERY_CONFIG_FLAGS ] &
DRM_XE_QUERY_CONFIG_FLAG_HAS_LOW_LATENCY
? " ON "
: " OFF " ) ;
2024-03-25 15:44:38 +00:00
maxExecQueuePriority = config - > info [ DRM_XE_QUERY_CONFIG_MAX_EXEC_QUEUE_PRIORITY ] & 0xffff ;
2025-09-01 11:02:50 +00:00
isLowLatencyHintAvailable = config - > info [ DRM_XE_QUERY_CONFIG_FLAGS ] & DRM_XE_QUERY_CONFIG_FLAG_HAS_LOW_LATENCY ;
2025-08-21 09:53:34 +00:00
if ( debugManager . flags . ForceLowLatencyHint . get ( ) ! = - 1 ) {
isLowLatencyHintAvailable = ! ! debugManager . flags . ForceLowLatencyHint . get ( ) ;
}
2023-05-02 11:05:31 +00:00
memset ( & queryConfig , 0 , sizeof ( queryConfig ) ) ;
queryConfig . query = DRM_XE_DEVICE_QUERY_HWCONFIG ;
2023-12-12 08:48:32 +00:00
IoctlHelper : : ioctl ( DrmIoctl : : query , & queryConfig ) ;
2023-10-19 11:23:16 +00:00
auto newSize = queryConfig . size / sizeof ( uint32_t ) ;
2023-12-27 13:27:51 +00:00
hwconfig . resize ( newSize ) ;
queryConfig . data = castToUint64 ( hwconfig . data ( ) ) ;
2023-12-12 08:48:32 +00:00
IoctlHelper : : ioctl ( DrmIoctl : : query , & queryConfig ) ;
2023-05-02 11:05:31 +00:00
auto hwInfo = this - > drm . getRootDeviceEnvironment ( ) . getMutableHardwareInfo ( ) ;
2024-03-21 17:40:02 +00:00
hwInfo - > capabilityTable . gpuAddressSpace = ( 1ull < < config - > info [ DRM_XE_QUERY_CONFIG_VA_BITS ] ) - 1 ;
2023-09-18 10:49:16 +00:00
2024-08-01 09:59:15 +00:00
hwInfo - > capabilityTable . cxlType = 0 ;
if ( getCxlType ( ) & & config - > num_params > * getCxlType ( ) ) {
hwInfo - > capabilityTable . cxlType = static_cast < uint32_t > ( config - > info [ * getCxlType ( ) ] ) ;
}
2024-06-07 18:46:33 +00:00
2024-04-12 11:39:48 +00:00
queryGtListData = queryData < uint64_t > ( DRM_XE_DEVICE_QUERY_GT_LIST ) ;
if ( queryGtListData . empty ( ) ) {
return false ;
}
xeGtListData = reinterpret_cast < drm_xe_query_gt_list * > ( queryGtListData . data ( ) ) ;
2025-03-28 12:19:17 +00:00
auto assignValue = [ ] ( auto & container , uint16_t id , uint16_t value ) {
if ( container . size ( ) < id + 1u ) {
container . resize ( id + 1 , invalidIndex ) ;
}
container [ id ] = value ;
} ;
2024-04-12 13:09:30 +00:00
gtIdToTileId . resize ( xeGtListData - > num_gt , invalidIndex ) ;
for ( auto i = 0u ; i < xeGtListData - > num_gt ; i + + ) {
const auto & gt = xeGtListData - > gt_list [ i ] ;
if ( gt . type = = DRM_XE_QUERY_GT_TYPE_MAIN ) {
gtIdToTileId [ gt . gt_id ] = gt . tile_id ;
2025-03-28 12:19:17 +00:00
assignValue ( tileIdToGtId , gt . tile_id , gt . gt_id ) ;
} else if ( isMediaGt ( gt . type ) ) {
assignValue ( mediaGtIdToTileId , gt . gt_id , gt . tile_id ) ;
2025-05-05 12:45:29 +00:00
assignValue ( tileIdToMediaGtId , gt . tile_id , gt . gt_id ) ;
2024-04-12 13:09:30 +00:00
}
}
2023-06-22 19:30:16 +00:00
return true ;
2023-01-09 12:07:23 +00:00
}
2025-03-28 12:19:17 +00:00
bool IoctlHelperXe : : isMediaGt ( uint16_t gtType ) const {
return ( gtType = = DRM_XE_QUERY_GT_TYPE_MEDIA ) ;
}
2023-01-09 12:07:23 +00:00
IoctlHelperXe::~IoctlHelperXe() {
    xeLog("IoctlHelperXe::~IoctlHelperXe\n", "");
}
bool IoctlHelperXe : : isSetPairAvailable ( ) {
return false ;
}
2023-03-08 04:06:00 +00:00
bool IoctlHelperXe : : isChunkingAvailable ( ) {
return false ;
}
2023-01-09 12:07:23 +00:00
bool IoctlHelperXe : : isVmBindAvailable ( ) {
return true ;
}
2023-12-27 16:23:42 +00:00
bool IoctlHelperXe : : setDomainCpu ( uint32_t handle , bool writeEnable ) {
return false ;
}
2023-10-02 14:26:33 +00:00
template < typename DataType >
std : : vector < DataType > IoctlHelperXe : : queryData ( uint32_t queryId ) {
2023-05-02 11:05:31 +00:00
struct drm_xe_device_query deviceQuery = { } ;
deviceQuery . query = queryId ;
2023-12-12 08:48:32 +00:00
IoctlHelper : : ioctl ( DrmIoctl : : query , & deviceQuery ) ;
2023-05-02 11:05:31 +00:00
2023-10-02 14:26:33 +00:00
std : : vector < DataType > retVal ( Math : : divideAndRoundUp ( deviceQuery . size , sizeof ( DataType ) ) ) ;
2023-05-02 11:05:31 +00:00
deviceQuery . data = castToUint64 ( retVal . data ( ) ) ;
2023-12-12 08:48:32 +00:00
IoctlHelper : : ioctl ( DrmIoctl : : query , & deviceQuery ) ;
2023-05-02 11:05:31 +00:00
return retVal ;
}
2024-03-27 16:55:20 +00:00
template std : : vector < uint8_t > IoctlHelperXe : : queryData ( uint32_t queryId ) ;
template std : : vector < uint64_t > IoctlHelperXe : : queryData ( uint32_t queryId ) ;
2023-05-02 11:05:31 +00:00
std : : unique_ptr < EngineInfo > IoctlHelperXe : : createEngineInfo ( bool isSysmanEnabled ) {
2024-03-14 11:04:59 +00:00
auto enginesData = queryData < uint64_t > ( DRM_XE_DEVICE_QUERY_ENGINES ) ;
2023-05-04 10:13:08 +00:00
if ( enginesData . empty ( ) ) {
return { } ;
}
2024-03-14 11:04:59 +00:00
auto queryEngines = reinterpret_cast < struct drm_xe_query_engines * > ( enginesData . data ( ) ) ;
2025-07-24 14:54:43 +00:00
const auto numberHwEngines = queryEngines - > num_engines ;
2024-03-14 11:04:59 +00:00
xeLog ( " numberHwEngines=%d \n " , numberHwEngines ) ;
2023-05-02 11:05:31 +00:00
2024-07-10 16:13:50 +00:00
StackVec < std : : vector < EngineCapabilities > , 2 > enginesPerTile { } ;
2023-05-05 14:26:24 +02:00
std : : bitset < 8 > multiTileMask { } ;
2023-05-02 11:05:31 +00:00
2024-04-12 09:40:28 +00:00
auto hwInfo = drm . getRootDeviceEnvironment ( ) . getMutableHardwareInfo ( ) ;
auto defaultEngineClass = getDefaultEngineClass ( hwInfo - > capabilityTable . defaultEngineType ) ;
2025-03-28 12:19:17 +00:00
auto containsGtId = [ ] ( const auto & container , uint16_t gtId ) {
return ( ( container . size ( ) > gtId ) & & ( container [ gtId ] ! = invalidIndex ) ) ;
} ;
2023-05-02 11:05:31 +00:00
for ( auto i = 0u ; i < numberHwEngines ; i + + ) {
2024-03-14 11:04:59 +00:00
const auto & engine = queryEngines - > engines [ i ] . instance ;
2025-03-28 12:19:17 +00:00
uint16_t tile = 0 ;
2025-04-03 16:10:10 +00:00
const bool mediaEngine = isMediaEngine ( engine . engine_class ) ;
2025-04-08 12:20:24 +00:00
const bool videoEngine = ( engine . engine_class = = getDrmParamValue ( DrmParam : : engineClassVideo ) | | engine . engine_class = = getDrmParamValue ( DrmParam : : engineClassVideoEnhance ) ) ;
2025-03-28 12:19:17 +00:00
2025-04-03 16:10:10 +00:00
if ( containsGtId ( gtIdToTileId , engine . gt_id ) & & ! mediaEngine ) {
2025-03-28 12:19:17 +00:00
tile = static_cast < uint16_t > ( gtIdToTileId [ engine . gt_id ] ) ;
2025-04-08 12:20:24 +00:00
} else if ( containsGtId ( mediaGtIdToTileId , engine . gt_id ) & & ( mediaEngine | | videoEngine ) ) {
2025-03-28 12:19:17 +00:00
tile = static_cast < uint16_t > ( mediaGtIdToTileId [ engine . gt_id ] ) ;
} else {
2025-03-27 10:42:42 +00:00
continue ;
}
2025-03-28 12:19:17 +00:00
2023-05-05 14:26:24 +02:00
multiTileMask . set ( tile ) ;
2023-05-02 11:05:31 +00:00
EngineClassInstance engineClassInstance { } ;
2024-03-14 11:04:59 +00:00
engineClassInstance . engineClass = engine . engine_class ;
engineClassInstance . engineInstance = engine . engine_instance ;
2025-04-03 16:10:10 +00:00
xeLog ( " \t class: %s, instance: %d, gt_id: %d, tile: %d \n " , xeGetClassName ( engineClassInstance . engineClass ) , engineClassInstance . engineInstance , engine . gt_id , tile ) ;
2023-05-02 11:05:31 +00:00
2024-05-20 09:56:10 +00:00
const bool isBaseEngineClass = engineClassInstance . engineClass = = getDrmParamValue ( DrmParam : : engineClassCompute ) | |
engineClassInstance . engineClass = = getDrmParamValue ( DrmParam : : engineClassRender ) | |
engineClassInstance . engineClass = = getDrmParamValue ( DrmParam : : engineClassCopy ) ;
2023-05-02 11:05:31 +00:00
2025-04-08 12:20:24 +00:00
const bool isSysmanEngineClass = isSysmanEnabled & & videoEngine ;
2024-05-20 09:56:10 +00:00
2025-04-03 16:10:10 +00:00
if ( isBaseEngineClass | | isSysmanEngineClass | | mediaEngine ) {
2023-05-02 11:05:31 +00:00
if ( enginesPerTile . size ( ) < = tile ) {
enginesPerTile . resize ( tile + 1 ) ;
}
2024-07-10 16:13:50 +00:00
enginesPerTile [ tile ] . push_back ( { engineClassInstance , { } } ) ;
2024-04-12 09:40:28 +00:00
if ( ! defaultEngine & & engineClassInstance . engineClass = = defaultEngineClass ) {
defaultEngine = std : : make_unique < drm_xe_engine_class_instance > ( ) ;
* defaultEngine = engine ;
}
2023-05-02 11:05:31 +00:00
}
}
2024-04-12 09:40:28 +00:00
UNRECOVERABLE_IF ( ! defaultEngine ) ;
2023-05-05 14:26:24 +02:00
if ( hwInfo - > featureTable . flags . ftrMultiTileArch ) {
auto & multiTileArchInfo = hwInfo - > gtSystemInfo . MultiTileArchInfo ;
multiTileArchInfo . IsValid = true ;
multiTileArchInfo . TileCount = multiTileMask . count ( ) ;
multiTileArchInfo . TileMask = static_cast < uint8_t > ( multiTileMask . to_ulong ( ) ) ;
}
2023-09-18 10:49:16 +00:00
2023-05-02 11:05:31 +00:00
return std : : make_unique < EngineInfo > ( & drm , enginesPerTile ) ;
}
2024-04-15 11:09:00 +00:00
// Converts a DRM Xe memory-region descriptor into the NEO MemoryRegion
// representation, attaching the tiles this region is local to.
inline MemoryRegion createMemoryRegionFromXeMemRegion(const drm_xe_mem_region &xeMemRegion, std::bitset<4> tilesMask) {
    return {
        .region{
            .memoryClass = xeMemRegion.mem_class,
            .memoryInstance = xeMemRegion.instance},
        .probedSize = xeMemRegion.total_size,
        .unallocatedSize = xeMemRegion.total_size - xeMemRegion.used,
        .cpuVisibleSize = xeMemRegion.cpu_visible_size,
        .tilesMask = tilesMask,
    };
}
std : : unique_ptr < MemoryInfo > IoctlHelperXe : : createMemoryInfo ( ) {
2023-11-27 13:16:31 +00:00
auto memUsageData = queryData < uint64_t > ( DRM_XE_DEVICE_QUERY_MEM_REGIONS ) ;
2023-05-04 10:13:08 +00:00
2024-04-12 11:39:48 +00:00
if ( memUsageData . empty ( ) ) {
2023-05-04 10:13:08 +00:00
return { } ;
}
2024-04-18 17:13:25 +00:00
constexpr auto maxSupportedTilesNumber { 4u } ;
std : : array < std : : bitset < maxSupportedTilesNumber > , 64 > regionTilesMask { } ;
2024-04-15 11:09:00 +00:00
for ( auto i { 0u } ; i < xeGtListData - > num_gt ; i + + ) {
const auto & gtEntry = xeGtListData - > gt_list [ i ] ;
if ( gtEntry . type ! = DRM_XE_QUERY_GT_TYPE_MAIN ) {
continue ;
}
2023-05-04 10:13:08 +00:00
2024-04-15 11:09:00 +00:00
uint64_t nearMemRegions { gtEntry . near_mem_regions } ;
auto regionIndex { Math : : log2 ( nearMemRegions ) } ;
regionTilesMask [ regionIndex ] . set ( gtEntry . tile_id ) ;
}
MemoryInfo : : RegionContainer regionsContainer { } ;
2023-05-04 10:13:08 +00:00
2024-04-15 11:09:00 +00:00
auto xeMemRegionsData = reinterpret_cast < drm_xe_query_mem_regions * > ( memUsageData . data ( ) ) ;
2023-12-05 14:40:08 +00:00
for ( auto i = 0u ; i < xeMemRegionsData - > num_mem_regions ; i + + ) {
2024-04-15 11:09:00 +00:00
auto & xeMemRegion { xeMemRegionsData - > mem_regions [ i ] } ;
if ( xeMemRegion . mem_class = = DRM_XE_MEM_REGION_CLASS_SYSMEM ) {
// Make sure sysmem is always put at the first position
regionsContainer . insert ( regionsContainer . begin ( ) , createMemoryRegionFromXeMemRegion ( xeMemRegion , 0u ) ) ;
} else {
auto regionIndex = xeMemRegion . instance ;
UNRECOVERABLE_IF ( regionIndex > = regionTilesMask . size ( ) ) ;
if ( auto tilesMask = regionTilesMask [ regionIndex ] ; tilesMask . any ( ) ) {
2024-04-18 17:13:25 +00:00
regionsContainer . push_back ( createMemoryRegionFromXeMemRegion ( xeMemRegion , tilesMask ) ) ;
2024-04-15 11:09:00 +00:00
}
2023-05-04 10:13:08 +00:00
}
}
if ( regionsContainer . empty ( ) ) {
return { } ;
}
2024-04-15 11:09:00 +00:00
return std : : make_unique < MemoryInfo > ( regionsContainer , drm ) ;
}
// Sums the probed sizes of all local memory regions whose tiles mask
// intersects the requested tileMask.
size_t IoctlHelperXe::getLocalMemoryRegionsSize(const MemoryInfo *memoryInfo, uint32_t subDevicesCount, uint32_t tileMask) const {
    size_t totalSize = 0;
    const std::bitset<4> requestedTiles{tileMask};
    for (const auto &memoryRegion : memoryInfo->getLocalMemoryRegions()) {
        if ((memoryRegion.tilesMask & requestedTiles).any()) {
            totalSize += memoryRegion.probedSize;
        }
    }
    return totalSize;
}
2024-03-27 16:55:20 +00:00
void IoctlHelperXe : : setupIpVersion ( ) {
auto & rootDeviceEnvironment = drm . getRootDeviceEnvironment ( ) ;
auto hwInfo = rootDeviceEnvironment . getMutableHardwareInfo ( ) ;
if ( auto hwIpVersion = GtIpVersion { } ; queryHwIpVersion ( hwIpVersion ) ) {
hwInfo - > ipVersion . architecture = hwIpVersion . major ;
hwInfo - > ipVersion . release = hwIpVersion . minor ;
hwInfo - > ipVersion . revision = hwIpVersion . revision ;
} else {
xeLog ( " No HW IP version received from drm_xe_gt. Falling back to default value. " ) ;
IoctlHelper : : setupIpVersion ( ) ;
}
}
2024-08-06 13:04:01 +00:00
bool IoctlHelperXe : : queryHwIpVersion ( GtIpVersion & gtIpVersion ) {
auto gtListData = queryData < uint64_t > ( DRM_XE_DEVICE_QUERY_GT_LIST ) ;
if ( gtListData . empty ( ) ) {
return false ;
}
auto xeGtListData = reinterpret_cast < drm_xe_query_gt_list * > ( gtListData . data ( ) ) ;
for ( auto i = 0u ; i < xeGtListData - > num_gt ; i + + ) {
auto & gtEntry = xeGtListData - > gt_list [ i ] ;
if ( gtEntry . type = = DRM_XE_QUERY_GT_TYPE_MEDIA | | gtEntry . ip_ver_major = = 0u ) {
continue ;
}
gtIpVersion . major = gtEntry . ip_ver_major ;
gtIpVersion . minor = gtEntry . ip_ver_minor ;
gtIpVersion . revision = gtEntry . ip_ver_rev ;
return true ;
}
return false ;
}
2023-09-18 10:49:16 +00:00
bool IoctlHelperXe : : setGpuCpuTimes ( TimeStampData * pGpuCpuTime , OSTime * osTime ) {
if ( pGpuCpuTime = = nullptr | | osTime = = nullptr ) {
return false ;
}
drm_xe_device_query deviceQuery = { } ;
deviceQuery . query = DRM_XE_DEVICE_QUERY_ENGINE_CYCLES ;
2023-12-12 08:48:32 +00:00
auto ret = IoctlHelper : : ioctl ( DrmIoctl : : query , & deviceQuery ) ;
2023-09-18 10:49:16 +00:00
if ( ret ! = 0 ) {
xeLog ( " -> IoctlHelperXe::%s s=0x%lx r=%d \n " , __FUNCTION__ , deviceQuery . size , ret ) ;
return false ;
}
std : : vector < uint8_t > retVal ( deviceQuery . size ) ;
deviceQuery . data = castToUint64 ( retVal . data ( ) ) ;
drm_xe_query_engine_cycles * queryEngineCycles = reinterpret_cast < drm_xe_query_engine_cycles * > ( retVal . data ( ) ) ;
queryEngineCycles - > clockid = CLOCK_MONOTONIC_RAW ;
queryEngineCycles - > eci = * this - > defaultEngine ;
2023-12-12 08:48:32 +00:00
ret = IoctlHelper : : ioctl ( DrmIoctl : : query , & deviceQuery ) ;
2023-09-18 10:49:16 +00:00
auto nValidBits = queryEngineCycles - > width ;
auto gpuTimestampValidBits = maxNBitValue ( nValidBits ) ;
auto gpuCycles = queryEngineCycles - > engine_cycles & gpuTimestampValidBits ;
xeLog ( " -> IoctlHelperXe::%s [%d,%d] clockId=0x%x s=0x%lx nValidBits=0x%x gpuCycles=0x%x cpuTimeInNS=0x%x r=%d \n " , __FUNCTION__ ,
queryEngineCycles - > eci . engine_class , queryEngineCycles - > eci . engine_instance ,
queryEngineCycles - > clockid , deviceQuery . size , nValidBits , gpuCycles , queryEngineCycles - > cpu_timestamp , ret ) ;
pGpuCpuTime - > gpuTimeStamp = gpuCycles ;
pGpuCpuTime - > cpuTimeinNS = queryEngineCycles - > cpu_timestamp ;
return ret = = 0 ;
}
2023-06-20 07:34:12 +00:00
bool IoctlHelperXe : : getTopologyDataAndMap ( const HardwareInfo & hwInfo , DrmQueryTopologyData & topologyData , TopologyMap & topologyMap ) {
2025-08-05 22:33:38 +00:00
const auto queryGtTopology = queryData < uint8_t > ( DRM_XE_DEVICE_QUERY_GT_TOPOLOGY ) ;
2023-06-20 07:34:12 +00:00
2025-08-05 22:33:38 +00:00
const auto numTiles = tileIdToGtId . size ( ) ;
std : : vector < TopologyBitmap > topologyBitmap ( numTiles ) ;
2023-10-19 11:23:16 +00:00
2023-10-05 05:46:43 +02:00
auto topologySize = queryGtTopology . size ( ) ;
2023-10-02 14:26:33 +00:00
auto dataPtr = queryGtTopology . data ( ) ;
2023-06-20 07:34:12 +00:00
while ( topologySize > = sizeof ( drm_xe_query_topology_mask ) ) {
2025-08-05 22:33:38 +00:00
const drm_xe_query_topology_mask * topo = reinterpret_cast < const drm_xe_query_topology_mask * > ( dataPtr ) ;
2023-06-20 07:34:12 +00:00
UNRECOVERABLE_IF ( topo = = nullptr ) ;
2025-08-05 22:33:38 +00:00
const auto gtId = topo - > gt_id ;
const auto tileId = gtIdToTileId [ gtId ] ;
2023-10-19 11:23:16 +00:00
2024-04-12 13:09:30 +00:00
if ( tileId ! = invalidIndex ) {
2025-08-05 22:33:38 +00:00
const auto bytes = std : : span < const uint8_t > ( topo - > mask , topo - > num_bytes ) ;
2023-10-19 11:23:16 +00:00
switch ( topo - > type ) {
2023-11-27 13:16:31 +00:00
case DRM_XE_TOPO_DSS_GEOMETRY :
2025-08-05 22:33:38 +00:00
topologyBitmap [ tileId ] . dssGeometry = bytes ;
2023-10-19 11:23:16 +00:00
break ;
2023-11-27 13:16:31 +00:00
case DRM_XE_TOPO_DSS_COMPUTE :
2025-08-05 22:33:38 +00:00
topologyBitmap [ tileId ] . dssCompute = bytes ;
2023-10-19 11:23:16 +00:00
break ;
2024-09-17 14:55:51 +00:00
case DRM_XE_TOPO_L3_BANK :
2025-08-05 22:33:38 +00:00
topologyBitmap [ tileId ] . l3Banks = bytes ;
2024-09-17 14:55:51 +00:00
break ;
2024-11-18 12:22:18 +00:00
case DRM_XE_TOPO_EU_PER_DSS :
case DRM_XE_TOPO_SIMD16_EU_PER_DSS :
2025-08-05 22:33:38 +00:00
topologyBitmap [ tileId ] . eu = bytes ;
2024-11-18 12:22:18 +00:00
break ;
2023-10-19 11:23:16 +00:00
default :
2024-11-18 12:22:18 +00:00
xeLog ( " Unhandle GT Topo type: %d \n " , topo - > type ) ;
2023-10-19 11:23:16 +00:00
}
2023-06-20 07:34:12 +00:00
}
2025-08-05 22:33:38 +00:00
const auto itemSize = sizeof ( drm_xe_query_topology_mask ) + topo - > num_bytes ;
2023-06-20 07:34:12 +00:00
topologySize - = itemSize ;
2023-10-02 14:26:33 +00:00
dataPtr = ptrOffset ( dataPtr , itemSize ) ;
2023-06-20 07:34:12 +00:00
}
2025-08-05 22:33:38 +00:00
const TopologyLimits topologyLimits {
. maxSlices = static_cast < int > ( hwInfo . gtSystemInfo . MaxSlicesSupported ) ,
. maxSubSlicesPerSlice = static_cast < int > ( hwInfo . gtSystemInfo . MaxSubSlicesSupported / hwInfo . gtSystemInfo . MaxSlicesSupported ) ,
. maxEusPerSubSlice = static_cast < int > ( hwInfo . gtSystemInfo . MaxEuPerSubSlice ) ,
} ;
2024-09-25 14:41:09 +00:00
2025-08-05 22:33:38 +00:00
const auto topologyInfo = getTopologyInfoMultiTile ( topologyBitmap , topologyLimits , topologyMap ) ;
2024-09-25 14:41:09 +00:00
2025-08-05 22:33:38 +00:00
topologyData . sliceCount = topologyInfo . sliceCount ;
topologyData . subSliceCount = topologyInfo . subSliceCount ;
topologyData . numL3Banks = topologyInfo . l3BankCount ;
topologyData . euCount = topologyInfo . euCount ;
topologyData . maxSlices = topologyLimits . maxSlices ;
topologyData . maxSubSlicesPerSlice = topologyLimits . maxSubSlicesPerSlice ;
topologyData . maxEusPerSubSlice = topologyLimits . maxEusPerSubSlice ;
2024-09-25 14:41:09 +00:00
2025-08-05 22:33:38 +00:00
return topologyInfo . subSliceCount ! = 0 ;
2023-06-20 07:34:12 +00:00
}
2024-12-13 14:15:31 +00:00
void IoctlHelperXe : : updateBindInfo ( uint64_t userPtr ) {
2023-09-12 14:34:53 +00:00
std : : unique_lock < std : : mutex > lock ( xeLock ) ;
2024-12-13 14:15:31 +00:00
BindInfo b = { userPtr , 0 } ;
2023-09-12 14:34:53 +00:00
bindInfo . push_back ( b ) ;
}
2024-04-12 09:40:28 +00:00
// Maps the aub_stream default engine type to the DRM Xe engine class.
// Only ENGINE_CCS and ENGINE_RCS are expected; anything else is fatal.
uint16_t IoctlHelperXe::getDefaultEngineClass(const aub_stream::EngineType &defaultEngineType) {
    switch (defaultEngineType) {
    case aub_stream::EngineType::ENGINE_CCS:
        return DRM_XE_ENGINE_CLASS_COMPUTE;
    case aub_stream::EngineType::ENGINE_RCS:
        return DRM_XE_ENGINE_CLASS_RENDER;
    default:
        /* So far defaultEngineType is either ENGINE_RCS or ENGINE_CCS */
        UNRECOVERABLE_IF(true);
        return 0;
    }
}
2024-04-10 15:39:16 +00:00
/**
* @ brief returns caching policy for new allocation .
* For system memory caching policy is write - back , otherwise it ' s write - combined .
*
* @ param [ in ] allocationInSystemMemory flag that indicates if allocation will be allocated in system memory
*
* @ return returns caching policy defined as DRM_XE_GEM_CPU_CACHING_WC or DRM_XE_GEM_CPU_CACHING_WB
*/
2024-05-22 13:31:31 +00:00
uint16_t IoctlHelperXe : : getCpuCachingMode ( std : : optional < bool > isCoherent , bool allocationInSystemMemory ) const {
2023-12-06 13:47:26 +00:00
uint16_t cpuCachingMode = DRM_XE_GEM_CPU_CACHING_WC ;
2024-07-17 13:51:20 +00:00
if ( allocationInSystemMemory ) {
if ( ( isCoherent . value_or ( true ) = = true ) ) {
cpuCachingMode = DRM_XE_GEM_CPU_CACHING_WB ;
}
2024-04-10 15:39:16 +00:00
}
2023-12-06 13:47:26 +00:00
if ( debugManager . flags . OverrideCpuCaching . get ( ) ! = - 1 ) {
cpuCachingMode = debugManager . flags . OverrideCpuCaching . get ( ) ;
}
return cpuCachingMode ;
}
2024-05-22 13:31:31 +00:00
int IoctlHelperXe : : createGemExt ( const MemRegionsVec & memClassInstances , size_t allocSize , uint32_t & handle , uint64_t patIndex , std : : optional < uint32_t > vmId , int32_t pairHandle , bool isChunked , uint32_t numOfChunks , std : : optional < uint32_t > memPolicyMode , std : : optional < std : : vector < unsigned long > > memPolicyNodemask , std : : optional < bool > isCoherent ) {
2023-01-09 12:07:23 +00:00
struct drm_xe_gem_create create = { } ;
uint32_t regionsSize = static_cast < uint32_t > ( memClassInstances . size ( ) ) ;
if ( ! regionsSize ) {
xeLog ( " memClassInstances empty ! \n " , " " ) ;
return - 1 ;
}
create . size = allocSize ;
MemoryClassInstance mem = memClassInstances [ regionsSize - 1 ] ;
2023-05-04 10:13:08 +00:00
std : : bitset < 32 > memoryInstances { } ;
2024-10-23 19:11:43 +00:00
bool isSysMemOnly = true ;
2023-05-04 10:13:08 +00:00
for ( const auto & memoryClassInstance : memClassInstances ) {
memoryInstances . set ( memoryClassInstance . memoryInstance ) ;
2024-10-23 19:11:43 +00:00
if ( memoryClassInstance . memoryClass ! = drm_xe_memory_class : : DRM_XE_MEM_REGION_CLASS_SYSMEM ) {
isSysMemOnly = false ;
}
2023-01-09 12:07:23 +00:00
}
2023-12-05 14:40:08 +00:00
create . placement = static_cast < uint32_t > ( memoryInstances . to_ulong ( ) ) ;
2024-10-23 19:11:43 +00:00
create . cpu_caching = this - > getCpuCachingMode ( isCoherent , isSysMemOnly ) ;
2024-07-08 14:40:39 +00:00
2025-09-11 05:18:55 +02:00
if ( debugManager . flags . EnableDeferBacking . get ( ) ) {
2025-02-03 10:27:00 +00:00
create . flags | = DRM_XE_GEM_CREATE_FLAG_DEFER_BACKING ;
}
2024-07-08 14:40:39 +00:00
printDebugString ( debugManager . flags . PrintBOCreateDestroyResult . get ( ) , stdout , " Performing DRM_IOCTL_XE_GEM_CREATE with {vmid=0x%x size=0x%lx flags=0x%x placement=0x%x caching=%hu } " ,
create . vm_id , create . size , create . flags , create . placement , create . cpu_caching ) ;
2023-12-12 08:48:32 +00:00
auto ret = IoctlHelper : : ioctl ( DrmIoctl : : gemCreate , & create ) ;
2023-01-09 12:07:23 +00:00
handle = create . handle ;
2024-07-08 14:40:39 +00:00
printDebugString ( debugManager . flags . PrintBOCreateDestroyResult . get ( ) , stdout , " DRM_IOCTL_XE_GEM_CREATE has returned: %d BO-%u with size: %lu \n " , ret , handle , create . size ) ;
2024-02-06 11:28:02 +00:00
xeLog ( " -> IoctlHelperXe::%s [%d,%d] vmid=0x%x s=0x%lx f=0x%x p=0x%x h=0x%x c=%hu r=%d \n " , __FUNCTION__ ,
2023-01-09 12:07:23 +00:00
mem . memoryClass , mem . memoryInstance ,
2024-02-06 11:28:02 +00:00
create . vm_id , create . size , create . flags , create . placement , handle , create . cpu_caching , ret ) ;
2023-01-18 20:22:32 +00:00
return ret ;
2023-01-09 12:07:23 +00:00
}
2024-05-22 13:31:31 +00:00
uint32_t IoctlHelperXe : : createGem ( uint64_t size , uint32_t memoryBanks , std : : optional < bool > isCoherent ) {
2023-09-06 11:04:13 +00:00
struct drm_xe_gem_create create = { } ;
create . size = size ;
auto pHwInfo = drm . getRootDeviceEnvironment ( ) . getHardwareInfo ( ) ;
auto memoryInfo = drm . getMemoryInfo ( ) ;
std : : bitset < 32 > memoryInstances { } ;
auto banks = std : : bitset < 4 > ( memoryBanks ) ;
size_t currentBank = 0 ;
size_t i = 0 ;
2024-10-23 19:11:43 +00:00
bool isSysMemOnly = true ;
2023-09-06 11:04:13 +00:00
while ( i < banks . count ( ) ) {
if ( banks . test ( currentBank ) ) {
auto regionClassAndInstance = memoryInfo - > getMemoryRegionClassAndInstance ( 1u < < currentBank , * pHwInfo ) ;
memoryInstances . set ( regionClassAndInstance . memoryInstance ) ;
2024-10-23 19:11:43 +00:00
if ( regionClassAndInstance . memoryClass ! = drm_xe_memory_class : : DRM_XE_MEM_REGION_CLASS_SYSMEM ) {
isSysMemOnly = false ;
}
2023-09-06 11:04:13 +00:00
i + + ;
}
currentBank + + ;
}
2023-09-08 14:14:57 +00:00
if ( memoryBanks = = 0 ) {
auto regionClassAndInstance = memoryInfo - > getMemoryRegionClassAndInstance ( memoryBanks , * pHwInfo ) ;
memoryInstances . set ( regionClassAndInstance . memoryInstance ) ;
}
2023-12-05 14:40:08 +00:00
create . placement = static_cast < uint32_t > ( memoryInstances . to_ulong ( ) ) ;
2024-10-23 19:11:43 +00:00
create . cpu_caching = this - > getCpuCachingMode ( isCoherent , isSysMemOnly ) ;
2024-07-08 14:40:39 +00:00
2025-09-11 05:18:55 +02:00
if ( debugManager . flags . EnableDeferBacking . get ( ) ) {
2025-02-03 10:27:00 +00:00
create . flags | = DRM_XE_GEM_CREATE_FLAG_DEFER_BACKING ;
}
2024-07-08 14:40:39 +00:00
printDebugString ( debugManager . flags . PrintBOCreateDestroyResult . get ( ) , stdout , " Performing DRM_IOCTL_XE_GEM_CREATE with {vmid=0x%x size=0x%lx flags=0x%x placement=0x%x caching=%hu } " ,
create . vm_id , create . size , create . flags , create . placement , create . cpu_caching ) ;
2023-12-12 08:48:32 +00:00
[[maybe_unused]] auto ret = ioctl ( DrmIoctl : : gemCreate , & create ) ;
2024-02-06 11:28:02 +00:00
2024-07-08 14:40:39 +00:00
printDebugString ( debugManager . flags . PrintBOCreateDestroyResult . get ( ) , stdout , " DRM_IOCTL_XE_GEM_CREATE has returned: %d BO-%u with size: %lu \n " , ret , create . handle , create . size ) ;
2024-02-06 11:28:02 +00:00
xeLog ( " -> IoctlHelperXe::%s vmid=0x%x s=0x%lx f=0x%x p=0x%x h=0x%x c=%hu r=%d \n " , __FUNCTION__ ,
create . vm_id , create . size , create . flags , create . placement , create . handle , create . cpu_caching , ret ) ;
2023-09-06 11:04:13 +00:00
DEBUG_BREAK_IF ( ret ! = 0 ) ;
return create . handle ;
}
2024-12-04 15:54:56 +00:00
// CLOS (cache-allocation) hooks: no-op stubs on the Xe path — each call is
// only traced and reports "no region"/zero ways.

// Returns CacheRegion::none: no CLOS region is reserved.
CacheRegion IoctlHelperXe::closAlloc(CacheLevel cacheLevel) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return CacheRegion::none;
}
// Returns 0: no cache ways are allocated for the given region/level.
uint16_t IoctlHelperXe::closAllocWays(CacheRegion closIndex, uint16_t cacheLevel, uint16_t numWays) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}
// Returns CacheRegion::none: nothing was allocated, so nothing to free.
CacheRegion IoctlHelperXe::closFree(CacheRegion closIndex) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return CacheRegion::none;
}
2024-04-25 17:45:21 +00:00
void IoctlHelperXe : : setupXeWaitUserFenceStruct ( void * arg , uint32_t ctxId , uint16_t op , uint64_t addr , uint64_t value , int64_t timeout ) {
auto waitUserFence = reinterpret_cast < drm_xe_wait_user_fence * > ( arg ) ;
waitUserFence - > addr = addr ;
waitUserFence - > op = op ;
waitUserFence - > value = value ;
waitUserFence - > mask = std : : numeric_limits < uint64_t > : : max ( ) ;
waitUserFence - > timeout = timeout ;
waitUserFence - > exec_queue_id = ctxId ;
}
2024-05-07 11:47:34 +00:00
int IoctlHelperXe : : xeWaitUserFence ( uint32_t ctxId , uint16_t op , uint64_t addr , uint64_t value , int64_t timeout , bool userInterrupt , uint32_t externalInterruptId , GraphicsAllocation * allocForInterruptWait ) {
2024-08-13 11:27:54 +00:00
UNRECOVERABLE_IF ( addr = = 0x0 )
2024-04-25 17:45:21 +00:00
drm_xe_wait_user_fence waitUserFence = { } ;
setupXeWaitUserFenceStruct ( & waitUserFence , ctxId , op , addr , value , timeout ) ;
auto retVal = IoctlHelper : : ioctl ( DrmIoctl : : gemWaitUserFence , & waitUserFence ) ;
xeLog ( " -> IoctlHelperXe::%s a=0x%llx v=0x%llx T=0x%llx F=0x%x ctx=0x%x retVal=0x%x \n " , __FUNCTION__ ,
addr , value , timeout , waitUserFence . flags , ctxId , retVal ) ;
2023-05-04 10:13:08 +00:00
return retVal ;
2023-01-09 12:07:23 +00:00
}
int IoctlHelperXe : : waitUserFence ( uint32_t ctxId , uint64_t address ,
2024-04-25 17:45:21 +00:00
uint64_t value , uint32_t dataWidth , int64_t timeout , uint16_t flags ,
2024-05-07 11:47:34 +00:00
bool userInterrupt , uint32_t externalInterruptId , GraphicsAllocation * allocForInterruptWait ) {
2024-02-06 11:28:02 +00:00
xeLog ( " -> IoctlHelperXe::%s a=0x%llx v=0x%llx w=0x%x T=0x%llx F=0x%x ctx=0x%x \n " , __FUNCTION__ , address , value , dataWidth , timeout , flags , ctxId ) ;
2024-03-26 16:51:39 +00:00
UNRECOVERABLE_IF ( dataWidth ! = static_cast < uint32_t > ( Drm : : ValueWidth : : u64 ) ) ;
2023-01-09 12:07:23 +00:00
if ( address ) {
2024-05-07 11:47:34 +00:00
return xeWaitUserFence ( ctxId , DRM_XE_UFENCE_WAIT_OP_GTE , address , value , timeout , userInterrupt , externalInterruptId , allocForInterruptWait ) ;
2023-01-09 12:07:23 +00:00
}
return 0 ;
}
2025-02-13 22:26:10 +00:00
// Memory-advise / preferred-location hints: no-op stubs for Xe — each returns
// a neutral value after tracing the call.

// Returns 0: no atomic-advise attribute is produced on Xe.
uint32_t IoctlHelperXe::getAtomicAdvise(bool /* isNonAtomic */) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}

// Returns 0: atomic-access mode is not translated to an advise value here.
uint32_t IoctlHelperXe::getAtomicAccess(AtomicAccessMode mode) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}

// Returns 0: no preferred-location advise attribute exists on this path.
uint32_t IoctlHelperXe::getPreferredLocationAdvise() {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}

// Returns nullopt: no preferred region is reported; callers use their defaults.
std::optional<MemoryClassInstance> IoctlHelperXe::getPreferredLocationRegion(PreferredLocation memoryLocation, uint32_t memoryInstance) {
    return std::nullopt;
}
2023-01-09 12:07:23 +00:00
// Accepts a BO advise request; the handle/attribute/region are ignored.
bool IoctlHelperXe::setVmBoAdvise(int32_t handle, uint32_t attribute, void *region) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    // There is no vmAdvise attribute in Xe, so return success
    return true;
}
2025-05-07 00:06:28 +00:00
bool IoctlHelperXe : : setVmSharedSystemMemAdvise ( uint64_t handle , const size_t size , const uint32_t attribute , const uint64_t param , const std : : vector < uint32_t > & vmIds ) {
std : : string vmIdsStr = " [ " ;
for ( size_t i = 0 ; i < vmIds . size ( ) ; + + i ) {
{
std : : stringstream ss ;
ss < < std : : hex < < vmIds [ i ] ;
vmIdsStr + = " 0x " + ss . str ( ) ;
}
if ( i ! = vmIds . size ( ) - 1 ) {
vmIdsStr + = " , " ;
}
}
vmIdsStr + = " ] " ;
xeLog ( " -> IoctlHelperXe::%s h=0x%x s=0x%lx vmids=%s \n " , __FUNCTION__ , handle , size , vmIdsStr . c_str ( ) ) ;
2025-02-13 22:26:10 +00:00
// There is no vmAdvise attribute in Xe, so return success
return true ;
}
2023-03-08 04:06:00 +00:00
// Chunked variant of setVmBoAdvise; all parameters are ignored on Xe.
bool IoctlHelperXe::setVmBoAdviseForChunking(int32_t handle, uint64_t start, uint64_t length, uint32_t attribute, void *region) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    // There is no vmAdvise attribute in Xe, so return success
    return true;
}
2023-01-09 12:07:23 +00:00
bool IoctlHelperXe : : setVmPrefetch ( uint64_t start , uint64_t length , uint32_t region , uint32_t vmId ) {
2025-04-29 16:41:39 +00:00
xeLog ( " -> IoctlHelperXe::%s s=0x%llx l=0x%llx align_s=0x%llx align_l=0x%llx vmid=0x%x \n " , __FUNCTION__ , start , length , alignDown ( start , MemoryConstants : : pageSize ) , alignSizeWholePage ( reinterpret_cast < void * > ( start ) , length ) , vmId ) ;
2024-10-30 10:00:31 +00:00
drm_xe_vm_bind bind = { } ;
bind . vm_id = vmId ;
bind . num_binds = 1 ;
2025-04-29 16:41:39 +00:00
bind . bind . range = alignSizeWholePage ( reinterpret_cast < void * > ( start ) , length ) ;
bind . bind . addr = alignDown ( start , MemoryConstants : : pageSize ) ;
2024-10-30 10:00:31 +00:00
bind . bind . op = DRM_XE_VM_BIND_OP_PREFETCH ;
auto pHwInfo = this - > drm . getRootDeviceEnvironment ( ) . getHardwareInfo ( ) ;
constexpr uint32_t subDeviceMaskSize = DeviceBitfield ( ) . size ( ) ;
constexpr uint32_t subDeviceMaskMax = ( 1u < < subDeviceMaskSize ) - 1u ;
uint32_t subDeviceId = region & subDeviceMaskMax ;
DeviceBitfield subDeviceMask = ( 1u < < subDeviceId ) ;
MemoryClassInstance regionInstanceClass = this - > drm . getMemoryInfo ( ) - > getMemoryRegionClassAndInstance ( subDeviceMask , * pHwInfo ) ;
2025-03-11 02:59:53 +00:00
bind . bind . prefetch_mem_region_instance = regionInstanceClass . memoryInstance ;
2025-03-12 05:18:52 +01:00
2024-10-30 10:00:31 +00:00
int ret = IoctlHelper : : ioctl ( DrmIoctl : : gemVmBind , & bind ) ;
xeLog ( " vm=%d addr=0x%lx range=0x%lx region=0x%x operation=%d(%s) ret=%d \n " ,
bind . vm_id ,
bind . bind . addr ,
bind . bind . range ,
bind . bind . prefetch_mem_region_instance ,
bind . bind . op ,
xeGetBindOperationName ( bind . bind . op ) ,
ret ) ;
if ( ret ! = 0 ) {
xeLog ( " error: %s ret=%d \n " , xeGetBindOperationName ( bind . bind . op ) , ret ) ;
return false ;
}
return true ;
2023-01-09 12:07:23 +00:00
}
// Returns 0: no exec flag is used to request direct submission on this path.
uint32_t IoctlHelperXe::getDirectSubmissionFlag() {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}
// Returns 0: no "soft" wait flag is used for the Xe wait-user-fence ioctl.
uint16_t IoctlHelperXe::getWaitUserFenceSoftFlag() {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
};
2023-12-27 13:27:51 +00:00
// The generic ExecObject/ExecBuffer blobs carry Xe-specific payloads
// (ExecObjectXe / ExecBufferXe); the helpers below fill and pretty-print them.

// Stores address and handle into the exec object; drmContextId, bindInfo and
// isMarkedForCapture are not needed by the Xe exec path and are ignored.
void IoctlHelperXe::fillExecObject(ExecObject &execObject, uint32_t handle, uint64_t gpuAddress, uint32_t drmContextId, bool bindInfo, bool isMarkedForCapture) {
    auto execObjectXe = reinterpret_cast<ExecObjectXe *>(execObject.data);
    execObjectXe->gpuAddress = gpuAddress;
    execObjectXe->handle = handle;
}
// Appends a one-line description of the exec object to the logger stream.
void IoctlHelperXe::logExecObject(const ExecObject &execObject, std::stringstream &logger, size_t size) {
    auto execObjectXe = reinterpret_cast<const ExecObjectXe *>(execObject.data);
    logger << "ExecBufferXe = {handle: BO-" << execObjectXe->handle
           << ", address range: 0x" << reinterpret_cast<void *>(execObjectXe->gpuAddress) << "}\n";
}
// Stores the exec-object pointer, start offset and context id; bufferCount,
// size and flags are not consumed by the Xe exec path.
void IoctlHelperXe::fillExecBuffer(ExecBuffer &execBuffer, uintptr_t buffersPtr, uint32_t bufferCount, uint32_t startOffset, uint32_t size, uint64_t flags, uint32_t drmContextId) {
    auto execBufferXe = reinterpret_cast<ExecBufferXe *>(execBuffer.data);
    execBufferXe->execObject = reinterpret_cast<ExecObjectXe *>(buffersPtr);
    execBufferXe->startOffset = startOffset;
    execBufferXe->drmContextId = drmContextId;
}
// Appends a one-line description of the exec buffer to the logger stream.
void IoctlHelperXe::logExecBuffer(const ExecBuffer &execBuffer, std::stringstream &logger) {
    auto execBufferXe = reinterpret_cast<const ExecBufferXe *>(execBuffer.data);
    logger << "ExecBufferXe { "
           << "exec object: " + std::to_string(reinterpret_cast<uintptr_t>(execBufferXe->execObject))
           << ", start offset: " + std::to_string(execBufferXe->startOffset)
           << ", drm context id: " + std::to_string(execBufferXe->drmContextId)
           << " }\n";
}
2023-01-09 12:07:23 +00:00
// Submits a batch buffer through DRM_IOCTL_XE_EXEC with a single user-fence
// sync; the KMD signals counterValue at completionGpuAddress on completion.
// A null execBuffer (or missing payload) is a successful no-op.
int IoctlHelperXe::execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    int ret = 0;
    if (execBuffer) {
        auto execBufferXe = reinterpret_cast<ExecBufferXe *>(execBuffer->data);
        if (execBufferXe) {
            auto execObject = execBufferXe->execObject;
            uint32_t engine = execBufferXe->drmContextId;
            xeLog("EXEC ofs=%d ctx=0x%x ptr=0x%p\n",
                  execBufferXe->startOffset, execBufferXe->drmContextId, execBufferXe->execObject);
            xeLog(" -> IoctlHelperXe::%s CA=0x%llx v=0x%x ctx=0x%x\n", __FUNCTION__,
                  completionGpuAddress, counterValue, engine);
            // One user-fence sync: signal counterValue at completionGpuAddress.
            struct drm_xe_sync sync[1] = {};
            sync[0].type = DRM_XE_SYNC_TYPE_USER_FENCE;
            sync[0].flags = DRM_XE_SYNC_FLAG_SIGNAL;
            sync[0].addr = completionGpuAddress;
            sync[0].timeline_value = counterValue;
            // Single batch buffer at gpuAddress + startOffset on the given exec queue.
            struct drm_xe_exec exec = {};
            exec.exec_queue_id = engine;
            exec.num_syncs = 1;
            exec.syncs = reinterpret_cast<uintptr_t>(&sync);
            exec.address = execObject->gpuAddress + execBufferXe->startOffset;
            exec.num_batch_buffer = 1;
            ret = IoctlHelper::ioctl(DrmIoctl::gemExecbuffer2, &exec);
            xeLog("r=0x%x batch=0x%lx\n", ret, exec.address);
            if (debugManager.flags.PrintCompletionFenceUsage.get()) {
                std::cout << "Completion fence submitted. "
                          << "GPU address: " << std::hex << completionGpuAddress << std::dec
                          << ", value: " << counterValue << std::endl;
            }
        }
    }
    return ret;
}
// Completion fences follow vm-bind support: available exactly when it is.
bool IoctlHelperXe::completionFenceExtensionSupported(const bool isVmBindAvailable) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return isVmBindAvailable;
}
2024-03-28 08:06:58 +00:00
uint64_t IoctlHelperXe : : getFlagsForVmBind ( bool bindCapture , bool bindImmediate , bool bindMakeResident , bool bindLock , bool readOnlyResource ) {
2024-07-25 10:47:37 +00:00
uint64_t flags = 0 ;
2024-03-28 08:06:58 +00:00
xeLog ( " -> IoctlHelperXe::%s %d %d %d %d %d \n " , __FUNCTION__ , bindCapture , bindImmediate , bindMakeResident , bindLock , readOnlyResource ) ;
2024-03-27 17:39:41 +00:00
if ( bindCapture ) {
2024-07-25 10:47:37 +00:00
flags | = DRM_XE_VM_BIND_FLAG_DUMPABLE ;
2024-03-27 17:39:41 +00:00
}
2024-09-24 13:39:34 +00:00
if ( bindImmediate ) {
2024-07-25 10:47:37 +00:00
flags | = DRM_XE_VM_BIND_FLAG_IMMEDIATE ;
}
2024-09-24 13:39:34 +00:00
if ( readOnlyResource ) {
2024-07-25 10:47:37 +00:00
flags | = DRM_XE_VM_BIND_FLAG_READONLY ;
}
2024-09-13 15:38:39 +00:00
if ( bindMakeResident ) {
flags | = DRM_XE_VM_BIND_FLAG_IMMEDIATE ;
}
2024-07-25 10:47:37 +00:00
return flags ;
2024-03-25 15:43:50 +00:00
}
2023-01-18 20:22:32 +00:00
// Distance query is not implemented on Xe; inputs/outputs are left untouched.
int IoctlHelperXe::queryDistances(std::vector<QueryItem> &queryItems, std::vector<DistanceInfo> &distanceInfos) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}
2024-08-13 11:27:54 +00:00
bool IoctlHelperXe : : isPageFaultSupported ( ) {
2025-05-21 17:51:30 +00:00
auto checkVmCreateFlagsSupport = [ & ] ( uint32_t flags ) - > bool {
struct drm_xe_vm_create vmCreate = { } ;
vmCreate . flags = flags ;
auto ret = IoctlHelper : : ioctl ( DrmIoctl : : gemVmCreate , & vmCreate ) ;
if ( ret = = 0 ) {
struct drm_xe_vm_destroy vmDestroy = { } ;
vmDestroy . vm_id = vmCreate . vm_id ;
ret = IoctlHelper : : ioctl ( DrmIoctl : : gemVmDestroy , & vmDestroy ) ;
DEBUG_BREAK_IF ( ret ! = 0 ) ;
return true ;
}
return false ;
} ;
bool pageFaultSupport = checkVmCreateFlagsSupport ( DRM_XE_VM_CREATE_FLAG_LR_MODE | DRM_XE_VM_CREATE_FLAG_FAULT_MODE ) ;
xeLog ( " -> IoctlHelperXe::%s %d \n " , __FUNCTION__ , pageFaultSupport ) ;
return pageFaultSupport ;
2025-01-14 09:32:55 +00:00
}
2023-01-09 12:07:23 +00:00
// Returns 0: no extra fd parameter is applied for EU-stall sampling here.
uint32_t IoctlHelperXe::getEuStallFdParameter() {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0u;
}
// Returns an empty pointer: VM-control extension regions are not built on Xe.
std::unique_ptr<uint8_t[]> IoctlHelperXe::createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return {};
}
uint32_t IoctlHelperXe : : getFlagsForVmCreate ( bool disableScratch , bool enablePageFault , bool useVmBind ) {
xeLog ( " -> IoctlHelperXe::%s %d,%d,%d \n " , __FUNCTION__ , disableScratch , enablePageFault , useVmBind ) ;
2024-12-11 06:39:48 +01:00
uint32_t flags = DRM_XE_VM_CREATE_FLAG_LR_MODE ;
2025-01-14 09:32:55 +00:00
bool debuggingEnabled = drm . getRootDeviceEnvironment ( ) . executionEnvironment . isDebuggingEnabled ( ) ;
if ( enablePageFault | | debuggingEnabled ) {
2024-03-26 15:42:15 +00:00
flags | = DRM_XE_VM_CREATE_FLAG_FAULT_MODE ;
2023-01-09 12:07:23 +00:00
}
2025-01-15 13:12:34 +00:00
if ( ! disableScratch ) {
flags | = DRM_XE_VM_CREATE_FLAG_SCRATCH_PAGE ;
}
2023-01-09 12:07:23 +00:00
return flags ;
}
// Returns 0: access-counter context creation is a no-op on this path.
uint32_t IoctlHelperXe::createContextWithAccessCounters(GemContextCreateExt &gcc) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}
// Returns 0: cooperative context creation is a no-op on this path.
uint32_t IoctlHelperXe::createCooperativeContext(GemContextCreateExt &gcc) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}
// No-op: the set-PAT extension struct is not populated for Xe.
void IoctlHelperXe::fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
}
// Fills the caller-provided storage as an Xe UserFenceExtension: the tag
// marks the struct valid for later consumers, then address and value are set.
// nextExtension is not used on this path.
void IoctlHelperXe::fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) {
    xeLog(" -> IoctlHelperXe::%s 0x%lx 0x%lx\n", __FUNCTION__, fenceAddress, fenceValue);
    auto xeBindExtUserFence = reinterpret_cast<UserFenceExtension *>(vmBindExtUserFence);
    UNRECOVERABLE_IF(!xeBindExtUserFence);
    xeBindExtUserFence->tag = UserFenceExtension::tagValue;
    xeBindExtUserFence->addr = fenceAddress;
    xeBindExtUserFence->value = fenceValue;
}
2024-02-26 22:40:34 +00:00
void IoctlHelperXe : : setVmBindUserFence ( VmBindParams & vmBind , VmBindExtUserFenceT vmBindUserFence ) {
xeLog ( " -> IoctlHelperXe::%s \n " , __FUNCTION__ ) ;
vmBind . userFence = castToUint64 ( vmBindUserFence ) ;
return ;
}
2024-08-13 11:27:54 +00:00
// Returns empty: no atomic vmAdvise attribute is exposed for Xe.
std::optional<uint32_t> IoctlHelperXe::getVmAdviseAtomicAttribute() {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    // There is no vmAdvise attribute in Xe
    return {};
}
// vmBind/vmUnbind share one implementation; the bool selects bind vs unbind.
int IoctlHelperXe::vmBind(const VmBindParams &vmBindParams) {
    return xeVmBind(vmBindParams, true);
}
int IoctlHelperXe::vmUnbind(const VmBindParams &vmBindParams) {
    return xeVmBind(vmBindParams, false);
}

// Forwards to the reset-stats ioctl; status and resetStatsFault are not
// filled on this path.
int IoctlHelperXe::getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) {
    return ioctl(DrmIoctl::getResetStats, &resetStats);
}
2023-01-09 12:07:23 +00:00
// UUID registration and context-debug hooks: stubs that keep the generic
// IoctlHelper interface satisfied on Xe.

// Returns a default-constructed result: UUID registration is a no-op here.
UuidRegisterResult IoctlHelperXe::registerUuid(const std::string &uuid, uint32_t uuidClass, uint64_t ptr, uint64_t size) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return {};
}
// Returns a default-constructed result: string-class UUID registration is a no-op.
UuidRegisterResult IoctlHelperXe::registerStringClassUuid(const std::string &uuid, uint64_t ptr, uint64_t size) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return {};
}
// Returns 0: nothing was registered, so unregistering always succeeds.
int IoctlHelperXe::unregisterUuid(uint32_t handle) {
    xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
    return 0;
}
// Context-debug flags are not supported on this path.
bool IoctlHelperXe::isContextDebugSupported() {
    return false;
}
// No-op: reports success without setting any flag.
int IoctlHelperXe::setContextDebugFlag(uint32_t drmContextId) {
    return 0;
}
// Debugger attach is available on the Xe path.
bool IoctlHelperXe::isDebugAttachAvailable() {
    return true;
}
int IoctlHelperXe : : getDrmParamValue ( DrmParam drmParam ) const {
xeLog ( " -> IoctlHelperXe::%s 0x%x %s \n " , __FUNCTION__ , drmParam , getDrmParamString ( drmParam ) . c_str ( ) ) ;
switch ( drmParam ) {
2025-02-13 22:26:10 +00:00
case DrmParam : : atomicClassUndefined :
return - 1 ;
case DrmParam : : atomicClassDevice :
return - 1 ;
case DrmParam : : atomicClassGlobal :
return - 1 ;
case DrmParam : : atomicClassSystem :
return - 1 ;
2023-12-13 09:05:31 +00:00
case DrmParam : : memoryClassDevice :
2023-11-27 13:16:31 +00:00
return DRM_XE_MEM_REGION_CLASS_VRAM ;
2023-12-13 09:05:31 +00:00
case DrmParam : : memoryClassSystem :
2023-11-27 13:16:31 +00:00
return DRM_XE_MEM_REGION_CLASS_SYSMEM ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassRender :
2023-09-13 18:58:57 +00:00
return DRM_XE_ENGINE_CLASS_RENDER ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassCopy :
2023-09-13 18:58:57 +00:00
return DRM_XE_ENGINE_CLASS_COPY ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassVideo :
2023-09-13 18:58:57 +00:00
return DRM_XE_ENGINE_CLASS_VIDEO_DECODE ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassVideoEnhance :
2023-09-13 18:58:57 +00:00
return DRM_XE_ENGINE_CLASS_VIDEO_ENHANCE ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassCompute :
2023-09-13 18:58:57 +00:00
return DRM_XE_ENGINE_CLASS_COMPUTE ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassInvalid :
2023-09-13 18:58:57 +00:00
return - 1 ;
2024-03-15 13:47:40 +00:00
case DrmParam : : execDefault :
return DRM_XE_ENGINE_CLASS_COMPUTE ;
case DrmParam : : execBlt :
return DRM_XE_ENGINE_CLASS_COPY ;
case DrmParam : : execRender :
return DRM_XE_ENGINE_CLASS_RENDER ;
2023-01-09 12:07:23 +00:00
default :
return getDrmParamValueBase ( drmParam ) ;
}
}
// Fallback mapping: the enum's own numeric value is used as the param value.
int IoctlHelperXe::getDrmParamValueBase(DrmParam drmParam) const {
    return static_cast<int>(drmParam);
}
2023-01-18 20:22:32 +00:00
int IoctlHelperXe : : ioctl ( DrmIoctl request , void * arg ) {
int ret = - 1 ;
2023-01-09 12:07:23 +00:00
xeLog ( " => IoctlHelperXe::%s 0x%x \n " , __FUNCTION__ , request ) ;
switch ( request ) {
2023-12-12 08:48:32 +00:00
case DrmIoctl : : getparam : {
2024-04-12 09:40:28 +00:00
auto getParam = reinterpret_cast < GetParam * > ( arg ) ;
2023-01-09 12:07:23 +00:00
ret = 0 ;
2024-04-12 09:40:28 +00:00
switch ( getParam - > param ) {
2023-12-13 09:05:31 +00:00
case static_cast < int > ( DrmParam : : paramCsTimestampFrequency ) : {
2024-04-12 13:38:19 +00:00
* getParam - > value = xeGtListData - > gt_list [ defaultEngine - > gt_id ] . reference_clock ;
2023-09-18 10:49:16 +00:00
} break ;
2023-01-09 12:07:23 +00:00
default :
2023-01-18 20:22:32 +00:00
ret = - 1 ;
2023-01-09 12:07:23 +00:00
}
2024-04-12 09:40:28 +00:00
xeLog ( " -> IoctlHelperXe::ioctl Getparam 0x%x/0x%x r=%d \n " , getParam - > param , * getParam - > value , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : query : {
2023-01-09 12:07:23 +00:00
2023-12-27 13:27:51 +00:00
Query * query = static_cast < Query * > ( arg ) ;
QueryItem * queryItems = reinterpret_cast < QueryItem * > ( query - > itemsPtr ) ;
for ( auto i = 0u ; i < query - > numItems ; i + + ) {
auto & queryItem = queryItems [ i ] ;
2023-01-09 12:07:23 +00:00
2023-12-27 13:27:51 +00:00
if ( queryItem . queryId ! = static_cast < int > ( DrmParam : : queryHwconfigTable ) ) {
xeLog ( " error: bad query 0x%x \n " , queryItem . queryId ) ;
return - 1 ;
}
auto queryDataSize = static_cast < int32_t > ( hwconfig . size ( ) * sizeof ( uint32_t ) ) ;
if ( queryItem . length = = 0 ) {
queryItem . length = queryDataSize ;
} else {
UNRECOVERABLE_IF ( queryItem . length ! = queryDataSize ) ;
memcpy_s ( reinterpret_cast < void * > ( queryItem . dataPtr ) ,
queryItem . length , hwconfig . data ( ) , queryItem . length ) ;
}
xeLog ( " -> IoctlHelperXe::ioctl Query id=0x%x f=0x%x len=%d \n " ,
static_cast < int > ( queryItem . queryId ) , static_cast < int > ( queryItem . flags ) , queryItem . length ) ;
ret = 0 ;
2023-01-09 12:07:23 +00:00
}
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemUserptr : {
2023-01-09 12:07:23 +00:00
GemUserPtr * d = static_cast < GemUserPtr * > ( arg ) ;
2024-12-13 14:15:31 +00:00
updateBindInfo ( d - > userPtr ) ;
2023-01-09 12:07:23 +00:00
ret = 0 ;
2024-02-06 11:28:02 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemUserptr p=0x%llx s=0x%llx f=0x%x h=0x%x r=%d \n " , d - > userPtr ,
2023-01-09 12:07:23 +00:00
d - > userSize , d - > flags , d - > handle , ret ) ;
xeShowBindTable ( ) ;
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemContextDestroy : {
2023-01-09 12:07:23 +00:00
GemContextDestroy * d = static_cast < GemContextDestroy * > ( arg ) ;
2023-09-13 18:58:57 +00:00
struct drm_xe_exec_queue_destroy destroy = { } ;
destroy . exec_queue_id = d - > contextId ;
2024-03-22 12:17:31 +00:00
ret = IoctlHelper : : ioctl ( request , & destroy ) ;
2024-07-03 11:23:58 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemContextDestrory ctx=0x%x r=%d \n " ,
2023-01-09 12:07:23 +00:00
d - > contextId , ret ) ;
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemContextGetparam : {
2023-01-09 12:07:23 +00:00
GemContextParam * d = static_cast < GemContextParam * > ( arg ) ;
2023-08-07 12:32:02 +00:00
auto addressSpace = drm . getRootDeviceEnvironment ( ) . getHardwareInfo ( ) - > capabilityTable . gpuAddressSpace ;
2023-01-09 12:07:23 +00:00
ret = 0 ;
switch ( d - > param ) {
2023-12-13 09:05:31 +00:00
case static_cast < int > ( DrmParam : : contextParamGttSize ) :
2023-08-07 12:32:02 +00:00
d - > value = addressSpace + 1u ;
2023-01-09 12:07:23 +00:00
break ;
default :
2023-01-18 20:22:32 +00:00
ret = - 1 ;
2023-01-09 12:07:23 +00:00
break ;
}
2023-01-18 20:22:32 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemContextGetparam r=%d \n " , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemContextSetparam : {
2024-03-21 14:47:48 +00:00
GemContextParam * gemContextParam = static_cast < GemContextParam * > ( arg ) ;
switch ( gemContextParam - > param ) {
2023-12-13 09:05:31 +00:00
case static_cast < int > ( DrmParam : : contextParamEngines ) : {
2024-03-21 14:47:48 +00:00
auto contextEngine = reinterpret_cast < ContextParamEngines < > * > ( gemContextParam - > value ) ;
if ( ! contextEngine | | contextEngine - > numEnginesInContext = = 0 ) {
break ;
2023-01-09 12:07:23 +00:00
}
2024-03-21 14:47:48 +00:00
auto numEngines = contextEngine - > numEnginesInContext ;
contextParamEngine . resize ( numEngines ) ;
memcpy_s ( contextParamEngine . data ( ) , numEngines * sizeof ( uint64_t ) , contextEngine - > enginesData , numEngines * sizeof ( uint64_t ) ) ;
ret = 0 ;
2023-01-09 12:07:23 +00:00
} break ;
default :
2023-01-18 20:22:32 +00:00
ret = - 1 ;
2023-01-09 12:07:23 +00:00
break ;
}
2023-01-18 20:22:32 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemContextSetparam r=%d \n " , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemClose : {
2024-09-10 19:27:33 +00:00
std : : unique_lock < std : : mutex > lock ( gemCloseLock ) ;
2023-01-09 12:07:23 +00:00
struct GemClose * d = static_cast < struct GemClose * > ( arg ) ;
xeShowBindTable ( ) ;
2024-03-22 10:54:26 +00:00
bool isUserptr = false ;
2024-12-13 14:15:31 +00:00
if ( d - > userptr ) {
std : : unique_lock < std : : mutex > lock ( xeLock ) ;
for ( unsigned int i = 0 ; i < bindInfo . size ( ) ; i + + ) {
if ( d - > userptr = = bindInfo [ i ] . userptr ) {
isUserptr = true ;
xeLog ( " removing 0x%x 0x%lx \n " ,
bindInfo [ i ] . userptr ,
bindInfo [ i ] . addr ) ;
bindInfo . erase ( bindInfo . begin ( ) + i ) ;
ret = 0 ;
break ;
}
2023-01-09 12:07:23 +00:00
}
}
2024-12-13 14:15:31 +00:00
if ( ! isUserptr ) {
ret = IoctlHelper : : ioctl ( request , arg ) ;
2023-01-09 12:07:23 +00:00
}
2024-12-13 14:15:31 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemClose h=0x%x r=%d \n " , d - > handle , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemVmCreate : {
2024-03-26 15:42:15 +00:00
GemVmControl * vmControl = static_cast < GemVmControl * > ( arg ) ;
2023-01-09 12:07:23 +00:00
struct drm_xe_vm_create args = { } ;
2024-03-26 15:42:15 +00:00
args . flags = vmControl - > flags ;
2023-01-09 12:07:23 +00:00
ret = IoctlHelper : : ioctl ( request , & args ) ;
2024-03-26 15:42:15 +00:00
vmControl - > vmId = args . vm_id ;
xeLog ( " -> IoctlHelperXe::ioctl gemVmCreate f=0x%x vmid=0x%x r=%d \n " , vmControl - > flags , vmControl - > vmId , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemVmDestroy : {
2023-01-09 12:07:23 +00:00
GemVmControl * d = static_cast < GemVmControl * > ( arg ) ;
struct drm_xe_vm_destroy args = { } ;
args . vm_id = d - > vmId ;
ret = IoctlHelper : : ioctl ( request , & args ) ;
2023-01-18 20:22:32 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemVmDestroy vmid=0x%x r=%d \n " , d - > vmId , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemMmapOffset : {
2023-01-09 12:07:23 +00:00
GemMmapOffset * d = static_cast < GemMmapOffset * > ( arg ) ;
struct drm_xe_gem_mmap_offset mmo = { } ;
mmo . handle = d - > handle ;
2025-06-24 11:16:24 +00:00
mmo . flags = static_cast < uint32_t > ( d - > flags ) ;
2023-01-09 12:07:23 +00:00
ret = IoctlHelper : : ioctl ( request , & mmo ) ;
d - > offset = mmo . offset ;
2024-02-06 11:28:02 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemMmapOffset h=0x%x o=0x%x f=0x%x r=%d \n " ,
d - > handle , d - > offset , d - > flags , ret ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : getResetStats : {
2024-04-24 08:23:11 +00:00
ResetStats * resetStats = static_cast < ResetStats * > ( arg ) ;
drm_xe_exec_queue_get_property getProperty { } ;
getProperty . exec_queue_id = resetStats - > contextId ;
getProperty . property = DRM_XE_EXEC_QUEUE_GET_PROPERTY_BAN ;
ret = IoctlHelper : : ioctl ( request , & getProperty ) ;
resetStats - > batchPending = static_cast < uint32_t > ( getProperty . value ) ;
xeLog ( " -> IoctlHelperXe::ioctl GetResetStats ctx=0x%x r=%d value=%llu \n " ,
resetStats - > contextId , ret , getProperty . value ) ;
2023-01-09 12:07:23 +00:00
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : primeFdToHandle : {
2023-01-09 12:07:23 +00:00
PrimeHandle * prime = static_cast < PrimeHandle * > ( arg ) ;
ret = IoctlHelper : : ioctl ( request , arg ) ;
2024-02-06 11:28:02 +00:00
xeLog ( " ->PrimeFdToHandle h=0x%x f=0x%x d=0x%x r=%d \n " ,
2023-01-09 12:07:23 +00:00
prime - > handle , prime - > flags , prime - > fileDescriptor , ret ) ;
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : primeHandleToFd : {
2023-01-09 12:07:23 +00:00
PrimeHandle * prime = static_cast < PrimeHandle * > ( arg ) ;
2023-08-30 11:23:22 +00:00
ret = IoctlHelper : : ioctl ( request , arg ) ;
2023-01-18 20:22:32 +00:00
xeLog ( " ->PrimeHandleToFd h=0x%x f=0x%x d=0x%x r=%d \n " ,
2023-01-09 12:07:23 +00:00
prime - > handle , prime - > flags , prime - > fileDescriptor , ret ) ;
} break ;
2025-06-19 01:28:58 +00:00
case DrmIoctl : : syncObjFdToHandle : {
ret = IoctlHelper : : ioctl ( request , arg ) ;
xeLog ( " -> IoctlHelperXe::ioctl SyncObjFdToHandle r=%d \n " , ret ) ;
} break ;
case DrmIoctl : : syncObjTimelineWait : {
ret = IoctlHelper : : ioctl ( request , arg ) ;
xeLog ( " -> IoctlHelperXe::ioctl SyncObjTimelineWait r=%d \n " , ret ) ;
} break ;
case DrmIoctl : : syncObjWait : {
ret = IoctlHelper : : ioctl ( request , arg ) ;
xeLog ( " -> IoctlHelperXe::ioctl SyncObjWait r=%d \n " , ret ) ;
} break ;
case DrmIoctl : : syncObjSignal : {
ret = IoctlHelper : : ioctl ( request , arg ) ;
xeLog ( " -> IoctlHelperXe::ioctl SyncObjSignal r=%d \n " , ret ) ;
} break ;
case DrmIoctl : : syncObjTimelineSignal : {
ret = IoctlHelper : : ioctl ( request , arg ) ;
xeLog ( " -> IoctlHelperXe::ioctl SyncObjTimelineSignal r=%d \n " , ret ) ;
} break ;
2023-12-12 08:48:32 +00:00
case DrmIoctl : : gemCreate : {
2023-09-06 11:04:13 +00:00
drm_xe_gem_create * gemCreate = static_cast < drm_xe_gem_create * > ( arg ) ;
2023-03-10 16:24:01 +00:00
ret = IoctlHelper : : ioctl ( request , arg ) ;
2024-02-06 11:28:02 +00:00
xeLog ( " -> IoctlHelperXe::ioctl GemCreate h=0x%x s=0x%lx p=0x%x f=0x%x vmid=0x%x r=%d \n " ,
gemCreate - > handle , gemCreate - > size , gemCreate - > placement , gemCreate - > flags , gemCreate - > vm_id , ret ) ;
2023-03-10 16:24:01 +00:00
} break ;
2024-01-23 16:25:51 +00:00
case DrmIoctl : : debuggerOpen : {
ret = debuggerOpenIoctl ( request , arg ) ;
} break ;
2024-02-01 22:03:55 +00:00
case DrmIoctl : : metadataCreate : {
ret = debuggerMetadataCreateIoctl ( request , arg ) ;
} break ;
case DrmIoctl : : metadataDestroy : {
ret = debuggerMetadataDestroyIoctl ( request , arg ) ;
} break ;
2025-03-11 18:21:47 +00:00
case DrmIoctl : : perfQuery :
2024-05-29 19:35:26 +00:00
case DrmIoctl : : perfOpen : {
ret = perfOpenIoctl ( request , arg ) ;
} break ;
2024-02-01 22:03:55 +00:00
2023-01-09 12:07:23 +00:00
default :
xeLog ( " Not handled 0x%x \n " , request ) ;
UNRECOVERABLE_IF ( true ) ;
}
return ret ;
}
void IoctlHelperXe : : xeShowBindTable ( ) {
2024-02-02 10:00:45 +00:00
if ( debugManager . flags . PrintXeLogs . get ( ) ) {
2023-07-03 11:14:02 +00:00
std : : unique_lock < std : : mutex > lock ( xeLock ) ;
2024-12-13 14:15:31 +00:00
xeLog ( " show bind: (<index> <userptr> <addr>) \n " , " " ) ;
2023-07-03 11:14:02 +00:00
for ( unsigned int i = 0 ; i < bindInfo . size ( ) ; i + + ) {
2024-12-13 14:15:31 +00:00
xeLog ( " %3d x%016lx x%016lx \n " , i ,
2023-07-03 11:14:02 +00:00
bindInfo [ i ] . userptr ,
2024-12-13 14:15:31 +00:00
bindInfo [ i ] . addr ) ;
2023-07-03 11:14:02 +00:00
}
2023-01-09 12:07:23 +00:00
}
}
2025-08-21 09:53:34 +00:00
void IoctlHelperXe : : applyContextFlags ( void * execQueueCreate , bool allocateInterrupt ) {
if ( this - > isLowLatencyHintAvailable ) {
reinterpret_cast < drm_xe_exec_queue_create * > ( execQueueCreate ) - > flags | = DRM_XE_EXEC_QUEUE_LOW_LATENCY_HINT ;
}
}
2024-06-06 11:23:55 +00:00
int IoctlHelperXe : : createDrmContext ( Drm & drm , OsContextLinux & osContext , uint32_t drmVmId , uint32_t deviceIndex , bool allocateInterrupt ) {
2023-01-09 12:07:23 +00:00
uint32_t drmContextId = 0 ;
xeLog ( " createDrmContext VM=0x%x \n " , drmVmId ) ;
2024-10-08 13:23:41 +00:00
drm . bindDrmContext ( drmContextId , deviceIndex , osContext . getEngineType ( ) ) ;
2024-03-15 13:47:40 +00:00
2024-07-02 19:15:10 +00:00
UNRECOVERABLE_IF ( contextParamEngine . empty ( ) ) ;
2023-01-09 12:07:23 +00:00
2024-03-22 14:58:25 +00:00
std : : array < drm_xe_ext_set_property , maxContextSetProperties > extProperties { } ;
2024-07-02 19:15:10 +00:00
uint32_t extPropertyIndex { 0U } ;
2025-08-29 00:16:27 +00:00
setOptionalContextProperties ( osContext , drm , & extProperties , extPropertyIndex ) ;
2025-02-24 15:34:54 +00:00
setContextProperties ( osContext , deviceIndex , & extProperties , extPropertyIndex ) ;
2024-03-22 14:58:25 +00:00
2024-07-02 19:15:10 +00:00
drm_xe_exec_queue_create create { } ;
create . width = 1 ;
create . num_placements = contextParamEngine . size ( ) ;
create . vm_id = drmVmId ;
create . instances = castToUint64 ( contextParamEngine . data ( ) ) ;
create . extensions = ( extPropertyIndex > 0U ? castToUint64 ( extProperties . data ( ) ) : 0UL ) ;
applyContextFlags ( & create , allocateInterrupt ) ;
2024-01-09 09:54:33 +00:00
2023-12-12 08:48:32 +00:00
int ret = IoctlHelper : : ioctl ( DrmIoctl : : gemContextCreateExt , & create ) ;
2023-09-13 18:58:57 +00:00
drmContextId = create . exec_queue_id ;
2024-07-02 19:15:10 +00:00
2024-03-21 14:47:48 +00:00
xeLog ( " %s:%d (%d) vmid=0x%x ctx=0x%x r=0x%x \n " , xeGetClassName ( contextParamEngine [ 0 ] . engine_class ) ,
contextParamEngine [ 0 ] . engine_instance , create . num_placements , drmVmId , drmContextId , ret ) ;
2023-01-09 12:07:23 +00:00
if ( ret ! = 0 ) {
UNRECOVERABLE_IF ( true ) ;
}
return drmContextId ;
}
2023-09-18 10:49:16 +00:00
int IoctlHelperXe::xeVmBind(const VmBindParams &vmBindParams, bool isBind) {
    // Performs a single DRM_IOCTL_XE_VM_BIND (map or unmap) and then waits on the
    // user fence that signals completion of the bind operation.
    auto gmmHelper = drm.getRootDeviceEnvironment().getGmmHelper();
    int ret = -1;
    const char *operation = isBind ? "bind" : "unbind";
    uint64_t userptr = 0u;
    {
        // Look up the internal userptr bind table under the lock.
        // On bind: record the (decanonized) GPU address for this userptr.
        // On unbind: recover the userptr registered for the GPU address.
        std::unique_lock<std::mutex> lock(xeLock);
        if (isBind) {
            if (vmBindParams.userptr) {
                for (auto i = 0u; i < bindInfo.size(); i++) {
                    if (vmBindParams.userptr == bindInfo[i].userptr) {
                        userptr = bindInfo[i].userptr;
                        bindInfo[i].addr = gmmHelper->decanonize(vmBindParams.start);
                        break;
                    }
                }
            }
        } else // unbind
        {
            auto address = gmmHelper->decanonize(vmBindParams.start);
            for (auto i = 0u; i < bindInfo.size(); i++) {
                if (address == bindInfo[i].addr) {
                    userptr = bindInfo[i].userptr;
                    break;
                }
            }
        }
    }

    drm_xe_vm_bind bind = {};
    bind.vm_id = vmBindParams.vmId;

    bind.num_binds = 1;

    bind.bind.range = vmBindParams.length;
    bind.bind.obj_offset = vmBindParams.offset;

    if (isBind) {
        bind.bind.pat_index = static_cast<uint16_t>(vmBindParams.patIndex);
    } else {
        // On unbind the caller's PAT index is not meaningful; query a default
        // (uncompressed, uncached OCL buffer) PAT index from GMM instead.
        GMM_RESOURCE_USAGE_TYPE usageType = GMM_RESOURCE_USAGE_OCL_BUFFER;
        bool compressed = false;
        bool cachable = false;
        bind.bind.pat_index = static_cast<uint16_t>(drm.getRootDeviceEnvironment().getGmmClientContext()->cachePolicyGetPATIndex(nullptr, usageType, compressed, cachable));
    }

    bind.bind.extensions = vmBindParams.extensions;
    bind.bind.flags = static_cast<uint32_t>(vmBindParams.flags);

    drm_xe_sync sync[1] = {};
    if (vmBindParams.sharedSystemUsmBind == true) {
        // Shared-system USM binds target address 0 (range covers the mirror).
        bind.bind.addr = 0;
    } else {
        bind.bind.addr = gmmHelper->decanonize(vmBindParams.start);
    }
    // Exactly one sync point: a user fence that the KMD signals when the
    // bind completes. The fence descriptor is mandatory (checked below).
    bind.num_syncs = 1;
    UNRECOVERABLE_IF(vmBindParams.userFence == 0x0);
    auto xeBindExtUserFence = reinterpret_cast<UserFenceExtension *>(vmBindParams.userFence);
    UNRECOVERABLE_IF(xeBindExtUserFence->tag != UserFenceExtension::tagValue);
    sync[0].type = DRM_XE_SYNC_TYPE_USER_FENCE;
    sync[0].flags = DRM_XE_SYNC_FLAG_SIGNAL;
    sync[0].addr = xeBindExtUserFence->addr;
    sync[0].timeline_value = xeBindExtUserFence->value;
    bind.syncs = reinterpret_cast<uintptr_t>(&sync);

    if (isBind) {
        bind.bind.op = DRM_XE_VM_BIND_OP_MAP;
        bind.bind.obj = vmBindParams.handle;
        if (userptr) {
            // Userptr mappings carry the host pointer in obj_offset and no BO handle.
            bind.bind.op = DRM_XE_VM_BIND_OP_MAP_USERPTR;
            bind.bind.obj = 0;
            bind.bind.obj_offset = userptr;
        }
    } else {
        if (vmBindParams.sharedSystemUsmEnabled) {
            // Use of MAP on unbind required for restoring the address space to the system allocator
            bind.bind.op = DRM_XE_VM_BIND_OP_MAP;
            bind.bind.flags |= DRM_XE_VM_BIND_FLAG_CPU_ADDR_MIRROR;
        } else {
            bind.bind.op = DRM_XE_VM_BIND_OP_UNMAP;
            if (userptr) {
                bind.bind.obj_offset = userptr;
            }
        }
        // Unbind never references a BO handle.
        bind.bind.obj = 0;
    }

    ret = IoctlHelper::ioctl(DrmIoctl::gemVmBind, &bind);

    xeLog(" vm=%d obj=0x%x off=0x%llx range=0x%llx addr=0x%llx operation=%d(%s) flags=%d(%s) nsy=%d pat=%hu ret=%d\n",
          bind.vm_id,
          bind.bind.obj,
          bind.bind.obj_offset,
          bind.bind.range,
          bind.bind.addr,
          bind.bind.op,
          xeGetBindOperationName(bind.bind.op),
          bind.bind.flags,
          xeGetBindFlagNames(bind.bind.flags).c_str(),
          bind.num_syncs,
          bind.bind.pat_index,
          ret);
    if (ret != 0) {
        xeLog("error: %s\n", operation);
        return ret;
    }

    // Wait for the bind's user fence. Debugger sessions wait forever; otherwise
    // a 1 s timeout applies, overridable via VmBindWaitUserFenceTimeout.
    constexpr auto oneSecTimeout = 1000000000ll;
    constexpr auto infiniteTimeout = -1;
    bool debuggingEnabled = drm.getRootDeviceEnvironment().executionEnvironment.isDebuggingEnabled();
    uint64_t timeout = debuggingEnabled ? infiniteTimeout : oneSecTimeout;
    if (debugManager.flags.VmBindWaitUserFenceTimeout.get() != -1) {
        timeout = debugManager.flags.VmBindWaitUserFenceTimeout.get();
    }
    return xeWaitUserFence(bind.exec_queue_id, DRM_XE_UFENCE_WAIT_OP_EQ,
                           sync[0].addr,
                           sync[0].timeline_value, timeout,
                           false, NEO::InterruptId::notUsed, nullptr);
}
std : : string IoctlHelperXe : : getDrmParamString ( DrmParam drmParam ) const {
switch ( drmParam ) {
2025-02-13 22:26:10 +00:00
case DrmParam : : atomicClassUndefined :
return " AtomicClassUndefined " ;
case DrmParam : : atomicClassDevice :
return " AtomicClassDevice " ;
case DrmParam : : atomicClassGlobal :
return " AtomicClassGlobal " ;
case DrmParam : : atomicClassSystem :
return " AtomicClassSystem " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextCreateExtSetparam :
2023-01-09 12:07:23 +00:00
return " ContextCreateExtSetparam " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextCreateFlagsUseExtensions :
2023-01-09 12:07:23 +00:00
return " ContextCreateFlagsUseExtensions " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextEnginesExtLoadBalance :
2023-01-09 12:07:23 +00:00
return " ContextEnginesExtLoadBalance " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamEngines :
2023-01-09 12:07:23 +00:00
return " ContextParamEngines " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamGttSize :
2023-01-09 12:07:23 +00:00
return " ContextParamGttSize " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamPersistence :
2023-01-09 12:07:23 +00:00
return " ContextParamPersistence " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamPriority :
2023-01-09 12:07:23 +00:00
return " ContextParamPriority " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamRecoverable :
2023-01-09 12:07:23 +00:00
return " ContextParamRecoverable " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamSseu :
2023-01-09 12:07:23 +00:00
return " ContextParamSseu " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : contextParamVm :
2023-01-09 12:07:23 +00:00
return " ContextParamVm " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassRender :
2023-01-09 12:07:23 +00:00
return " EngineClassRender " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassCompute :
2023-01-09 12:07:23 +00:00
return " EngineClassCompute " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassCopy :
2023-01-09 12:07:23 +00:00
return " EngineClassCopy " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassVideo :
2023-01-09 12:07:23 +00:00
return " EngineClassVideo " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassVideoEnhance :
2023-01-09 12:07:23 +00:00
return " EngineClassVideoEnhance " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassInvalid :
2023-01-09 12:07:23 +00:00
return " EngineClassInvalid " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : engineClassInvalidNone :
2023-01-09 12:07:23 +00:00
return " EngineClassInvalidNone " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : execBlt :
2023-01-09 12:07:23 +00:00
return " ExecBlt " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : execDefault :
2023-01-09 12:07:23 +00:00
return " ExecDefault " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : execNoReloc :
2023-01-09 12:07:23 +00:00
return " ExecNoReloc " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : execRender :
2023-01-09 12:07:23 +00:00
return " ExecRender " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : memoryClassDevice :
2023-01-09 12:07:23 +00:00
return " MemoryClassDevice " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : memoryClassSystem :
2023-01-09 12:07:23 +00:00
return " MemoryClassSystem " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : mmapOffsetWb :
2023-01-09 12:07:23 +00:00
return " MmapOffsetWb " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : mmapOffsetWc :
2023-01-09 12:07:23 +00:00
return " MmapOffsetWc " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramHasPooledEu :
2023-01-09 12:07:23 +00:00
return " ParamHasPooledEu " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramEuTotal :
2023-01-09 12:07:23 +00:00
return " ParamEuTotal " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramSubsliceTotal :
2023-01-09 12:07:23 +00:00
return " ParamSubsliceTotal " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramMinEuInPool :
2023-01-09 12:07:23 +00:00
return " ParamMinEuInPool " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramCsTimestampFrequency :
2023-01-09 12:07:23 +00:00
return " ParamCsTimestampFrequency " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramHasVmBind :
2023-01-09 12:07:23 +00:00
return " ParamHasVmBind " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : paramHasPageFault :
2023-01-09 12:07:23 +00:00
return " ParamHasPageFault " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : queryEngineInfo :
2023-01-09 12:07:23 +00:00
return " QueryEngineInfo " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : queryHwconfigTable :
2023-01-09 12:07:23 +00:00
return " QueryHwconfigTable " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : queryComputeSlices :
2023-01-09 12:07:23 +00:00
return " QueryComputeSlices " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : queryMemoryRegions :
2023-01-09 12:07:23 +00:00
return " QueryMemoryRegions " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : queryTopologyInfo :
2023-01-09 12:07:23 +00:00
return " QueryTopologyInfo " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : tilingNone :
2023-01-09 12:07:23 +00:00
return " TilingNone " ;
2023-12-13 09:05:31 +00:00
case DrmParam : : tilingY :
2023-01-09 12:07:23 +00:00
return " TilingY " ;
default :
return " DrmParam::<missing> " ;
}
}
2024-04-12 10:50:47 +00:00
inline std : : string getDirectoryWithFrequencyFiles ( int tileId , int gtId ) {
return " /device/tile " + std : : to_string ( tileId ) + " /gt " + std : : to_string ( gtId ) + " /freq0 " ;
}
2023-01-09 12:07:23 +00:00
std : : string IoctlHelperXe : : getFileForMaxGpuFrequency ( ) const {
2024-04-12 13:09:30 +00:00
return getFileForMaxGpuFrequencyOfSubDevice ( 0 /* tileId */ ) ;
2023-01-09 12:07:23 +00:00
}
2024-04-12 13:09:30 +00:00
std : : string IoctlHelperXe : : getFileForMaxGpuFrequencyOfSubDevice ( int tileId ) const {
return getDirectoryWithFrequencyFiles ( tileId , tileIdToGtId [ tileId ] ) + " /max_freq " ;
2023-01-09 12:07:23 +00:00
}
2024-04-12 13:09:30 +00:00
std : : string IoctlHelperXe : : getFileForMaxMemoryFrequencyOfSubDevice ( int tileId ) const {
return getDirectoryWithFrequencyFiles ( tileId , tileIdToGtId [ tileId ] ) + " /rp0_freq " ;
2023-01-09 12:07:23 +00:00
}
2025-04-08 22:05:33 +00:00
void IoctlHelperXe : : configureCcsMode ( std : : vector < std : : string > & files , const std : : string expectedFilePrefix , uint32_t ccsMode ,
std : : vector < std : : tuple < std : : string , uint32_t > > & deviceCcsModeVec ) {
// On Xe, path is /sys/class/drm/card0/device/tile*/gt*/ccs_mode
for ( const auto & file : files ) {
if ( file . find ( expectedFilePrefix . c_str ( ) ) = = std : : string : : npos ) {
continue ;
}
std : : string tilePath = file + " /device/tile " ;
auto tileFiles = Directory : : getFiles ( tilePath . c_str ( ) ) ;
for ( const auto & tileFile : tileFiles ) {
std : : string gtPath = tileFile + " /gt " ;
auto gtFiles = Directory : : getFiles ( gtPath . c_str ( ) ) ;
for ( const auto & gtFile : gtFiles ) {
writeCcsMode ( gtFile , ccsMode , deviceCcsModeVec ) ;
}
}
}
}
2023-01-09 12:07:23 +00:00
bool IoctlHelperXe::getFabricLatency(uint32_t fabricId, uint32_t &latency, uint32_t &bandwidth) {
    // Fabric latency/bandwidth query is not implemented for Xe; the output
    // parameters are left untouched and false reports "not available".
    return false;
}
2023-03-17 13:00:44 +00:00
bool IoctlHelperXe::isWaitBeforeBindRequired(bool bind) const {
    // Xe always requires waiting before (un)bind, regardless of direction.
    return true;
}
2023-09-12 14:57:55 +00:00
bool IoctlHelperXe::setGemTiling(void *setTiling) {
    // No set-tiling ioctl exists on Xe; report success without touching the argument.
    return true;
}
bool IoctlHelperXe::getGemTiling(void *setTiling) {
    // No get-tiling ioctl exists on Xe; report success without touching the argument.
    return true;
}
2023-09-26 10:44:11 +00:00
bool IoctlHelperXe::isImmediateVmBindRequired() const {
    // Xe binds are always issued immediately (no deferred bind path).
    return true;
}
2024-03-21 14:47:48 +00:00
2025-02-03 10:27:00 +00:00
bool IoctlHelperXe : : makeResidentBeforeLockNeeded ( ) const {
2025-09-11 05:18:55 +02:00
auto makeResidentBeforeLockNeeded = false ;
if ( debugManager . flags . EnableDeferBacking . get ( ) ) {
makeResidentBeforeLockNeeded = true ;
}
return makeResidentBeforeLockNeeded ;
2025-02-03 10:27:00 +00:00
}
2024-03-21 14:47:48 +00:00
void IoctlHelperXe : : insertEngineToContextParams ( ContextParamEngines < > & contextParamEngines , uint32_t engineId , const EngineClassInstance * engineClassInstance , uint32_t tileId , bool hasVirtualEngines ) {
auto engines = reinterpret_cast < drm_xe_engine_class_instance * > ( contextParamEngines . enginesData ) ;
if ( engineClassInstance ) {
engines [ engineId ] . engine_class = engineClassInstance - > engineClass ;
engines [ engineId ] . engine_instance = engineClassInstance - > engineInstance ;
2024-04-15 11:26:13 +00:00
engines [ engineId ] . gt_id = tileIdToGtId [ tileId ] ;
2024-03-21 14:47:48 +00:00
contextParamEngines . numEnginesInContext = std : : max ( contextParamEngines . numEnginesInContext , engineId + 1 ) ;
}
}
2024-03-18 23:41:10 +00:00
void IoctlHelperXe::registerBOBindHandle(Drm *drm, DrmAllocation *drmAllocation) {
    // Registers debugger-relevant allocations with the DRM resource registry and
    // tags their buffer objects so the debugger can capture / bind them.
    // Allocation types not listed below are ignored.
    DrmResourceClass resourceClass = DrmResourceClass::maxSize;
    switch (drmAllocation->getAllocationType()) {
    case AllocationType::debugContextSaveArea:
        resourceClass = DrmResourceClass::contextSaveArea;
        break;
    case AllocationType::debugSbaTrackingBuffer:
        resourceClass = DrmResourceClass::sbaTrackingBuffer;
        break;
    case AllocationType::debugModuleArea:
        resourceClass = DrmResourceClass::moduleHeapDebugArea;
        break;
    case AllocationType::kernelIsa:
        // Tile-instanced ISA is not registered as a resource; instead each BO
        // gets the sub-device bitfield as its bind-handle cookie.
        if (drmAllocation->storageInfo.tileInstanced) {
            auto &bos = drmAllocation->getBOs();
            for (auto bo : bos) {
                if (!bo) {
                    continue;
                }
                bo->setRegisteredBindHandleCookie(drmAllocation->storageInfo.subDeviceBitfield.to_ulong());
            }
        }
        return;
    default:
        return;
    }
    // Register the GPU address under the chosen resource class and attach the
    // returned handle to the allocation and all of its BOs.
    uint64_t gpuAddress = drmAllocation->getGpuAddress();
    auto handle = drm->registerResource(resourceClass, &gpuAddress, sizeof(gpuAddress));
    drmAllocation->addRegisteredBoBindHandle(handle);
    auto &bos = drmAllocation->getBOs();
    for (auto bo : bos) {
        if (!bo) {
            continue;
        }
        bo->addBindExtHandle(handle);
        // Debugger resources must be capturable and bound immediately.
        bo->markForCapture();
        bo->requireImmediateBinding(true);
    }
}
2024-04-16 16:19:33 +00:00
bool IoctlHelperXe::getFdFromVmExport(uint32_t vmId, uint32_t flags, int32_t *fd) {
    // Exporting a VM to a file descriptor is not supported on Xe; *fd is untouched.
    return false;
}
2025-08-29 00:16:27 +00:00
void IoctlHelperXe : : setOptionalContextProperties ( const OsContextLinux & osContext , Drm & drm , void * extProperties , uint32_t & extIndexInOut ) {
2024-07-02 19:15:10 +00:00
auto & ext = * reinterpret_cast < std : : array < drm_xe_ext_set_property , maxContextSetProperties > * > ( extProperties ) ;
if ( ( contextParamEngine [ 0 ] . engine_class = = DRM_XE_ENGINE_CLASS_RENDER ) | | ( contextParamEngine [ 0 ] . engine_class = = DRM_XE_ENGINE_CLASS_COMPUTE ) ) {
2025-08-29 00:16:27 +00:00
const bool isSecondaryContext = osContext . isPartOfContextGroup ( ) & & ( nullptr ! = osContext . getPrimaryContext ( ) ) ;
if ( ! isSecondaryContext & & drm . getRootDeviceEnvironment ( ) . executionEnvironment . isDebuggingEnabled ( ) ) {
2024-07-02 19:15:10 +00:00
ext [ extIndexInOut ] . base . next_extension = 0 ;
ext [ extIndexInOut ] . base . name = DRM_XE_EXEC_QUEUE_EXTENSION_SET_PROPERTY ;
2024-09-17 17:01:05 +00:00
ext [ extIndexInOut ] . property = getEudebugExtProperty ( ) ;
2025-02-04 20:24:41 +00:00
ext [ extIndexInOut ] . value = getEudebugExtPropertyValue ( ) ;
2024-07-02 19:15:10 +00:00
extIndexInOut + + ;
}
}
}
2025-02-24 15:34:54 +00:00
void IoctlHelperXe : : setContextProperties ( const OsContextLinux & osContext , uint32_t deviceIndex , void * extProperties , uint32_t & extIndexInOut ) {
2024-04-16 16:19:33 +00:00
auto & ext = * reinterpret_cast < std : : array < drm_xe_ext_set_property , maxContextSetProperties > * > ( extProperties ) ;
if ( osContext . isLowPriority ( ) ) {
ext [ extIndexInOut ] . base . name = DRM_XE_EXEC_QUEUE_EXTENSION_SET_PROPERTY ;
ext [ extIndexInOut ] . property = DRM_XE_EXEC_QUEUE_SET_PROPERTY_PRIORITY ;
ext [ extIndexInOut ] . value = 0 ;
if ( extIndexInOut > 0 ) {
ext [ extIndexInOut - 1 ] . base . next_extension = castToUint64 ( & ext [ extIndexInOut ] ) ;
}
extIndexInOut + + ;
}
}
unsigned int IoctlHelperXe::getIoctlRequestValue(DrmIoctl ioctlRequest) const {
    // Translates the API-agnostic DrmIoctl id into the Xe KMD's native ioctl
    // request number. RETURN_ME is a project macro that returns the given value
    // (see its definition for any tracing it performs). Unknown requests are fatal.
    xeLog(" -> IoctlHelperXe::%s 0x%x\n", __FUNCTION__, ioctlRequest);
    switch (ioctlRequest) {
    case DrmIoctl::gemClose:
        RETURN_ME(DRM_IOCTL_GEM_CLOSE);
    case DrmIoctl::gemVmCreate:
        RETURN_ME(DRM_IOCTL_XE_VM_CREATE);
    case DrmIoctl::gemVmDestroy:
        RETURN_ME(DRM_IOCTL_XE_VM_DESTROY);
    case DrmIoctl::gemMmapOffset:
        RETURN_ME(DRM_IOCTL_XE_GEM_MMAP_OFFSET);
    case DrmIoctl::gemCreate:
        RETURN_ME(DRM_IOCTL_XE_GEM_CREATE);
    case DrmIoctl::gemExecbuffer2:
        // Legacy i915 execbuffer id maps to Xe's exec ioctl.
        RETURN_ME(DRM_IOCTL_XE_EXEC);
    case DrmIoctl::gemVmBind:
        RETURN_ME(DRM_IOCTL_XE_VM_BIND);
    case DrmIoctl::query:
        RETURN_ME(DRM_IOCTL_XE_DEVICE_QUERY);
    case DrmIoctl::gemContextCreateExt:
        // Contexts are exec queues on Xe.
        RETURN_ME(DRM_IOCTL_XE_EXEC_QUEUE_CREATE);
    case DrmIoctl::gemContextDestroy:
        RETURN_ME(DRM_IOCTL_XE_EXEC_QUEUE_DESTROY);
    case DrmIoctl::gemWaitUserFence:
        RETURN_ME(DRM_IOCTL_XE_WAIT_USER_FENCE);
    case DrmIoctl::primeFdToHandle:
        RETURN_ME(DRM_IOCTL_PRIME_FD_TO_HANDLE);
    case DrmIoctl::primeHandleToFd:
        RETURN_ME(DRM_IOCTL_PRIME_HANDLE_TO_FD);
    case DrmIoctl::syncObjFdToHandle:
        RETURN_ME(DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE);
    case DrmIoctl::syncObjWait:
        RETURN_ME(DRM_IOCTL_SYNCOBJ_WAIT);
    case DrmIoctl::syncObjSignal:
        RETURN_ME(DRM_IOCTL_SYNCOBJ_SIGNAL);
    case DrmIoctl::syncObjTimelineWait:
        RETURN_ME(DRM_IOCTL_SYNCOBJ_TIMELINE_WAIT);
    case DrmIoctl::syncObjTimelineSignal:
        RETURN_ME(DRM_IOCTL_SYNCOBJ_TIMELINE_SIGNAL);
    case DrmIoctl::getResetStats:
        // Reset statistics are read through the exec-queue property query.
        RETURN_ME(DRM_IOCTL_XE_EXEC_QUEUE_GET_PROPERTY);
    case DrmIoctl::debuggerOpen:
    case DrmIoctl::metadataCreate:
    case DrmIoctl::metadataDestroy:
        // Debugger-specific ioctls are resolved by the eudebug helper.
        return getIoctlRequestValueDebugger(ioctlRequest);
    case DrmIoctl::perfOpen:
    case DrmIoctl::perfEnable:
    case DrmIoctl::perfDisable:
    case DrmIoctl::perfQuery:
        // Performance/observation ioctls are resolved by the perf helper.
        return getIoctlRequestValuePerf(ioctlRequest);
    default:
        UNRECOVERABLE_IF(true);
        return 0;
    }
}
2024-05-29 19:35:26 +00:00
int IoctlHelperXe::ioctl(int fd, DrmIoctl request, void *arg) {
    // Issues the ioctl on an explicitly supplied file descriptor, translating
    // the abstract request id to the Xe-native request number first.
    return NEO::SysCalls::ioctl(fd, getIoctlRequestValue(request), arg);
}
2024-04-16 16:19:33 +00:00
std : : string IoctlHelperXe : : getIoctlString ( DrmIoctl ioctlRequest ) const {
switch ( ioctlRequest ) {
case DrmIoctl : : gemClose :
STRINGIFY_ME ( DRM_IOCTL_GEM_CLOSE ) ;
case DrmIoctl : : gemVmCreate :
STRINGIFY_ME ( DRM_IOCTL_XE_VM_CREATE ) ;
case DrmIoctl : : gemVmDestroy :
STRINGIFY_ME ( DRM_IOCTL_XE_VM_DESTROY ) ;
case DrmIoctl : : gemMmapOffset :
STRINGIFY_ME ( DRM_IOCTL_XE_GEM_MMAP_OFFSET ) ;
case DrmIoctl : : gemCreate :
STRINGIFY_ME ( DRM_IOCTL_XE_GEM_CREATE ) ;
case DrmIoctl : : gemExecbuffer2 :
STRINGIFY_ME ( DRM_IOCTL_XE_EXEC ) ;
case DrmIoctl : : gemVmBind :
STRINGIFY_ME ( DRM_IOCTL_XE_VM_BIND ) ;
case DrmIoctl : : query :
STRINGIFY_ME ( DRM_IOCTL_XE_DEVICE_QUERY ) ;
case DrmIoctl : : gemContextCreateExt :
STRINGIFY_ME ( DRM_IOCTL_XE_EXEC_QUEUE_CREATE ) ;
case DrmIoctl : : gemContextDestroy :
STRINGIFY_ME ( DRM_IOCTL_XE_EXEC_QUEUE_DESTROY ) ;
case DrmIoctl : : gemWaitUserFence :
STRINGIFY_ME ( DRM_IOCTL_XE_WAIT_USER_FENCE ) ;
case DrmIoctl : : primeFdToHandle :
STRINGIFY_ME ( DRM_IOCTL_PRIME_FD_TO_HANDLE ) ;
case DrmIoctl : : primeHandleToFd :
STRINGIFY_ME ( DRM_IOCTL_PRIME_HANDLE_TO_FD ) ;
2025-05-13 16:44:10 +00:00
case DrmIoctl : : syncObjFdToHandle :
STRINGIFY_ME ( DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE ) ;
2025-05-18 18:08:22 +00:00
case DrmIoctl : : syncObjWait :
STRINGIFY_ME ( DRM_IOCTL_SYNCOBJ_WAIT ) ;
case DrmIoctl : : syncObjSignal :
STRINGIFY_ME ( DRM_IOCTL_SYNCOBJ_SIGNAL ) ;
2025-05-22 23:13:11 +00:00
case DrmIoctl : : syncObjTimelineWait :
STRINGIFY_ME ( DRM_IOCTL_SYNCOBJ_TIMELINE_WAIT ) ;
case DrmIoctl : : syncObjTimelineSignal :
STRINGIFY_ME ( DRM_IOCTL_SYNCOBJ_TIMELINE_SIGNAL ) ;
2024-04-16 16:19:33 +00:00
case DrmIoctl : : debuggerOpen :
STRINGIFY_ME ( DRM_IOCTL_XE_EUDEBUG_CONNECT ) ;
case DrmIoctl : : metadataCreate :
STRINGIFY_ME ( DRM_IOCTL_XE_DEBUG_METADATA_CREATE ) ;
case DrmIoctl : : metadataDestroy :
STRINGIFY_ME ( DRM_IOCTL_XE_DEBUG_METADATA_DESTROY ) ;
2024-04-24 08:23:11 +00:00
case DrmIoctl : : getResetStats :
STRINGIFY_ME ( DRM_IOCTL_XE_EXEC_QUEUE_GET_PROPERTY ) ;
2024-04-16 16:19:33 +00:00
default :
return " ??? " ;
}
}
2024-07-25 10:47:37 +00:00
2025-06-24 11:16:24 +00:00
void * IoctlHelperXe : : pciBarrierMmap ( ) {
GemMmapOffset mmapOffset = { } ;
mmapOffset . flags = DRM_XE_MMAP_OFFSET_FLAG_PCI_BARRIER ;
auto ret = ioctl ( DrmIoctl : : gemMmapOffset , & mmapOffset ) ;
if ( ret ! = 0 ) {
return MAP_FAILED ;
}
return SysCalls : : mmap ( NULL , MemoryConstants : : pageSize , PROT_WRITE , MAP_SHARED , drm . getFileDescriptor ( ) , static_cast < off_t > ( mmapOffset . offset ) ) ;
}
bool IoctlHelperXe::retrieveMmapOffsetForBufferObject(BufferObject &bo, uint64_t flags, uint64_t &offset) {
    // Queries the mmap offset for a BO. First tries with default flags; if that
    // fails on a device with local memory support, retries with the caller's
    // flags. On final failure, records a diagnostic and returns false.
    GemMmapOffset mmapOffset = {};
    mmapOffset.handle = bo.peekHandle();
    auto &rootDeviceEnvironment = drm.getRootDeviceEnvironment();
    auto memoryManager = rootDeviceEnvironment.executionEnvironment.memoryManager.get();
    auto ret = ioctl(DrmIoctl::gemMmapOffset, &mmapOffset);
    if (ret != 0 && memoryManager->isLocalMemorySupported(bo.getRootDeviceIndex())) {
        // Retry with the explicit caller-provided flags.
        mmapOffset.flags = flags;
        ret = ioctl(DrmIoctl::gemMmapOffset, &mmapOffset);
    }
    if (ret != 0) {
        // Capture errno-based details both in the execution environment's error
        // description and (optionally) on stderr before giving up.
        int err = drm.getErrno();
        CREATE_DEBUG_STRING(str, "ioctl(%s) failed with %d. errno=%d(%s)\n",
                            getIoctlString(DrmIoctl::gemMmapOffset).c_str(), ret, err, strerror(err));
        drm.getRootDeviceEnvironment().executionEnvironment.setErrorDescription(std::string(str.get()));
        PRINT_DEBUG_STRING(debugManager.flags.PrintDebugMessages.get(), stderr, str.get());
        DEBUG_BREAK_IF(true);
        return false;
    }
    offset = mmapOffset.offset;
    return true;
}
2025-08-25 13:22:55 +00:00
bool IoctlHelperXe : : is2MBSizeAlignmentRequired ( AllocationType allocationType ) const {
if ( debugManager . flags . Disable2MBSizeAlignment . get ( ) ) {
return false ;
}
auto & rootDeviceEnvironment = drm . getRootDeviceEnvironment ( ) ;
auto hwInfo = rootDeviceEnvironment . getHardwareInfo ( ) ;
auto memoryManager = rootDeviceEnvironment . executionEnvironment . memoryManager . get ( ) ;
if ( hwInfo - > capabilityTable . isIntegratedDevice ) {
return memoryManager - > isExternalAllocation ( allocationType ) ;
}
return false ;
}
2023-10-04 22:57:00 +00:00
} // namespace NEO