/*
 * Copyright (C) 2017-2021 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 *
 */
2020-02-24 05:44:01 +08:00
# include "shared/source/os_interface/linux/drm_memory_manager.h"
2019-02-27 18:39:32 +08:00
2020-02-24 05:44:01 +08:00
# include "shared/source/command_stream/command_stream_receiver.h"
# include "shared/source/execution_environment/execution_environment.h"
# include "shared/source/execution_environment/root_device_environment.h"
# include "shared/source/gmm_helper/gmm.h"
# include "shared/source/gmm_helper/gmm_helper.h"
# include "shared/source/gmm_helper/resource_info.h"
2020-08-27 14:55:09 +08:00
# include "shared/source/helpers/heap_assigner.h"
2020-02-24 05:44:01 +08:00
# include "shared/source/helpers/hw_info.h"
# include "shared/source/helpers/ptr_math.h"
# include "shared/source/helpers/surface_format_info.h"
# include "shared/source/memory_manager/host_ptr_manager.h"
# include "shared/source/memory_manager/residency.h"
# include "shared/source/os_interface/linux/allocator_helper.h"
2020-05-08 16:04:06 +08:00
# include "shared/source/os_interface/linux/drm_memory_operations_handler.h"
2020-02-24 05:44:01 +08:00
# include "shared/source/os_interface/linux/os_context_linux.h"
# include "shared/source/os_interface/linux/os_interface.h"
2017-12-21 07:45:38 +08:00
# include "drm/i915_drm.h"
2019-02-27 18:39:32 +08:00
# include <cstring>
# include <iostream>
2020-07-17 19:40:52 +08:00
# include <memory>
2017-12-21 07:45:38 +08:00
2019-03-26 18:59:46 +08:00
namespace NEO {
2017-12-21 07:45:38 +08:00
2019-03-25 20:12:55 +08:00
// Constructs the Linux/DRM memory manager. Stores the pinning/validation
// policy flags and delegates all real setup work to initialize(mode).
DrmMemoryManager::DrmMemoryManager(gemCloseWorkerMode mode,
                                   bool forcePinAllowed,
                                   bool validateHostPtrMemory,
                                   ExecutionEnvironment &executionEnvironment) : MemoryManager(executionEnvironment),
                                                                                 forcePinEnabled(forcePinAllowed),
                                                                                 validateHostPtrMemory(validateHostPtrMemory) {
    initialize(mode);
}
// One-time setup invoked from the constructor:
//  1. initializes a GfxPartition per root device (failure leaves the manager
//     with initialized == false),
//  2. resolves the gem-close-worker mode (debug flags may override the
//     requested mode),
//  3. when pinning or host-ptr validation is enabled, creates one pinning BO
//     per root device, preprogrammed with MI_BATCH_BUFFER_END + MI_NOOP.
void DrmMemoryManager::initialize(gemCloseWorkerMode mode) {
    for (uint32_t rootDeviceIndex = 0; rootDeviceIndex < gfxPartitions.size(); ++rootDeviceIndex) {
        auto gpuAddressSpace = executionEnvironment.rootDeviceEnvironments[rootDeviceIndex]->getHardwareInfo()->capabilityTable.gpuAddressSpace;
        if (!getGfxPartition(rootDeviceIndex)->init(gpuAddressSpace, getSizeToReserve(), rootDeviceIndex, gfxPartitions.size(), heapAssigner.apiAllowExternalHeapForSshAndDsh)) {
            // Partition init failed - abort; initialized stays false so callers
            // can detect the failure.
            initialized = false;
            return;
        }
        localMemAllocs.emplace_back();
    }

    MemoryManager::virtualPaddingAvailable = true;

    // Direct submission is incompatible with the gem-close worker.
    if (DebugManager.flags.EnableDirectSubmission.get() == 1) {
        mode = gemCloseWorkerMode::gemCloseWorkerInactive;
    }

    // Explicit debug override takes precedence over the requested mode.
    if (DebugManager.flags.EnableGemCloseWorker.get() != -1) {
        mode = DebugManager.flags.EnableGemCloseWorker.get() ? gemCloseWorkerMode::gemCloseWorkerActive : gemCloseWorkerMode::gemCloseWorkerInactive;
    }

    if (mode != gemCloseWorkerMode::gemCloseWorkerInactive) {
        gemCloseWorker.reset(new DrmGemCloseWorker(*this));
    }

    for (uint32_t rootDeviceIndex = 0; rootDeviceIndex < gfxPartitions.size(); ++rootDeviceIndex) {
        BufferObject *bo = nullptr;
        if (forcePinEnabled || validateHostPtrMemory) {
            auto cpuAddrBo = alignedMallocWrapper(MemoryConstants::pageSize, MemoryConstants::pageSize);
            UNRECOVERABLE_IF(cpuAddrBo == nullptr);
            // Preprogram the Bo with MI_BATCH_BUFFER_END and MI_NOOP. This BO will be used as the last BB in a series to indicate the end of submission.
            reinterpret_cast<uint32_t *>(cpuAddrBo)[0] = 0x05000000; // MI_BATCH_BUFFER_END
            reinterpret_cast<uint32_t *>(cpuAddrBo)[1] = 0;          // MI_NOOP
            memoryForPinBBs.push_back(cpuAddrBo);
            DEBUG_BREAK_IF(memoryForPinBBs[rootDeviceIndex] == nullptr);

            bo = allocUserptr(reinterpret_cast<uintptr_t>(memoryForPinBBs[rootDeviceIndex]), MemoryConstants::pageSize, 0, rootDeviceIndex);
            if (bo) {
                if (isLimitedRange(rootDeviceIndex)) {
                    // Limited-range devices need an explicitly reserved GPU VA.
                    bo->gpuAddress = acquireGpuRange(bo->size, rootDeviceIndex, HeapIndex::HEAP_STANDARD);
                }
            } else {
                // Userptr creation failed: free the backing memory; fatal only
                // when host-ptr validation requires the pinning BO.
                alignedFreeWrapper(memoryForPinBBs[rootDeviceIndex]);
                memoryForPinBBs[rootDeviceIndex] = nullptr;
                DEBUG_BREAK_IF(true);
                UNRECOVERABLE_IF(validateHostPtrMemory);
            }
        }
        // One entry per root device, nullptr when pinning is disabled/failed.
        pinBBs.push_back(bo);
    }

    initialized = true;
}
// Frees the host-side pages that backed the pinning BOs. The BOs themselves
// are unreferenced in commonCleanup(), so only the CPU buffers remain here.
DrmMemoryManager::~DrmMemoryManager() {
    for (size_t idx = 0u; idx < memoryForPinBBs.size(); ++idx) {
        auto hostBuffer = memoryForPinBBs[idx];
        if (hostBuffer != nullptr) {
            MemoryManager::alignedFreeWrapper(hostBuffer);
        }
    }
}
// Tears down DRM-side state: stops the gem-close worker and destroys all
// per-root-device pinning BOs (releasing their reserved GPU ranges on
// limited-range devices).
void DrmMemoryManager::commonCleanup() {
    if (gemCloseWorker) {
        gemCloseWorker->close(false);
    }

    for (uint32_t rootDeviceIndex = 0; rootDeviceIndex < pinBBs.size(); ++rootDeviceIndex) {
        if (auto bo = pinBBs[rootDeviceIndex]) {
            if (isLimitedRange(rootDeviceIndex)) {
                // The VA was acquired in initialize(); give it back to the heap.
                releaseGpuRange(reinterpret_cast<void *>(bo->gpuAddress), bo->size, rootDeviceIndex);
            }
            // synchronousDestroy == true: wait until this is the last reference.
            DrmMemoryManager::unreference(bo, true);
        }
    }
    pinBBs.clear();
}
2019-03-26 18:59:46 +08:00
// Removes a shared (imported) BO from the reuse registry and releases the GPU
// VA range that was reserved for it on import.
// NOTE(review): expected to run with `mtx` held — unreference() takes the lock
// before calling this for reused BOs.
void DrmMemoryManager::eraseSharedBufferObject(NEO::BufferObject *bo) {
    auto it = std::find(sharingBufferObjects.begin(), sharingBufferObjects.end(), bo);
    DEBUG_BREAK_IF(it == sharingBufferObjects.end());
    releaseGpuRange(reinterpret_cast<void *>((*it)->gpuAddress), (*it)->peekUnmapSize(), this->getRootDeviceIndex(bo->drm));
    sharingBufferObjects.erase(it);
}
2019-03-26 18:59:46 +08:00
// Registers an imported BO for reuse so subsequent imports of the same handle
// find it via findAndReferenceSharedBufferObject instead of re-importing.
void DrmMemoryManager::pushSharedBufferObject(NEO::BufferObject *bo) {
    bo->isReused = true;
    sharingBufferObjects.push_back(bo);
}
2019-03-26 18:59:46 +08:00
// Drops one reference from a BO and destroys it when the count reaches zero.
// Returns the reference count observed before the decrement (so 1 means "this
// call destroyed the BO"), or -1 (wrapped) for a null BO.
uint32_t DrmMemoryManager::unreference(NEO::BufferObject *bo, bool synchronousDestroy) {
    if (!bo)
        return -1;

    if (synchronousDestroy) {
        // Busy-wait until all other holders have released their references, so
        // the decrement below is guaranteed to be the final one.
        while (bo->refCount > 1)
            ;
    }

    // Only shared (reused) BOs touch sharingBufferObjects, so the mutex is
    // taken lazily for them alone.
    std::unique_lock<std::mutex> lock(mtx, std::defer_lock);
    if (bo->isReused) {
        lock.lock();
    }

    uint32_t r = bo->refCount.fetch_sub(1);
    if (r == 1) {
        if (bo->isReused) {
            eraseSharedBufferObject(bo);
        }

        bo->close();

        if (lock) {
            lock.unlock();
        }

        delete bo;
    }
    return r;
}
2021-03-10 06:41:46 +08:00
uint64_t DrmMemoryManager : : acquireGpuRange ( size_t & size , uint32_t rootDeviceIndex , HeapIndex heapIndex ) {
2019-10-22 16:26:23 +08:00
auto gfxPartition = getGfxPartition ( rootDeviceIndex ) ;
2021-03-10 06:41:46 +08:00
return GmmHelper : : canonize ( gfxPartition - > heapAllocate ( heapIndex , size ) ) ;
2018-11-16 02:43:12 +08:00
}
2019-10-22 16:26:23 +08:00
void DrmMemoryManager : : releaseGpuRange ( void * address , size_t unmapSize , uint32_t rootDeviceIndex ) {
2018-11-16 02:43:12 +08:00
uint64_t graphicsAddress = static_cast < uint64_t > ( reinterpret_cast < uintptr_t > ( address ) ) ;
2019-07-09 01:05:23 +08:00
graphicsAddress = GmmHelper : : decanonize ( graphicsAddress ) ;
2019-10-22 16:26:23 +08:00
auto gfxPartition = getGfxPartition ( rootDeviceIndex ) ;
2019-07-29 23:50:46 +08:00
gfxPartition - > freeGpuAddressRange ( graphicsAddress , unmapSize ) ;
2018-11-16 02:43:12 +08:00
}
2021-02-03 22:53:13 +08:00
bool DrmMemoryManager : : isKmdMigrationAvailable ( uint32_t rootDeviceIndex ) {
auto hwInfo = executionEnvironment . rootDeviceEnvironments [ rootDeviceIndex ] - > getHardwareInfo ( ) ;
auto & hwHelper = NEO : : HwHelper : : get ( hwInfo - > platform . eRenderCoreFamily ) ;
auto useKmdMigration = hwHelper . isKmdMigrationSupported ( * hwInfo ) & &
this - > getDrm ( rootDeviceIndex ) . isVmBindAvailable ( ) ;
if ( DebugManager . flags . UseKmdMigration . get ( ) ! = - 1 ) {
useKmdMigration = DebugManager . flags . UseKmdMigration . get ( ) ;
}
return useKmdMigration ;
}
2019-10-22 16:26:23 +08:00
// Wraps an existing CPU memory range in a GEM buffer object via
// DRM_IOCTL_I915_GEM_USERPTR. Returns nullptr if the ioctl or the BufferObject
// allocation fails. The BO's GPU address is initially set to the CPU address
// (callers overwrite it when a separate GPU VA is reserved).
NEO::BufferObject *DrmMemoryManager::allocUserptr(uintptr_t address, size_t size, uint64_t flags, uint32_t rootDeviceIndex) {
    drm_i915_gem_userptr userptr = {};
    userptr.user_ptr = address;
    userptr.user_size = size;
    userptr.flags = static_cast<uint32_t>(flags);

    if (this->getDrm(rootDeviceIndex).ioctl(DRM_IOCTL_I915_GEM_USERPTR, &userptr) != 0) {
        return nullptr;
    }

    PRINT_DEBUG_STRING(DebugManager.flags.PrintBOCreateDestroyResult.get(), stdout, "Created new BO with GEM_USERPTR, handle: BO-%d\n", userptr.handle);

    auto res = new (std::nothrow) BufferObject(&getDrm(rootDeviceIndex), userptr.handle, size, maxOsContextCount);
    if (!res) {
        // NOTE(review): the GEM handle from the successful ioctl is not closed
        // on this out-of-memory path - presumably acceptable as the process is
        // already failing allocations; confirm against BufferObject ownership.
        DEBUG_BREAK_IF(true);
        return nullptr;
    }
    res->gpuAddress = address;

    return res;
}
2019-02-25 21:11:34 +08:00
// Pins a BO through the per-device pinning BB when all gating conditions hold:
// pinning enabled, a pinning BO exists for this device, the allocation asked
// for forcePin, and the allocation is large enough to be worth pinning.
void DrmMemoryManager::emitPinningRequest(BufferObject *bo, const AllocationData &allocationData) const {
    auto rootDeviceIndex = allocationData.rootDeviceIndex;
    if (forcePinEnabled && pinBBs.at(rootDeviceIndex) != nullptr && allocationData.flags.forcePin && allocationData.size >= this->pinThreshold) {
        pinBBs.at(rootDeviceIndex)->pin(&bo, 1, registeredEngines[defaultEngineIndex[rootDeviceIndex]].osContext, 0, getDefaultDrmContextId(rootDeviceIndex));
    }
}
// Builds a DrmAllocation around pre-populated fragment storage (no BO of its
// own: the fragments carry the BOs). Returns nullptr if the requested cache
// region cannot be applied.
DrmAllocation *DrmMemoryManager::createGraphicsAllocation(OsHandleStorage &handleStorage, const AllocationData &allocationData) {
    auto hostPtr = const_cast<void *>(allocationData.hostPtr);
    auto allocation = std::make_unique<DrmAllocation>(allocationData.rootDeviceIndex, allocationData.type, nullptr, hostPtr, castToUint64(hostPtr), allocationData.size, MemoryPool::System4KBPages);
    allocation->fragmentsStorage = handleStorage;
    if (!allocation->setCacheRegion(&this->getDrm(allocationData.rootDeviceIndex), static_cast<CacheRegion>(allocationData.cacheRegion))) {
        return nullptr;
    }
    return allocation.release();
}
2018-11-30 18:01:33 +08:00
// Computes the effective size/alignment for an aligned system-memory
// allocation and, for limited-range devices or SVM_CPU allocations, reserves
// a GPU VA up front; the actual BO creation is delegated to
// createAllocWithAlignment.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemoryWithAlignment(const AllocationData &allocationData) {
    const size_t minAlignment = getUserptrAlignment();
    size_t cAlignment = alignUp(std::max(allocationData.alignment, minAlignment), minAlignment);
    // When size == 0 allocate allocationAlignment
    // It's needed to prevent overlapping pages with user pointers
    size_t cSize = std::max(alignUp(allocationData.size, minAlignment), minAlignment);

    uint64_t gpuAddress = 0;
    size_t alignedSize = cSize;
    auto svmCpuAllocation = allocationData.type == GraphicsAllocation::AllocationType::SVM_CPU;
    if (svmCpuAllocation) {
        // add 2MB padding in case reserved addr is not 2MB aligned
        alignedSize = alignUp(cSize, cAlignment) + cAlignment;
    }

    // if limitedRangeAlloction is enabled, memory allocation for bo in the limited Range heap is required
    if ((isLimitedRange(allocationData.rootDeviceIndex) || svmCpuAllocation) && !allocationData.flags.isUSMHostAllocation) {
        gpuAddress = acquireGpuRange(alignedSize, allocationData.rootDeviceIndex, HeapIndex::HEAP_STANDARD);
        if (!gpuAddress) {
            return nullptr;
        }

        if (svmCpuAllocation) {
            // Consume part of the padding to reach the requested alignment.
            gpuAddress = alignUp(gpuAddress, cAlignment);
        }
    }

    return createAllocWithAlignment(allocationData, cSize, cAlignment, alignedSize, gpuAddress);
}
2019-02-08 03:29:30 +08:00
2020-10-05 15:57:50 +08:00
// Allocates aligned host memory, wraps it in a userptr BO, optionally assigns
// the pre-reserved GPU VA (limited-range / SVM_CPU), and packages everything
// into a DrmAllocation that owns both the BO and the CPU buffer.
DrmAllocation *DrmMemoryManager::createAllocWithAlignmentFromUserptr(const AllocationData &allocationData, size_t size, size_t alignment, size_t alignedSVMSize, uint64_t gpuAddress) {
    auto res = alignedMallocWrapper(size, alignment);
    if (!res) {
        return nullptr;
    }

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(reinterpret_cast<uintptr_t>(res), size, 0, allocationData.rootDeviceIndex));
    if (!bo) {
        alignedFreeWrapper(res);
        return nullptr;
    }

    obtainGpuAddress(allocationData, bo.get(), gpuAddress);
    emitPinningRequest(bo.get(), allocationData);

    auto allocation = std::make_unique<DrmAllocation>(allocationData.rootDeviceIndex, allocationData.type, bo.get(), res, bo->gpuAddress, size, MemoryPool::System4KBPages);
    allocation->setDriverAllocatedCpuPtr(res);
    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuAddress), alignedSVMSize);
    if (!allocation->setCacheRegion(&this->getDrm(allocationData.rootDeviceIndex), static_cast<CacheRegion>(allocationData.cacheRegion))) {
        // BO is closed by its unique_ptr deleter; the CPU buffer is freed here.
        // NOTE(review): the reserved GPU range (gpuAddress) acquired by the
        // caller is not released on this path - verify against
        // freeGraphicsMemory's handling of reservedAddressRange.
        alignedFreeWrapper(res);
        return nullptr;
    }

    // Ownership of the BO transfers to the allocation.
    bo.release();

    return allocation.release();
}
2020-10-05 15:57:50 +08:00
void DrmMemoryManager : : obtainGpuAddress ( const AllocationData & allocationData , BufferObject * bo , uint64_t gpuAddress ) {
if ( ( isLimitedRange ( allocationData . rootDeviceIndex ) | | allocationData . type = = GraphicsAllocation : : AllocationType : : SVM_CPU ) & &
! allocationData . flags . isUSMHostAllocation ) {
bo - > gpuAddress = gpuAddress ;
}
}
2020-09-03 15:57:05 +08:00
// Creates a USM host allocation around a caller-provided host pointer: wraps
// the pointer in a userptr BO and, on limited-range devices, binds it to a
// freshly reserved GPU VA. Returns nullptr on BO or VA acquisition failure.
DrmAllocation *DrmMemoryManager::allocateUSMHostGraphicsMemory(const AllocationData &allocationData) {
    const size_t minAlignment = getUserptrAlignment();
    // When size == 0 allocate allocationAlignment
    // It's needed to prevent overlapping pages with user pointers
    size_t cSize = std::max(alignUp(allocationData.size, minAlignment), minAlignment);

    void *bufferPtr = const_cast<void *>(allocationData.hostPtr);
    DEBUG_BREAK_IF(nullptr == bufferPtr);

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(reinterpret_cast<uintptr_t>(bufferPtr),
                                                                         cSize,
                                                                         0,
                                                                         allocationData.rootDeviceIndex));
    if (!bo) {
        return nullptr;
    }

    // if limitedRangeAlloction is enabled, memory allocation for bo in the limited Range heap is required
    uint64_t gpuAddress = 0;
    if (isLimitedRange(allocationData.rootDeviceIndex)) {
        gpuAddress = acquireGpuRange(cSize, allocationData.rootDeviceIndex, HeapIndex::HEAP_STANDARD);
        if (!gpuAddress) {
            return nullptr;
        }
        bo->gpuAddress = gpuAddress;
    }

    emitPinningRequest(bo.get(), allocationData);

    auto allocation = new DrmAllocation(allocationData.rootDeviceIndex,
                                        allocationData.type,
                                        bo.get(),
                                        bufferPtr,
                                        bo->gpuAddress,
                                        cSize,
                                        MemoryPool::System4KBPages);
    // gpuAddress stays 0 on full-range devices, recording "no reservation".
    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuAddress), cSize);

    // Ownership of the BO transfers to the allocation.
    bo.release();
    return allocation;
}
2018-11-30 18:01:33 +08:00
// Delegates to the common host-ptr allocation path and then emits a pinning
// request for the resulting BO - unless host-ptr validation is enabled, in
// which case pinning is handled by the validation flow instead.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemoryWithHostPtr(const AllocationData &allocationData) {
    auto allocation = static_cast<DrmAllocation *>(MemoryManager::allocateGraphicsMemoryWithHostPtr(allocationData));
    if (allocation == nullptr || validateHostPtrMemory) {
        return allocation;
    }
    emitPinningRequest(allocation->getBO(), allocationData);
    return allocation;
}
2020-07-01 16:38:19 +08:00
// Allocates host memory wrapped in a userptr BO and binds it to a GPU VA that
// the caller already chose (allocationData.gpuAddress must be non-zero). The
// pinning request, when taken, uses the caller-supplied OS context rather than
// the default engine.
GraphicsAllocation *DrmMemoryManager::allocateGraphicsMemoryWithGpuVa(const AllocationData &allocationData) {
    auto osContextLinux = static_cast<OsContextLinux *>(allocationData.osContext);

    const size_t minAlignment = getUserptrAlignment();
    size_t alignedSize = alignUp(allocationData.size, minAlignment);

    auto res = alignedMallocWrapper(alignedSize, minAlignment);
    if (!res)
        return nullptr;

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(reinterpret_cast<uintptr_t>(res), alignedSize, 0, allocationData.rootDeviceIndex));
    if (!bo) {
        alignedFreeWrapper(res);
        return nullptr;
    }

    UNRECOVERABLE_IF(allocationData.gpuAddress == 0);
    bo->gpuAddress = allocationData.gpuAddress;

    BufferObject *boPtr = bo.get();
    if (forcePinEnabled && pinBBs.at(allocationData.rootDeviceIndex) != nullptr && alignedSize >= this->pinThreshold) {
        pinBBs.at(allocationData.rootDeviceIndex)->pin(&boPtr, 1, osContextLinux, 0, osContextLinux->getDrmContextIds()[0]);
    }

    auto allocation = new DrmAllocation(allocationData.rootDeviceIndex, allocationData.type, bo.get(), res, bo->gpuAddress, alignedSize, MemoryPool::System4KBPages);
    allocation->setDriverAllocatedCpuPtr(res);

    // Ownership of the BO transfers to the allocation.
    bo.release();

    return allocation;
}
2019-03-12 19:00:41 +08:00
// Maps a caller-owned (non-SVM) host pointer onto the GPU: page-aligns the
// pointer/size, reserves a GPU VA, wraps the aligned range in a userptr BO and
// optionally validates it via the pinning BO. The sub-page offset of the
// original pointer is preserved in the allocation's offset.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemoryForNonSvmHostPtr(const AllocationData &allocationData) {
    if (allocationData.size == 0 || !allocationData.hostPtr)
        return nullptr;

    auto alignedPtr = alignDown(allocationData.hostPtr, MemoryConstants::pageSize);
    auto alignedSize = alignSizeWholePage(allocationData.hostPtr, allocationData.size);
    auto realAllocationSize = alignedSize;
    auto offsetInPage = ptrDiff(allocationData.hostPtr, alignedPtr);
    auto rootDeviceIndex = allocationData.rootDeviceIndex;

    auto gpuVirtualAddress = acquireGpuRange(alignedSize, rootDeviceIndex, HeapIndex::HEAP_STANDARD);
    if (!gpuVirtualAddress) {
        return nullptr;
    }

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(reinterpret_cast<uintptr_t>(alignedPtr), realAllocationSize, 0, rootDeviceIndex));
    if (!bo) {
        // Unwind: return the reserved VA before bailing out.
        releaseGpuRange(reinterpret_cast<void *>(gpuVirtualAddress), alignedSize, rootDeviceIndex);
        return nullptr;
    }

    bo->gpuAddress = gpuVirtualAddress;

    if (validateHostPtrMemory) {
        auto boPtr = bo.get();
        int result = pinBBs.at(rootDeviceIndex)->validateHostPtr(&boPtr, 1, registeredEngines[defaultEngineIndex[rootDeviceIndex]].osContext, 0, getDefaultDrmContextId(rootDeviceIndex));
        if (result != 0) {
            // Validation failed: destroy the BO synchronously and release the VA.
            unreference(bo.release(), true);
            releaseGpuRange(reinterpret_cast<void *>(gpuVirtualAddress), alignedSize, rootDeviceIndex);
            return nullptr;
        }
    }

    // The allocation reports the caller's original (unaligned) pointer/size;
    // the page offset is recorded separately.
    auto allocation = new DrmAllocation(allocationData.rootDeviceIndex, allocationData.type, bo.get(), const_cast<void *>(allocationData.hostPtr),
                                        gpuVirtualAddress, allocationData.size, MemoryPool::System4KBPages);
    allocation->setAllocationOffset(offsetInPage);

    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuVirtualAddress), alignedSize);
    // Ownership of the BO transfers to the allocation.
    bo.release();
    return allocation;
}
2019-02-28 21:12:13 +08:00
// 64KB-page allocations are not implemented in this memory manager; always
// reports failure via nullptr.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemory64kb(const AllocationData &allocationData) {
    return nullptr;
}
2019-11-14 17:08:59 +08:00
// Allocates shareable device memory: reserves a GPU VA range from the 64KB
// heap, creates a GEM buffer via DRM_IOCTL_I915_GEM_CREATE and wraps it in a
// DrmAllocation that also owns the Gmm describing the resource.
// Returns nullptr on failure, releasing the reserved GPU range.
GraphicsAllocation *DrmMemoryManager::allocateShareableMemory(const AllocationData &allocationData) {
    auto gmm = std::make_unique<Gmm>(executionEnvironment.rootDeviceEnvironments[allocationData.rootDeviceIndex]->getGmmClientContext(), allocationData.hostPtr, allocationData.size, 0u, false);
    size_t bufferSize = allocationData.size;
    uint64_t gpuRange = acquireGpuRange(bufferSize, allocationData.rootDeviceIndex, HeapIndex::HEAP_STANDARD64KB);

    drm_i915_gem_create create = {0, 0, 0};
    create.size = bufferSize;

    auto ret = this->getDrm(allocationData.rootDeviceIndex).ioctl(DRM_IOCTL_I915_GEM_CREATE, &create);
    DEBUG_BREAK_IF(ret != 0);
    if (ret != 0) {
        // Fix: previously a failed GEM_CREATE was ignored and a BufferObject
        // was built around an invalid handle while the reserved VA leaked.
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), bufferSize, allocationData.rootDeviceIndex);
        return nullptr;
    }

    // new (std::nothrow) for consistency with the other BO creation paths.
    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(new (std::nothrow) BufferObject(&getDrm(allocationData.rootDeviceIndex), create.handle, bufferSize, maxOsContextCount));
    if (!bo) {
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), bufferSize, allocationData.rootDeviceIndex);
        return nullptr;
    }
    bo->gpuAddress = gpuRange;

    auto allocation = new DrmAllocation(allocationData.rootDeviceIndex, allocationData.type, bo.get(), nullptr, gpuRange, bufferSize, MemoryPool::SystemCpuInaccessible);
    allocation->setDefaultGmm(gmm.release());
    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuRange), bufferSize);
    // Ownership of the BO transfers to the allocation.
    bo.release();
    return allocation;
}
2019-01-22 19:40:17 +08:00
// Allocates backing memory for an image. Linear images reuse the generic
// aligned path; tiled images get a dedicated GEM buffer (GEM_CREATE) bound to
// a reserved GPU VA with Y-tiling applied. The Gmm ownership moves into the
// resulting allocation. Returns nullptr on failure.
GraphicsAllocation *DrmMemoryManager::allocateGraphicsMemoryForImageImpl(const AllocationData &allocationData, std::unique_ptr<Gmm> gmm) {
    if (allocationData.imgInfo->linearStorage) {
        auto alloc = allocateGraphicsMemoryWithAlignment(allocationData);
        if (alloc) {
            alloc->setDefaultGmm(gmm.release());
        }
        return alloc;
    }

    uint64_t gpuRange = acquireGpuRange(allocationData.imgInfo->size, allocationData.rootDeviceIndex, HeapIndex::HEAP_STANDARD);

    drm_i915_gem_create create = {0, 0, 0};
    create.size = allocationData.imgInfo->size;

    auto ret = this->getDrm(allocationData.rootDeviceIndex).ioctl(DRM_IOCTL_I915_GEM_CREATE, &create);
    DEBUG_BREAK_IF(ret != 0);
    UNUSED_VARIABLE(ret);

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(new (std::nothrow) BufferObject(&getDrm(allocationData.rootDeviceIndex), create.handle, allocationData.imgInfo->size, maxOsContextCount));
    if (!bo) {
        // Fix: previously the reserved GPU range leaked on this failure path.
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), allocationData.imgInfo->size, allocationData.rootDeviceIndex);
        return nullptr;
    }

    bo->gpuAddress = gpuRange;

    auto ret2 = bo->setTiling(I915_TILING_Y, static_cast<uint32_t>(allocationData.imgInfo->rowPitch));
    DEBUG_BREAK_IF(ret2 != true);
    UNUSED_VARIABLE(ret2);

    auto allocation = new DrmAllocation(allocationData.rootDeviceIndex, allocationData.type, bo.get(), nullptr, gpuRange, allocationData.imgInfo->size, MemoryPool::SystemCpuInaccessible);
    allocation->setDefaultGmm(gmm.release());
    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuRange), allocationData.imgInfo->size);
    // Ownership of the BO transfers to the allocation.
    bo.release();
    return allocation;
}
2020-07-01 20:03:46 +08:00
// Allocates memory inside a 32-bit-addressable GPU heap (external/internal,
// selected by the heap assigner). Two paths:
//  * hostPtr provided: wrap the caller's (page-aligned-down) memory in a
//    userptr BO bound to a heap VA, preserving the sub-page offset;
//  * no hostPtr: allocate driver-owned aligned host memory and bind it.
// All failure paths unwind the heap VA (and, on the second path, the CPU
// buffer). Returns nullptr on failure.
DrmAllocation *DrmMemoryManager::allocate32BitGraphicsMemoryImpl(const AllocationData &allocationData, bool useLocalMemory) {
    auto hwInfo = executionEnvironment.rootDeviceEnvironments[allocationData.rootDeviceIndex]->getHardwareInfo();
    auto allocatorToUse = heapAssigner.get32BitHeapIndex(allocationData.type, useLocalMemory, *hwInfo, allocationData.flags.use32BitFrontWindow);

    if (allocationData.hostPtr) {
        uintptr_t inputPtr = reinterpret_cast<uintptr_t>(allocationData.hostPtr);
        auto allocationSize = alignSizeWholePage(allocationData.hostPtr, allocationData.size);
        auto realAllocationSize = allocationSize;
        auto gfxPartition = getGfxPartition(allocationData.rootDeviceIndex);
        auto gpuVirtualAddress = gfxPartition->heapAllocate(allocatorToUse, realAllocationSize);
        if (!gpuVirtualAddress) {
            return nullptr;
        }
        auto alignedUserPointer = reinterpret_cast<uintptr_t>(alignDown(allocationData.hostPtr, MemoryConstants::pageSize));
        auto inputPointerOffset = inputPtr - alignedUserPointer;

        std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(alignedUserPointer, allocationSize, 0, allocationData.rootDeviceIndex));
        if (!bo) {
            gfxPartition->heapFree(allocatorToUse, gpuVirtualAddress, realAllocationSize);
            return nullptr;
        }

        bo->gpuAddress = GmmHelper::canonize(gpuVirtualAddress);
        // The allocation's GPU address carries the caller's sub-page offset.
        auto allocation = new DrmAllocation(allocationData.rootDeviceIndex, allocationData.type, bo.get(), const_cast<void *>(allocationData.hostPtr), GmmHelper::canonize(ptrOffset(gpuVirtualAddress, inputPointerOffset)),
                                            allocationSize, MemoryPool::System4KBPagesWith32BitGpuAddressing);
        allocation->set32BitAllocation(true);
        allocation->setGpuBaseAddress(GmmHelper::canonize(gfxPartition->getHeapBase(allocatorToUse)));
        allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuVirtualAddress), realAllocationSize);
        // Ownership of the BO transfers to the allocation.
        bo.release();
        return allocation;
    }

    size_t alignedAllocationSize = alignUp(allocationData.size, MemoryConstants::pageSize);
    auto allocationSize = alignedAllocationSize;
    auto gfxPartition = getGfxPartition(allocationData.rootDeviceIndex);
    auto gpuVA = gfxPartition->heapAllocate(allocatorToUse, allocationSize);

    if (!gpuVA) {
        return nullptr;
    }

    auto ptrAlloc = alignedMallocWrapper(alignedAllocationSize, getUserptrAlignment());

    if (!ptrAlloc) {
        gfxPartition->heapFree(allocatorToUse, gpuVA, allocationSize);
        return nullptr;
    }

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(reinterpret_cast<uintptr_t>(ptrAlloc), alignedAllocationSize, 0, allocationData.rootDeviceIndex));

    if (!bo) {
        alignedFreeWrapper(ptrAlloc);
        gfxPartition->heapFree(allocatorToUse, gpuVA, allocationSize);
        return nullptr;
    }

    bo->gpuAddress = GmmHelper::canonize(gpuVA);

    // softpin to the GPU address, res if it uses limitedRange Allocation
    auto allocation = new DrmAllocation(allocationData.rootDeviceIndex, allocationData.type, bo.get(), ptrAlloc, GmmHelper::canonize(gpuVA), alignedAllocationSize,
                                        MemoryPool::System4KBPagesWith32BitGpuAddressing);

    allocation->set32BitAllocation(true);
    allocation->setGpuBaseAddress(GmmHelper::canonize(gfxPartition->getHeapBase(allocatorToUse)));
    allocation->setDriverAllocatedCpuPtr(ptrAlloc);
    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuVA), allocationSize);
    // Ownership of the BO transfers to the allocation.
    bo.release();
    return allocation;
}
// Looks up an already-imported BO by its GEM handle and, when found, takes an
// extra reference on it. Returns nullptr when the handle was not imported yet.
// NOTE(review): callers serialize access through `mtx` (see the lock taken in
// createGraphicsAllocationFromSharedHandle before this is called).
BufferObject *DrmMemoryManager::findAndReferenceSharedBufferObject(int boHandle) {
    for (const auto &sharedBo : sharingBufferObjects) {
        if (sharedBo->handle != boHandle) {
            continue;
        }
        sharedBo->reference();
        return sharedBo;
    }
    return nullptr;
}
2019-04-01 20:04:50 +08:00
// Imports a dma-buf/PRIME fd as a GraphicsAllocation.
// Steps: fd -> GEM handle (PRIME_FD_TO_HANDLE), reuse an already-imported BO if the
// handle matches, otherwise wrap it in a new BO with a freshly acquired GPU VA.
// Returns nullptr on ioctl failure or BO allocation failure.
GraphicsAllocation *DrmMemoryManager::createGraphicsAllocationFromSharedHandle(osHandle handle, const AllocationProperties &properties, bool requireSpecificBitness) {
    // Guards the shared-BO registry (findAndReferenceSharedBufferObject / pushSharedBufferObject).
    std::unique_lock<std::mutex> lock(mtx);

    drm_prime_handle openFd = {0, 0, 0};
    openFd.fd = handle;

    auto ret = this->getDrm(properties.rootDeviceIndex).ioctl(DRM_IOCTL_PRIME_FD_TO_HANDLE, &openFd);

    if (ret != 0) {
        int err = errno;
        PRINT_DEBUG_STRING(DebugManager.flags.PrintDebugMessages.get(), stderr, "ioctl(PRIME_FD_TO_HANDLE) failed with %d. errno=%d(%s)\n", ret, err, strerror(err));
        DEBUG_BREAK_IF(ret != 0);
        UNUSED_VARIABLE(ret);
        return nullptr;
    }

    auto boHandle = openFd.handle;
    // Same GEM handle may already be tracked; reuse it (with an extra reference) if so.
    auto bo = findAndReferenceSharedBufferObject(boHandle);

    if (bo == nullptr) {
        // lseek to the end of the dma-buf fd yields the buffer size.
        size_t size = lseekFunction(handle, 0, SEEK_END);
        bo = new (std::nothrow) BufferObject(&getDrm(properties.rootDeviceIndex), boHandle, size, maxOsContextCount);
        if (!bo) {
            return nullptr;
        }

        // 2MB-aligned standard heap when local memory is supported; 32-bit imports go to the external heap.
        auto heapIndex = isLocalMemorySupported(properties.rootDeviceIndex) ? HeapIndex::HEAP_STANDARD2MB : HeapIndex::HEAP_STANDARD;
        if (requireSpecificBitness && this->force32bitAllocations) {
            heapIndex = HeapIndex::HEAP_EXTERNAL;
        }
        auto gpuRange = acquireGpuRange(size, properties.rootDeviceIndex, heapIndex);

        bo->setAddress(gpuRange);
        // Remember how much VA to release when the BO is destroyed.
        bo->setUnmapSize(size);

        pushSharedBufferObject(bo);
    }

    // Registry updates are done; the rest only touches the new allocation.
    lock.unlock();

    auto drmAllocation = new DrmAllocation(properties.rootDeviceIndex, properties.allocationType, bo, reinterpret_cast<void *>(bo->gpuAddress), bo->size,
                                           handle, MemoryPool::SystemCpuInaccessible);

    if (requireSpecificBitness && this->force32bitAllocations) {
        drmAllocation->set32BitAllocation(true);
        drmAllocation->setGpuBaseAddress(GmmHelper::canonize(getExternalHeapBaseAddress(properties.rootDeviceIndex, drmAllocation->isAllocatedInLocalMemoryPool())));
    }

    if (properties.imgInfo) {
        // Query tiling so the image description matches what the exporter created.
        drm_i915_gem_get_tiling getTiling = {0};
        getTiling.handle = boHandle;
        ret = this->getDrm(properties.rootDeviceIndex).ioctl(DRM_IOCTL_I915_GEM_GET_TILING, &getTiling);

        // NOTE(review): a failed GET_TILING is silently ignored here — presumably the
        // default (tiled) path is acceptable then; confirm against callers.
        if (ret == 0) {
            if (getTiling.tiling_mode == I915_TILING_NONE) {
                properties.imgInfo->linearStorage = true;
            }
        }

        // Gmm ownership passes to the allocation (freed in freeGraphicsMemoryImpl).
        Gmm *gmm = new Gmm(executionEnvironment.rootDeviceEnvironments[properties.rootDeviceIndex]->getGmmClientContext(), *properties.imgInfo, createStorageInfoFromProperties(properties));
        drmAllocation->setDefaultGmm(gmm);
    }

    return drmAllocation;
}
2021-03-18 23:16:58 +08:00
void DrmMemoryManager : : closeSharedHandle ( GraphicsAllocation * gfxAllocation ) {
DrmAllocation * drmAllocation = static_cast < DrmAllocation * > ( gfxAllocation ) ;
if ( drmAllocation - > peekSharedHandle ( ) ! = Sharing : : nonSharedResource ) {
closeFunction ( drmAllocation - > peekSharedHandle ( ) ) ;
drmAllocation - > setSharedHandle ( Sharing : : nonSharedResource ) ;
}
2020-11-23 05:46:47 +08:00
}
2017-12-21 07:45:38 +08:00
// Creates a padded copy-view of an existing host-ptr allocation: a new userptr BO
// over the same (page-aligned) CPU range, bound at a freshly acquired GPU VA large
// enough to include the requested padding.
// Returns nullptr on userptr creation failure.
// Fix: release the acquired GPU VA range on the failure path — previously it leaked.
GraphicsAllocation *DrmMemoryManager::createPaddedAllocation(GraphicsAllocation *inputGraphicsAllocation, size_t sizeWithPadding) {
    uint64_t gpuRange = 0llu;
    auto rootDeviceIndex = inputGraphicsAllocation->getRootDeviceIndex();
    gpuRange = acquireGpuRange(sizeWithPadding, rootDeviceIndex, HeapIndex::HEAP_STANDARD);

    auto srcPtr = inputGraphicsAllocation->getUnderlyingBuffer();
    auto srcSize = inputGraphicsAllocation->getUnderlyingBufferSize();
    // Userptr requires page-aligned base/size; remember the sub-page offset so the
    // allocation's GPU address still points at the original srcPtr.
    auto alignedSrcSize = alignUp(srcSize, MemoryConstants::pageSize);
    auto alignedPtr = (uintptr_t)alignDown(srcPtr, MemoryConstants::pageSize);
    auto offset = (uintptr_t)srcPtr - alignedPtr;

    std::unique_ptr<BufferObject, BufferObject::Deleter> bo(allocUserptr(alignedPtr, alignedSrcSize, 0, rootDeviceIndex));
    if (!bo) {
        // Don't leak the reserved GPU VA when the userptr BO cannot be created.
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), sizeWithPadding, rootDeviceIndex);
        return nullptr;
    }
    bo->gpuAddress = gpuRange;

    auto allocation = new DrmAllocation(rootDeviceIndex, inputGraphicsAllocation->getAllocationType(), bo.get(), srcPtr, GmmHelper::canonize(ptrOffset(gpuRange, offset)), sizeWithPadding,
                                        inputGraphicsAllocation->getMemoryPool());
    // The allocation now owns the VA reservation; freeGraphicsMemoryImpl releases it.
    allocation->setReservedAddressRange(reinterpret_cast<void *>(gpuRange), sizeWithPadding);
    bo.release(); // ownership transferred to the allocation
    return allocation;
}
2018-05-08 16:00:23 +08:00
void DrmMemoryManager : : addAllocationToHostPtrManager ( GraphicsAllocation * gfxAllocation ) {
DrmAllocation * drmMemory = static_cast < DrmAllocation * > ( gfxAllocation ) ;
2020-08-17 22:38:21 +08:00
auto maxOsContextCount = 1u ;
2018-05-08 16:00:23 +08:00
FragmentStorage fragment = { } ;
fragment . driverAllocation = true ;
fragment . fragmentCpuPointer = gfxAllocation - > getUnderlyingBuffer ( ) ;
fragment . fragmentSize = alignUp ( gfxAllocation - > getUnderlyingBufferSize ( ) , MemoryConstants : : pageSize ) ;
2021-04-03 01:01:51 +08:00
auto osHandle = new OsHandleLinux ( ) ;
osHandle - > bo = drmMemory - > getBO ( ) ;
fragment . osInternalStorage = osHandle ;
2020-08-17 22:38:21 +08:00
fragment . residency = new ResidencyData ( maxOsContextCount ) ;
2020-07-07 14:41:26 +08:00
hostPtrManager - > storeFragment ( gfxAllocation - > getRootDeviceIndex ( ) , fragment ) ;
2018-05-08 16:00:23 +08:00
}
void DrmMemoryManager : : removeAllocationFromHostPtrManager ( GraphicsAllocation * gfxAllocation ) {
auto buffer = gfxAllocation - > getUnderlyingBuffer ( ) ;
2020-07-07 14:41:26 +08:00
auto fragment = hostPtrManager - > getFragment ( { buffer , gfxAllocation - > getRootDeviceIndex ( ) } ) ;
2018-05-08 16:00:23 +08:00
if ( fragment & & fragment - > driverAllocation ) {
OsHandle * osStorageToRelease = fragment - > osInternalStorage ;
2018-08-27 17:33:25 +08:00
ResidencyData * residencyDataToRelease = fragment - > residency ;
2020-07-07 14:41:26 +08:00
if ( hostPtrManager - > releaseHostPtr ( gfxAllocation - > getRootDeviceIndex ( ) , buffer ) ) {
2018-05-08 16:00:23 +08:00
delete osStorageToRelease ;
2018-08-27 17:33:25 +08:00
delete residencyDataToRelease ;
2018-05-08 16:00:23 +08:00
}
}
}
2017-12-21 07:45:38 +08:00
// Tears down a DRM allocation. Order matters:
//   1. unregister from the tracking lists,
//   2. evict from every registered engine's OS context,
//   3. unmap any CPU mmap, delete owned Gmms,
//   4. release BOs (or host-ptr fragments) and the shared fd,
//   5. release the reserved GPU VA and driver-allocated CPU memory,
//   6. free resource-registration handles, then delete the allocation object.
void DrmMemoryManager::freeGraphicsMemoryImpl(GraphicsAllocation *gfxAllocation) {
    DrmAllocation *drmAlloc = static_cast<DrmAllocation *>(gfxAllocation);
    this->unregisterAllocation(gfxAllocation);
    // Make sure no engine still holds the allocation resident before its BOs go away.
    for (auto &engine : this->registeredEngines) {
        auto memoryOperationsInterface = static_cast<DrmMemoryOperationsHandler *>(executionEnvironment.rootDeviceEnvironments[gfxAllocation->getRootDeviceIndex()]->memoryOperationsInterface.get());
        memoryOperationsInterface->evictWithinOsContext(engine.osContext, *gfxAllocation);
    }

    // Undo any CPU mapping created for this allocation.
    if (drmAlloc->getMmapPtr()) {
        this->munmapFunction(drmAlloc->getMmapPtr(), drmAlloc->getMmapSize());
    }

    // The allocation owns its Gmm objects (one per handle).
    for (auto handleId = 0u; handleId < gfxAllocation->getNumGmms(); handleId++) {
        delete gfxAllocation->getGmm(handleId);
    }

    if (gfxAllocation->fragmentsStorage.fragmentCount) {
        // Host-ptr-backed allocation: fragments are refcounted by the host-ptr manager.
        cleanGraphicsMemoryCreatedFromHostPtr(gfxAllocation);
    } else {
        auto &bos = static_cast<DrmAllocation *>(gfxAllocation)->getBOs();
        for (auto bo : bos) {
            // Reused (shared) BOs are unreferenced without synchronous destruction.
            unreference(bo, bo && bo->isReused ? false : true);
        }
        closeSharedHandle(gfxAllocation);
    }

    // Give back the reserved GPU VA range (no-op if none was recorded).
    releaseGpuRange(gfxAllocation->getReservedAddressPtr(), gfxAllocation->getReservedAddressSize(), gfxAllocation->getRootDeviceIndex());
    alignedFreeWrapper(gfxAllocation->getDriverAllocatedCpuPtr());

    drmAlloc->freeRegisteredBOBindExtHandles(&getDrm(drmAlloc->getRootDeviceIndex()));

    delete gfxAllocation;
}
2017-12-21 07:45:38 +08:00
2019-03-04 21:50:26 +08:00
void DrmMemoryManager : : handleFenceCompletion ( GraphicsAllocation * allocation ) {
static_cast < DrmAllocation * > ( allocation ) - > getBO ( ) - > wait ( - 1 ) ;
}
2020-10-21 16:50:53 +08:00
// Creates a per-device allocation that aliases the storage of an existing
// multi-device allocation: mmap-backed defaults are re-imported through their
// internal handle, everything else goes through the regular host-ptr path.
GraphicsAllocation *DrmMemoryManager::createGraphicsAllocationFromExistingStorage(AllocationProperties &properties, void *ptr, MultiGraphicsAllocation &multiGraphicsAllocation) {
    auto defaultAlloc = multiGraphicsAllocation.getDefaultGraphicsAllocation();
    if (static_cast<DrmAllocation *>(defaultAlloc)->getMmapPtr() == nullptr) {
        return allocateGraphicsMemoryWithProperties(properties, ptr);
    }
    // Mirror the default allocation's size and pin the GPU VA to the given CPU pointer.
    properties.size = defaultAlloc->getUnderlyingBufferSize();
    properties.gpuAddress = castToUint64(ptr);
    auto internalHandle = defaultAlloc->peekInternalHandle(this);
    return createUSMHostAllocationFromSharedHandle(static_cast<osHandle>(internalHandle), properties);
}
// Imports a dma-buf fd as a USM-host allocation at the GPU address supplied in
// properties. Returns nullptr on ioctl or BO allocation failure.
// Fix: BufferObject construction is now nothrow + null-checked, matching the
// import path in createGraphicsAllocationFromSharedHandle.
DrmAllocation *DrmMemoryManager::createUSMHostAllocationFromSharedHandle(osHandle handle, const AllocationProperties &properties) {
    std::unique_lock<std::mutex> lock(mtx);

    drm_prime_handle openFd = {0, 0, 0};
    openFd.fd = handle;

    auto ret = this->getDrm(properties.rootDeviceIndex).ioctl(DRM_IOCTL_PRIME_FD_TO_HANDLE, &openFd);
    if (ret != 0) {
        int err = this->getDrm(properties.rootDeviceIndex).getErrno();
        PRINT_DEBUG_STRING(DebugManager.flags.PrintDebugMessages.get(), stderr, "ioctl(PRIME_FD_TO_HANDLE) failed with %d. errno=%d(%s)\n", ret, err, strerror(err));
        DEBUG_BREAK_IF(ret != 0);
        return nullptr;
    }

    auto bo = new (std::nothrow) BufferObject(&getDrm(properties.rootDeviceIndex), openFd.handle, properties.size, maxOsContextCount);
    if (!bo) {
        return nullptr;
    }
    // Caller dictates the GPU VA (USM host semantics: GPU address == CPU address).
    bo->setAddress(properties.gpuAddress);

    return new DrmAllocation(properties.rootDeviceIndex, properties.allocationType, bo, reinterpret_cast<void *>(bo->gpuAddress), bo->size,
                             handle, MemoryPool::SystemCpuInaccessible);
}
2020-01-07 14:42:40 +08:00
uint64_t DrmMemoryManager : : getSystemSharedMemory ( uint32_t rootDeviceIndex ) {
2017-12-21 07:45:38 +08:00
uint64_t hostMemorySize = MemoryConstants : : pageSize * ( uint64_t ) ( sysconf ( _SC_PHYS_PAGES ) ) ;
2018-03-15 22:54:28 +08:00
drm_i915_gem_context_param getContextParam = { } ;
getContextParam . param = I915_CONTEXT_PARAM_GTT_SIZE ;
2020-01-07 14:42:40 +08:00
auto ret = getDrm ( rootDeviceIndex ) . ioctl ( DRM_IOCTL_I915_GEM_CONTEXT_GETPARAM , & getContextParam ) ;
2017-12-21 07:45:38 +08:00
DEBUG_BREAK_IF ( ret ! = 0 ) ;
2019-11-14 21:20:23 +08:00
UNUSED_VARIABLE ( ret ) ;
2018-03-15 22:54:28 +08:00
uint64_t gpuMemorySize = getContextParam . value ;
2017-12-21 07:45:38 +08:00
return std : : min ( hostMemorySize , gpuMemorySize ) ;
}
2020-01-07 14:42:40 +08:00
// Creates userptr BOs for every fragment in handleStorage that does not yet have
// OS storage, optionally validates the host pointers by pinning, and publishes
// the successfully created fragments to the host-ptr manager.
// On failure the offending fragments are flagged freeTheFragment so the caller
// can clean them up via cleanOsHandles.
MemoryManager::AllocationStatus DrmMemoryManager::populateOsHandles(OsHandleStorage &handleStorage, uint32_t rootDeviceIndex) {
    BufferObject *allocatedBos[maxFragmentsCount];
    uint32_t numberOfBosAllocated = 0;
    // Maps position in allocatedBos back to the fragment index it came from.
    uint32_t indexesOfAllocatedBos[maxFragmentsCount];
    // Local shadows the member: fragment residency is sized for exactly one OS context.
    auto maxOsContextCount = 1u;

    for (unsigned int i = 0; i < maxFragmentsCount; i++) {
        // If there is no fragment it means it already exists.
        if (!handleStorage.fragmentStorageData[i].osHandleStorage && handleStorage.fragmentStorageData[i].fragmentSize) {
            auto osHandle = new OsHandleLinux();
            handleStorage.fragmentStorageData[i].osHandleStorage = osHandle;
            handleStorage.fragmentStorageData[i].residency = new ResidencyData(maxOsContextCount);

            osHandle->bo = allocUserptr((uintptr_t)handleStorage.fragmentStorageData[i].cpuPtr,
                                        handleStorage.fragmentStorageData[i].fragmentSize,
                                        0, rootDeviceIndex);
            if (!osHandle->bo) {
                // Storage/residency were already attached; mark for cleanup and bail.
                handleStorage.fragmentStorageData[i].freeTheFragment = true;
                return AllocationStatus::Error;
            }

            allocatedBos[numberOfBosAllocated] = osHandle->bo;
            indexesOfAllocatedBos[numberOfBosAllocated] = i;
            numberOfBosAllocated++;
        }
    }

    if (validateHostPtrMemory) {
        // Pin the new BOs on the default engine to verify the host memory is valid.
        int result = pinBBs.at(rootDeviceIndex)->validateHostPtr(allocatedBos, numberOfBosAllocated, registeredEngines[defaultEngineIndex[rootDeviceIndex]].osContext, 0, getDefaultDrmContextId(rootDeviceIndex));

        if (result == EFAULT) {
            // Bad host pointer: flag every fragment created in this call for cleanup.
            for (uint32_t i = 0; i < numberOfBosAllocated; i++) {
                handleStorage.fragmentStorageData[indexesOfAllocatedBos[i]].freeTheFragment = true;
            }
            return AllocationStatus::InvalidHostPointer;
        } else if (result != 0) {
            return AllocationStatus::Error;
        }
    }

    // Everything validated: register only the fragments created by this call.
    for (uint32_t i = 0; i < numberOfBosAllocated; i++) {
        hostPtrManager->storeFragment(rootDeviceIndex, handleStorage.fragmentStorageData[indexesOfAllocatedBos[i]]);
    }

    return AllocationStatus::Success;
}
2018-03-27 20:01:04 +08:00
2019-11-15 16:59:48 +08:00
// Destroys the fragments flagged freeTheFragment by populateOsHandles: waits for
// and unreferences each fragment's BO, then deletes its OS storage and residency
// data, nulling the pointers so the storage is not touched again.
void DrmMemoryManager::cleanOsHandles(OsHandleStorage &handleStorage, uint32_t rootDeviceIndex) {
    for (unsigned int i = 0; i < maxFragmentsCount; i++) {
        if (handleStorage.fragmentStorageData[i].freeTheFragment) {
            auto osHandle = static_cast<OsHandleLinux *>(handleStorage.fragmentStorageData[i].osHandleStorage);
            if (osHandle->bo) {
                BufferObject *search = osHandle->bo;
                // Make sure the GPU is done with the BO before dropping the last reference.
                search->wait(-1);
                auto refCount = unreference(search, true);
                // A fragment being cleaned here is expected to hold the sole reference.
                DEBUG_BREAK_IF(refCount != 1u);
                UNUSED_VARIABLE(refCount);
            }
            delete handleStorage.fragmentStorageData[i].osHandleStorage;
            handleStorage.fragmentStorageData[i].osHandleStorage = nullptr;
            delete handleStorage.fragmentStorageData[i].residency;
            handleStorage.fragmentStorageData[i].residency = nullptr;
        }
    }
}
2018-02-27 06:23:43 +08:00
bool DrmMemoryManager : : setDomainCpu ( GraphicsAllocation & graphicsAllocation , bool writeEnable ) {
DEBUG_BREAK_IF ( writeEnable ) ; //unsupported path (for CPU writes call SW_FINISH ioctl in unlockResource)
auto bo = static_cast < DrmAllocation * > ( & graphicsAllocation ) - > getBO ( ) ;
if ( bo = = nullptr )
return false ;
// move a buffer object to the CPU read, and possibly write domain, including waiting on flushes to occur
2018-03-20 17:49:09 +08:00
drm_i915_gem_set_domain set_domain = { } ;
2018-02-27 06:23:43 +08:00
set_domain . handle = bo - > peekHandle ( ) ;
set_domain . read_domains = I915_GEM_DOMAIN_CPU ;
set_domain . write_domain = writeEnable ? I915_GEM_DOMAIN_CPU : 0 ;
2020-01-07 14:42:40 +08:00
return getDrm ( graphicsAllocation . getRootDeviceIndex ( ) ) . ioctl ( DRM_IOCTL_I915_GEM_SET_DOMAIN , & set_domain ) = = 0 ;
2018-02-27 06:23:43 +08:00
}
2019-01-24 18:51:33 +08:00
// Returns a CPU pointer for the allocation:
//  - local-memory allocations go through the dedicated local-memory path,
//  - allocations that already have a CPU pointer just get their domain set,
//  - otherwise the BO is mmapped via DRM_IOCTL_I915_GEM_MMAP and the mapping
//    is cached on the BO (undone in unlockResourceImpl).
// Returns nullptr when there is no BO or the mmap ioctl fails.
void *DrmMemoryManager::lockResourceImpl(GraphicsAllocation &graphicsAllocation) {
    if (MemoryPool::LocalMemory == graphicsAllocation.getMemoryPool()) {
        return lockResourceInLocalMemoryImpl(graphicsAllocation);
    }

    auto cpuPtr = graphicsAllocation.getUnderlyingBuffer();
    if (cpuPtr != nullptr) {
        // Already CPU-visible; just move the BO to the CPU read domain.
        auto success = setDomainCpu(graphicsAllocation, false);
        DEBUG_BREAK_IF(!success);
        (void)success;
        return cpuPtr;
    }

    auto bo = static_cast<DrmAllocation &>(graphicsAllocation).getBO();
    if (bo == nullptr)
        return nullptr;

    drm_i915_gem_mmap mmap_arg = {};
    mmap_arg.handle = bo->peekHandle();
    mmap_arg.size = bo->peekSize();
    if (getDrm(graphicsAllocation.getRootDeviceIndex()).ioctl(DRM_IOCTL_I915_GEM_MMAP, &mmap_arg) != 0) {
        return nullptr;
    }

    // Remember the mapping so unlockResourceImpl can release it.
    bo->setLockedAddress(reinterpret_cast<void *>(mmap_arg.addr_ptr));

    auto success = setDomainCpu(graphicsAllocation, false);
    DEBUG_BREAK_IF(!success);
    (void)success;
    return bo->peekLockedAddress();
}
2019-01-24 18:51:33 +08:00
void DrmMemoryManager : : unlockResourceImpl ( GraphicsAllocation & graphicsAllocation ) {
2020-03-23 17:51:30 +08:00
if ( MemoryPool : : LocalMemory = = graphicsAllocation . getMemoryPool ( ) ) {
return unlockResourceInLocalMemoryImpl ( static_cast < DrmAllocation & > ( graphicsAllocation ) . getBO ( ) ) ;
}
2019-01-24 18:51:33 +08:00
auto cpuPtr = graphicsAllocation . getUnderlyingBuffer ( ) ;
2018-02-27 06:23:43 +08:00
if ( cpuPtr ! = nullptr ) {
return ;
}
2019-01-24 18:51:33 +08:00
auto bo = static_cast < DrmAllocation & > ( graphicsAllocation ) . getBO ( ) ;
2018-02-27 06:23:43 +08:00
if ( bo = = nullptr )
return ;
2020-01-07 14:42:40 +08:00
releaseReservedCpuAddressRange ( bo - > peekLockedAddress ( ) , bo - > peekSize ( ) , graphicsAllocation . getRootDeviceIndex ( ) ) ;
2018-02-27 06:23:43 +08:00
bo - > setLockedAddress ( nullptr ) ;
}
2019-07-08 02:33:17 +08:00
2020-01-07 14:42:40 +08:00
int DrmMemoryManager : : obtainFdFromHandle ( int boHandle , uint32_t rootDeviceindex ) {
2019-06-06 22:26:47 +08:00
drm_prime_handle openFd = { 0 , 0 , 0 } ;
openFd . flags = DRM_CLOEXEC | DRM_RDWR ;
openFd . handle = boHandle ;
2020-01-07 14:42:40 +08:00
getDrm ( rootDeviceindex ) . ioctl ( DRM_IOCTL_PRIME_HANDLE_TO_FD , & openFd ) ;
2019-06-06 22:26:47 +08:00
return openFd . fd ;
}
2019-07-17 21:38:14 +08:00
2021-04-20 20:24:04 +08:00
uint32_t DrmMemoryManager : : getDefaultDrmContextId ( uint32_t rootDeviceIndex ) const {
auto osContextLinux = static_cast < OsContextLinux * > ( registeredEngines [ defaultEngineIndex [ rootDeviceIndex ] ] . osContext ) ;
2019-10-22 19:29:39 +08:00
return osContextLinux - > getDrmContextIds ( ) [ 0 ] ;
2019-07-17 21:38:14 +08:00
}
2019-08-05 19:34:29 +08:00
2020-08-28 14:37:26 +08:00
size_t DrmMemoryManager : : getUserptrAlignment ( ) {
auto alignment = MemoryConstants : : allocationAlignment ;
if ( DebugManager . flags . ForceUserptrAlignment . get ( ) ! = - 1 ) {
alignment = DebugManager . flags . ForceUserptrAlignment . get ( ) * MemoryConstants : : kiloByte ;
}
return alignment ;
}
2020-01-07 14:42:40 +08:00
// Fetches the Drm wrapper for the given root device from the execution environment.
Drm &DrmMemoryManager::getDrm(uint32_t rootDeviceIndex) const {
    auto &rootDeviceEnvironment = this->executionEnvironment.rootDeviceEnvironments[rootDeviceIndex];
    return *rootDeviceEnvironment->osInterface->get()->getDrm();
}
2020-03-19 17:41:35 +08:00
uint32_t DrmMemoryManager : : getRootDeviceIndex ( const Drm * drm ) {
auto rootDeviceCount = this - > executionEnvironment . rootDeviceEnvironments . size ( ) ;
for ( auto rootDeviceIndex = 0u ; rootDeviceIndex < rootDeviceCount ; rootDeviceIndex + + ) {
if ( & getDrm ( rootDeviceIndex ) = = drm ) {
return rootDeviceIndex ;
}
}
2020-03-27 22:21:18 +08:00
return CommonConstants : : unspecifiedDeviceIndex ;
2020-03-19 17:41:35 +08:00
}
2020-07-01 16:38:19 +08:00
// Reserves a GPU VA range of the requested size from the standard heap.
AddressRange DrmMemoryManager::reserveGpuAddress(size_t size, uint32_t rootDeviceIndex) {
    const auto gpuVa = acquireGpuRange(size, rootDeviceIndex, HeapIndex::HEAP_STANDARD);
    return {gpuVa, size};
}
void DrmMemoryManager : : freeGpuAddress ( AddressRange addressRange , uint32_t rootDeviceIndex ) {
releaseGpuRange ( reinterpret_cast < void * > ( addressRange . address ) , addressRange . size , rootDeviceIndex ) ;
}
2020-09-10 18:36:44 +08:00
std : : unique_lock < std : : mutex > DrmMemoryManager : : acquireAllocLock ( ) {
return std : : unique_lock < std : : mutex > ( this - > allocMutex ) ;
}
std : : vector < GraphicsAllocation * > & DrmMemoryManager : : getSysMemAllocs ( ) {
return this - > sysMemAllocs ;
}
std : : vector < GraphicsAllocation * > & DrmMemoryManager : : getLocalMemAllocs ( uint32_t rootDeviceIndex ) {
return this - > localMemAllocs [ rootDeviceIndex ] ;
}
void DrmMemoryManager : : registerSysMemAlloc ( GraphicsAllocation * allocation ) {
std : : lock_guard < std : : mutex > lock ( this - > allocMutex ) ;
this - > sysMemAllocs . push_back ( allocation ) ;
}
void DrmMemoryManager : : registerLocalMemAlloc ( GraphicsAllocation * allocation , uint32_t rootDeviceIndex ) {
std : : lock_guard < std : : mutex > lock ( this - > allocMutex ) ;
this - > localMemAllocs [ rootDeviceIndex ] . push_back ( allocation ) ;
}
void DrmMemoryManager : : unregisterAllocation ( GraphicsAllocation * allocation ) {
std : : lock_guard < std : : mutex > lock ( this - > allocMutex ) ;
sysMemAllocs . erase ( std : : remove ( sysMemAllocs . begin ( ) , sysMemAllocs . end ( ) , allocation ) ,
sysMemAllocs . end ( ) ) ;
localMemAllocs [ allocation - > getRootDeviceIndex ( ) ] . erase ( std : : remove ( localMemAllocs [ allocation - > getRootDeviceIndex ( ) ] . begin ( ) ,
localMemAllocs [ allocation - > getRootDeviceIndex ( ) ] . end ( ) ,
allocation ) ,
localMemAllocs [ allocation - > getRootDeviceIndex ( ) ] . end ( ) ) ;
}
2020-09-17 19:27:32 +08:00
2020-10-09 16:48:37 +08:00
void DrmMemoryManager : : registerAllocationInOs ( GraphicsAllocation * allocation ) {
2020-11-19 22:11:37 +08:00
if ( allocation & & getDrm ( allocation - > getRootDeviceIndex ( ) ) . resourceRegistrationEnabled ( ) ) {
2020-09-17 19:27:32 +08:00
auto drmAllocation = static_cast < DrmAllocation * > ( allocation ) ;
drmAllocation - > registerBOBindExtHandle ( & getDrm ( drmAllocation - > getRootDeviceIndex ( ) ) ) ;
2021-03-10 07:02:59 +08:00
if ( isAllocationTypeToCapture ( drmAllocation - > getAllocationType ( ) ) ) {
drmAllocation - > markForCapture ( ) ;
}
2020-09-17 19:27:32 +08:00
}
}
2019-03-26 18:59:46 +08:00
} // namespace NEO