/*
 * Copyright (C) 2017-2019 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 *
 */
|
|
|
|
|
2019-02-27 18:39:32 +08:00
|
|
|
#include "runtime/os_interface/linux/drm_memory_manager.h"
|
|
|
|
|
2019-05-29 10:09:40 +08:00
|
|
|
#include "core/helpers/ptr_math.h"
|
2018-12-11 15:21:56 +08:00
|
|
|
#include "runtime/command_stream/command_stream_receiver.h"
|
2017-12-21 07:45:38 +08:00
|
|
|
#include "runtime/device/device.h"
|
2019-03-25 20:12:55 +08:00
|
|
|
#include "runtime/execution_environment/execution_environment.h"
|
2019-02-27 18:39:32 +08:00
|
|
|
#include "runtime/gmm_helper/gmm.h"
|
|
|
|
#include "runtime/gmm_helper/gmm_helper.h"
|
|
|
|
#include "runtime/gmm_helper/resource_info.h"
|
2019-03-01 23:14:28 +08:00
|
|
|
#include "runtime/helpers/hw_info.h"
|
2017-12-21 07:45:38 +08:00
|
|
|
#include "runtime/helpers/options.h"
|
2019-02-27 18:39:32 +08:00
|
|
|
#include "runtime/helpers/surface_formats.h"
|
2018-10-24 14:46:54 +08:00
|
|
|
#include "runtime/memory_manager/host_ptr_manager.h"
|
2019-04-25 16:32:56 +08:00
|
|
|
#include "runtime/os_interface/32bit_memory.h"
|
2018-12-11 15:21:56 +08:00
|
|
|
#include "runtime/os_interface/linux/os_context_linux.h"
|
2019-03-25 20:12:55 +08:00
|
|
|
#include "runtime/os_interface/linux/os_interface.h"
|
2019-04-02 16:53:22 +08:00
|
|
|
#include "runtime/os_interface/linux/tiling_mode_helper.h"
|
2017-12-21 07:45:38 +08:00
|
|
|
|
|
|
|
#include "drm/i915_drm.h"
|
|
|
|
|
2019-02-27 18:39:32 +08:00
|
|
|
#include <cstring>
|
|
|
|
#include <iostream>
|
2017-12-21 07:45:38 +08:00
|
|
|
|
2019-03-26 18:59:46 +08:00
|
|
|
namespace NEO {
|
2017-12-21 07:45:38 +08:00
|
|
|
|
2019-03-25 20:12:55 +08:00
|
|
|
// Builds the DRM-backed memory manager: initializes the gfx partition,
// optionally starts the GEM-close worker thread, creates the pinning buffer
// object used by emitPinningRequest()/host-ptr validation, and sets up the
// internal GPU address range allocators.
DrmMemoryManager::DrmMemoryManager(gemCloseWorkerMode mode,
                                   bool forcePinAllowed,
                                   bool validateHostPtrMemory,
                                   ExecutionEnvironment &executionEnvironment) : MemoryManager(executionEnvironment),
                                                                                 drm(executionEnvironment.osInterface->get()->getDrm()),
                                                                                 pinBB(nullptr),
                                                                                 forcePinEnabled(forcePinAllowed),
                                                                                 validateHostPtrMemory(validateHostPtrMemory) {
    gfxPartition.init(platformDevices[0]->capabilityTable.gpuAddressSpace);
    MemoryManager::virtualPaddingAvailable = true;
    if (mode != gemCloseWorkerMode::gemCloseWorkerInactive) {
        gemCloseWorker.reset(new DrmGemCloseWorker(*this));
    }

    // One page of host memory backing the pin buffer object.
    // NOTE(review): allocated even when neither pinning nor host-ptr
    // validation is requested; in that case it is freed in the !pinBB branch.
    auto mem = alignedMalloc(MemoryConstants::pageSize, MemoryConstants::pageSize);
    DEBUG_BREAK_IF(mem == nullptr);

    if (forcePinEnabled || validateHostPtrMemory) {
        pinBB = allocUserptr(reinterpret_cast<uintptr_t>(mem), MemoryConstants::pageSize, 0);
    }

    if (!pinBB) {
        alignedFree(mem);
        // NOTE(review): this break also fires when pinning was never
        // requested, not only when allocUserptr failed.
        DEBUG_BREAK_IF(true);
        // Host-ptr validation cannot operate without the pin BB.
        UNRECOVERABLE_IF(validateHostPtrMemory);
    } else {
        // Mark the bo as owning `mem` so unreference() (called from the
        // destructor) releases it via alignedFreeWrapper.
        pinBB->isAllocated = true;
    }

    initInternalRangeAllocator(platformDevices[0]->capabilityTable.gpuAddressSpace);
}
|
|
|
|
|
|
|
|
// Tears down the memory manager. When initInternalRangeAllocator() carved the
// two 32-bit heaps out of the limited-range allocator, return each range with
// the exact size it was allocated with.
DrmMemoryManager::~DrmMemoryManager() {
    if (this->limitedGpuAddressRangeAllocator) {
        // Sizes must mirror initInternalRangeAllocator(): external heap was
        // allocated with 4GB - pageSize, internal heap with full 4GB.
        // Fix: the previous code freed the internal heap with the external
        // heap's size and vice versa.
        const uint64_t externalHeapSize = 4 * MemoryConstants::gigaByte - MemoryConstants::pageSize;
        const uint64_t internalHeapSize = 4 * MemoryConstants::gigaByte;

        // freeing space for internal 32bit allocator
        this->limitedGpuAddressRangeAllocator->free(this->internal32bitAllocator->getBase(), internalHeapSize);

        // freeing space for external 32bit allocator
        this->limitedGpuAddressRangeAllocator->free(this->allocator32Bit->getBase(), externalHeapSize);
    }
    applyCommonCleanup();
    if (gemCloseWorker) {
        // Drain (non-blocking close) the worker before releasing resources.
        gemCloseWorker->close(false);
    }
    if (pinBB) {
        // Drops the last reference; frees the host page backing the pin BB.
        unreference(pinBB);
        pinBB = nullptr;
    }
}
|
|
|
|
|
2019-04-25 16:32:56 +08:00
|
|
|
// Chooses the GPU heap layout based on the address-space size. On reduced
// address spaces (below 48-bit) or when host-ptr tracking is disabled, one
// limited-range allocator owns the whole space and both 32-bit heaps are
// carved out of it; otherwise only the default internal 32-bit allocator is
// created.
void DrmMemoryManager::initInternalRangeAllocator(size_t gpuRange) {
    if (gpuRange < MemoryConstants::max48BitAddress || !DebugManager.flags.EnableHostPtrTracking.get()) {
        // set the allocator with the whole reduced address space range - pageSize (base address) to
        // avoid starting address of the heap to be 0, which could be interpreted as invalid address
        // nullPtr.
        this->limitedGpuAddressRangeAllocator.reset(new AllocatorLimitedRange(MemoryConstants::pageSize, gpuRange + 1 - MemoryConstants::pageSize));

        // 0x1000 ~ 0xFFFFFFFF address space for external 32bit allocator //
        uint64_t size = 4 * MemoryConstants::gigaByte - MemoryConstants::pageSize;
        uint64_t allocatorBase = this->limitedGpuAddressRangeAllocator->allocate(size);
        allocator32Bit.reset(new Allocator32bit(allocatorBase, size));

        // 0x100000000 ~ 0x1FFFFFFFF address space for internal 32bit allocator //
        // (full 4GB: the extra page compensates for the base offset above)
        size += MemoryConstants::pageSize;
        allocatorBase = this->limitedGpuAddressRangeAllocator->allocate(size);
        internal32bitAllocator.reset(new Allocator32bit(allocatorBase, size));
    } else {
        // when in full range space, set the internal32bitAllocator using 32bit addressing allocator.
        internal32bitAllocator.reset(new Allocator32bit);
    }
}
|
|
|
|
|
2019-03-26 18:59:46 +08:00
|
|
|
// Removes a shared buffer object from the sharing registry.
// Caller must hold mtx (see unreference()).
void DrmMemoryManager::eraseSharedBufferObject(NEO::BufferObject *bo) {
    auto position = std::find(sharingBufferObjects.begin(), sharingBufferObjects.end(), bo);
    // Any bo with isReused == true is required to be present in the vector.
    DEBUG_BREAK_IF(position == sharingBufferObjects.end());
    sharingBufferObjects.erase(position);
}
|
|
|
|
|
2019-03-26 18:59:46 +08:00
|
|
|
// Registers a buffer object as shared: marks it reused (so unreference()
// takes the registry lock and erases it on last release) and records it in
// sharingBufferObjects. Caller must hold mtx.
void DrmMemoryManager::pushSharedBufferObject(NEO::BufferObject *bo) {
    bo->isReused = true;
    sharingBufferObjects.push_back(bo);
}
|
|
|
|
|
2019-03-26 18:59:46 +08:00
|
|
|
// Drops one reference from the buffer object and destroys it when the count
// reaches zero. With synchronousDestroy, spins until this caller holds the
// last reference. Returns the reference count observed before decrementing
// (or uint32_t(-1) for a null bo).
uint32_t DrmMemoryManager::unreference(NEO::BufferObject *bo, bool synchronousDestroy) {
    if (!bo)
        return -1;

    if (synchronousDestroy) {
        // Busy-wait until every other owner has released its reference.
        while (bo->refCount > 1)
            ;
    }

    // Shared (isReused) objects live in sharingBufferObjects; take the lock so
    // the decrement and potential erase are atomic w.r.t. lookups.
    std::unique_lock<std::mutex> lock(mtx, std::defer_lock);
    if (bo->isReused) {
        lock.lock();
    }

    uint32_t r = bo->refCount.fetch_sub(1);

    if (r == 1) {
        // Last reference: capture what must be freed before deleting the bo.
        auto unmapSize = bo->peekUnmapSize();
        // Address to free: only when the bo owns host memory (isAllocated) or
        // holds a GPU range (unmapSize > 0).
        auto address = bo->isAllocated || unmapSize > 0 ? reinterpret_cast<void *>(bo->gpuAddress) : nullptr;
        auto allocatorType = bo->peekAllocationType();

        if (bo->isReused) {
            eraseSharedBufferObject(bo);
        }

        bo->close();

        if (lock) {
            lock.unlock();
        }

        delete bo;
        if (address) {
            if (unmapSize) {
                // GPU address came from one of the range allocators.
                releaseGpuRange(address, unmapSize, allocatorType);
            } else {
                // Host memory owned by the bo (isAllocated path).
                alignedFreeWrapper(address);
            }
        }
    }
    return r;
}
|
|
|
|
|
2018-11-16 02:43:12 +08:00
|
|
|
// Reserves a GPU virtual address range of at least `size` bytes and reports
// which allocator produced it via `storageType`. `size` may be updated by the
// chosen allocator. Returns 0 on failure.
uint64_t DrmMemoryManager::acquireGpuRange(size_t &size, StorageAllocatorType &storageType, bool specificBitness) {
    // A caller requiring 32-bit addressing is served from the external heap.
    if (specificBitness && this->force32bitAllocations) {
        storageType = BIT32_ALLOCATOR_EXTERNAL;
        return this->allocator32Bit->allocate(size);
    }

    // Reduced address space: carve from the limited-range allocator.
    if (limitedGpuAddressRangeAllocator != nullptr) {
        storageType = INTERNAL_ALLOCATOR_WITH_DYNAMIC_BITRANGE;
        return limitedGpuAddressRangeAllocator->allocate(size);
    }

    // Full address space: reserve a CPU address range via mmap.
    storageType = MMAP_ALLOCATOR;
    return reinterpret_cast<uint64_t>(reserveCpuAddressRange(size));
}
|
|
|
|
|
|
|
|
void DrmMemoryManager::releaseGpuRange(void *address, size_t unmapSize, StorageAllocatorType allocatorType) {
|
|
|
|
if (allocatorType == MMAP_ALLOCATOR) {
|
2019-03-13 22:31:46 +08:00
|
|
|
releaseReservedCpuAddressRange(address, unmapSize);
|
2018-11-16 02:43:12 +08:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t graphicsAddress = static_cast<uint64_t>(reinterpret_cast<uintptr_t>(address));
|
|
|
|
|
|
|
|
if (allocatorType == BIT32_ALLOCATOR_EXTERNAL) {
|
2019-04-25 16:32:56 +08:00
|
|
|
allocator32Bit->free(graphicsAddress, unmapSize);
|
2018-11-16 02:43:12 +08:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (allocatorType == BIT32_ALLOCATOR_INTERNAL) {
|
2019-04-25 16:32:56 +08:00
|
|
|
internal32bitAllocator->free(graphicsAddress, unmapSize);
|
2018-11-16 02:43:12 +08:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
UNRECOVERABLE_IF(allocatorType != INTERNAL_ALLOCATOR_WITH_DYNAMIC_BITRANGE);
|
2019-04-25 16:32:56 +08:00
|
|
|
limitedGpuAddressRangeAllocator->free(graphicsAddress, unmapSize);
|
2018-11-16 02:43:12 +08:00
|
|
|
}
|
|
|
|
|
2019-05-06 20:20:48 +08:00
|
|
|
// Wraps existing host memory [address, address + size) in a GEM userptr
// buffer object. Returns nullptr if the ioctl or the BufferObject allocation
// fails.
NEO::BufferObject *DrmMemoryManager::allocUserptr(uintptr_t address, size_t size, uint64_t flags) {
    drm_i915_gem_userptr userptr = {};
    userptr.user_ptr = address;
    userptr.user_size = size;
    userptr.flags = static_cast<uint32_t>(flags);

    if (this->drm->ioctl(DRM_IOCTL_I915_GEM_USERPTR, &userptr) != 0) {
        return nullptr;
    }

    auto res = new (std::nothrow) BufferObject(this->drm, userptr.handle, false);
    if (!res) {
        DEBUG_BREAK_IF(true);
        return nullptr;
    }
    res->size = size;
    // Default the GPU address to the CPU address; callers may overwrite it
    // with a range taken from one of the GPU address allocators.
    res->gpuAddress = address;

    return res;
}
|
|
|
|
|
2019-02-25 21:11:34 +08:00
|
|
|
// Pins `bo` through the pin buffer object when force-pinning applies:
// feature enabled, pin BB available, allocation flagged for pinning, and the
// allocation large enough to cross the pin threshold.
void DrmMemoryManager::emitPinningRequest(BufferObject *bo, const AllocationData &allocationData) const {
    const bool pinningRequested = forcePinEnabled && pinBB != nullptr && allocationData.flags.forcePin && allocationData.size >= this->pinThreshold;
    if (!pinningRequested) {
        return;
    }
    auto &osContextLinux = static_cast<OsContextLinux &>(getDefaultCommandStreamReceiver(0)->getOsContext());
    pinBB->pin(&bo, 1, osContextLinux.getDrmContextId());
}
|
|
|
|
|
|
|
|
// Builds a fragment-backed allocation around a caller-provided host pointer;
// the fragments in handleStorage carry the actual buffer objects.
DrmAllocation *DrmMemoryManager::createGraphicsAllocation(OsHandleStorage &handleStorage, const AllocationData &allocationData) {
    auto cpuPtr = const_cast<void *>(allocationData.hostPtr);
    auto graphicsAllocation = new DrmAllocation(allocationData.type, nullptr, cpuPtr, castToUint64(cpuPtr),
                                                allocationData.size, MemoryPool::System4KBPages, false);
    graphicsAllocation->fragmentsStorage = handleStorage;
    return graphicsAllocation;
}
|
|
|
|
|
2018-11-30 18:01:33 +08:00
|
|
|
// Allocates fresh, aligned host memory and wraps it in a userptr buffer
// object. With a limited GPU address range, a distinct GPU address is also
// reserved for the bo. Returns nullptr on any failure (with full cleanup).
DrmAllocation *DrmMemoryManager::allocateGraphicsMemoryWithAlignment(const AllocationData &allocationData) {
    const size_t minAlignment = MemoryConstants::allocationAlignment;
    size_t cAlignment = alignUp(std::max(allocationData.alignment, minAlignment), minAlignment);
    // When size == 0 allocate allocationAlignment
    // It's needed to prevent overlapping pages with user pointers
    size_t cSize = std::max(alignUp(allocationData.size, minAlignment), minAlignment);

    auto res = alignedMallocWrapper(cSize, cAlignment);

    if (!res)
        return nullptr;

    BufferObject *bo = allocUserptr(reinterpret_cast<uintptr_t>(res), cSize, 0);

    if (!bo) {
        alignedFreeWrapper(res);
        return nullptr;
    }

    // bo owns the host memory (freed through unreference()).
    bo->isAllocated = true;

    // if limitedRangeAlloction is enabled, memory allocation for bo in the limited Range heap is required
    if (limitedGpuAddressRangeAllocator) {
        StorageAllocatorType allocType;
        bo->gpuAddress = acquireGpuRange(cSize, allocType, false);
        if (!bo->gpuAddress) {
            // Unwind in reverse order: bo handle, bo object, host memory.
            bo->close();
            delete bo;
            alignedFreeWrapper(res);
            return nullptr;
        }

        bo->setUnmapSize(cSize);
        bo->setAllocationType(allocType);
    }

    emitPinningRequest(bo, allocationData);

    auto allocation = new DrmAllocation(allocationData.type, bo, res, bo->gpuAddress, cSize, MemoryPool::System4KBPages, allocationData.flags.multiOsContextCapable);
    // In the limited-range case the host memory is released via the
    // allocation's driver-allocated pointer (freeGraphicsMemoryImpl), since
    // unreference() then returns the GPU range instead of freeing host memory.
    allocation->setDriverAllocatedCpuPtr(limitedGpuAddressRangeAllocator ? res : nullptr);
    return allocation;
}
|
|
|
|
|
2018-11-30 18:01:33 +08:00
|
|
|
// Delegates to the base-class host-ptr path, then optionally emits a pinning
// request. When host pointers are validated at submission time, pinning here
// would be redundant work, so it is skipped.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemoryWithHostPtr(const AllocationData &allocationData) {
    auto allocation = static_cast<DrmAllocation *>(MemoryManager::allocateGraphicsMemoryWithHostPtr(allocationData));

    if (allocation != nullptr && !validateHostPtrMemory) {
        emitPinningRequest(allocation->getBO(), allocationData);
    }
    return allocation;
}
|
|
|
|
|
2019-03-12 19:00:41 +08:00
|
|
|
// Wraps a caller-provided host pointer (aligned down to a page) in a userptr
// buffer object bound to a freshly reserved GPU address range. The original
// pointer's offset within its first page is recorded on the allocation.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemoryForNonSvmHostPtr(const AllocationData &allocationData) {
    if (allocationData.size == 0 || !allocationData.hostPtr)
        return nullptr;

    auto alignedPtr = alignDown(allocationData.hostPtr, MemoryConstants::pageSize);
    auto alignedSize = alignSizeWholePage(allocationData.hostPtr, allocationData.size);
    auto realAllocationSize = alignedSize;
    auto offsetInPage = ptrDiff(allocationData.hostPtr, alignedPtr);

    StorageAllocatorType allocType;
    auto gpuVirtualAddress = acquireGpuRange(alignedSize, allocType, false);
    if (!gpuVirtualAddress) {
        return nullptr;
    }

    BufferObject *bo = allocUserptr(reinterpret_cast<uintptr_t>(alignedPtr), realAllocationSize, 0);
    if (!bo) {
        // Do not leak the reserved GPU range.
        releaseGpuRange(reinterpret_cast<void *>(gpuVirtualAddress), alignedSize, allocType);
        return nullptr;
    }

    // Host memory stays caller-owned; the bo only holds the GPU range.
    bo->isAllocated = false;
    bo->setUnmapSize(alignedSize);
    bo->gpuAddress = gpuVirtualAddress;
    bo->setAllocationType(allocType);

    auto allocation = new DrmAllocation(allocationData.type, bo, const_cast<void *>(alignedPtr), gpuVirtualAddress,
                                        allocationData.size, MemoryPool::System4KBPages, false);
    allocation->setAllocationOffset(offsetInPage);

    return allocation;
}
|
|
|
|
|
2019-02-28 21:12:13 +08:00
|
|
|
// 64kb-page allocations are not supported by the DRM memory manager.
DrmAllocation *DrmMemoryManager::allocateGraphicsMemory64kb(const AllocationData &allocationData) {
    return nullptr;
}
|
|
|
|
|
2019-01-22 19:40:17 +08:00
|
|
|
// Allocates backing storage for an image. Non-tiled images use the regular
// aligned host-memory path; tiled images get a GEM buffer with Y-tiling and a
// reserved GPU address range.
GraphicsAllocation *DrmMemoryManager::allocateGraphicsMemoryForImageImpl(const AllocationData &allocationData, std::unique_ptr<Gmm> gmm) {
    if (!GmmHelper::allowTiling(*allocationData.imgInfo->imgDesc)) {
        auto alloc = allocateGraphicsMemoryWithAlignment(allocationData);
        if (alloc) {
            alloc->setDefaultGmm(gmm.release());
        }
        return alloc;
    }

    StorageAllocatorType allocatorType = UNKNOWN_ALLOCATOR;
    uint64_t gpuRange = acquireGpuRange(allocationData.imgInfo->size, allocatorType, false);
    DEBUG_BREAK_IF(gpuRange == reinterpret_cast<uint64_t>(MAP_FAILED));

    drm_i915_gem_create create = {0, 0, 0};
    create.size = allocationData.imgInfo->size;

    auto ret = this->drm->ioctl(DRM_IOCTL_I915_GEM_CREATE, &create);
    DEBUG_BREAK_IF(ret != 0);
    ((void)(ret));

    auto bo = new (std::nothrow) BufferObject(this->drm, create.handle, true);
    if (!bo) {
        // Fix: previously the acquired GPU address range leaked on this
        // failure path; release it before bailing out.
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), allocationData.imgInfo->size, allocatorType);
        return nullptr;
    }
    bo->size = allocationData.imgInfo->size;
    bo->gpuAddress = gpuRange;

    auto ret2 = bo->setTiling(I915_TILING_Y, static_cast<uint32_t>(allocationData.imgInfo->rowPitch));
    DEBUG_BREAK_IF(ret2 != true);
    ((void)(ret2));

    bo->setUnmapSize(allocationData.imgInfo->size);

    auto allocation = new DrmAllocation(allocationData.type, bo, nullptr, (uint64_t)gpuRange, allocationData.imgInfo->size, MemoryPool::SystemCpuInaccessible, false);
    bo->setAllocationType(allocatorType);
    allocation->setDefaultGmm(gmm.release());
    return allocation;
}
|
|
|
|
|
2018-12-21 17:16:27 +08:00
|
|
|
// Allocates from one of the 32-bit GPU heaps, chosen by
// useInternal32BitAllocator(type). With a host pointer the caller's memory is
// wrapped; otherwise the backing memory is either the heap address itself or,
// with a limited GPU address range, a separately malloc'ed buffer.
DrmAllocation *DrmMemoryManager::allocate32BitGraphicsMemoryImpl(const AllocationData &allocationData) {
    auto internal = useInternal32BitAllocator(allocationData.type);
    auto allocatorToUse = internal ? internal32bitAllocator.get() : allocator32Bit.get();
    auto allocatorType = internal ? BIT32_ALLOCATOR_INTERNAL : BIT32_ALLOCATOR_EXTERNAL;

    if (allocationData.hostPtr) {
        // Host-pointer path: GPU address from the 32-bit heap, host pages
        // remain caller-owned.
        uintptr_t inputPtr = reinterpret_cast<uintptr_t>(allocationData.hostPtr);
        auto allocationSize = alignSizeWholePage(allocationData.hostPtr, allocationData.size);
        auto realAllocationSize = allocationSize;
        auto gpuVirtualAddress = allocatorToUse->allocate(realAllocationSize);
        if (!gpuVirtualAddress) {
            return nullptr;
        }
        auto alignedUserPointer = reinterpret_cast<uintptr_t>(alignDown(allocationData.hostPtr, MemoryConstants::pageSize));
        auto inputPointerOffset = inputPtr - alignedUserPointer;

        BufferObject *bo = allocUserptr(alignedUserPointer, allocationSize, 0);
        if (!bo) {
            allocatorToUse->free(gpuVirtualAddress, realAllocationSize);
            return nullptr;
        }

        // Caller owns the host memory; bo only tracks the heap range.
        bo->isAllocated = false;
        bo->setUnmapSize(realAllocationSize);
        bo->gpuAddress = gpuVirtualAddress;
        bo->setAllocationType(allocatorType);
        auto allocation = new DrmAllocation(allocationData.type, bo, const_cast<void *>(allocationData.hostPtr), ptrOffset(gpuVirtualAddress, inputPointerOffset),
                                            allocationSize, MemoryPool::System4KBPagesWith32BitGpuAddressing, false);
        allocation->set32BitAllocation(true);
        allocation->setGpuBaseAddress(allocatorToUse->getBase());
        return allocation;
    }

    // No host pointer: reserve a heap range first.
    size_t alignedAllocationSize = alignUp(allocationData.size, MemoryConstants::pageSize);
    auto allocationSize = alignedAllocationSize;
    auto res = allocatorToUse->allocate(allocationSize);

    if (!res) {
        return nullptr;
    }

    void *ptrAlloc = reinterpret_cast<void *>(res);

    // With a limited GPU address range the heap address is GPU-only, so the
    // backing host memory must be allocated separately.
    if (limitedGpuAddressRangeAllocator) {
        ptrAlloc = alignedMallocWrapper(alignedAllocationSize, MemoryConstants::allocationAlignment);

        if (!ptrAlloc) {
            allocatorToUse->free(res, allocationSize);
            return nullptr;
        }
    }

    BufferObject *bo = allocUserptr(reinterpret_cast<uintptr_t>(ptrAlloc), alignedAllocationSize, 0);

    if (!bo) {
        if (limitedGpuAddressRangeAllocator) {
            alignedFreeWrapper(ptrAlloc);
        }
        allocatorToUse->free(res, allocationSize);
        return nullptr;
    }

    bo->isAllocated = true;
    bo->setUnmapSize(allocationSize);

    bo->setAllocationType(allocatorType);
    bo->gpuAddress = res;

    // softpin to the GPU address, res if it uses limitedRange Allocation
    auto allocation = new DrmAllocation(allocationData.type, bo, ptrAlloc, res, alignedAllocationSize,
                                        MemoryPool::System4KBPagesWith32BitGpuAddressing, false);

    allocation->set32BitAllocation(true);
    allocation->setGpuBaseAddress(allocatorToUse->getBase());
    // The separately malloc'ed buffer (limited-range case) is released in
    // freeGraphicsMemoryImpl via the driver-allocated pointer.
    allocation->setDriverAllocatedCpuPtr(limitedGpuAddressRangeAllocator ? ptrAlloc : nullptr);
    return allocation;
}
|
|
|
|
|
|
|
|
// Looks up an already-imported shared buffer object by GEM handle. On a hit,
// takes an extra reference and returns it; otherwise returns nullptr.
// Caller must hold mtx.
BufferObject *DrmMemoryManager::findAndReferenceSharedBufferObject(int boHandle) {
    for (const auto &bufferObject : sharingBufferObjects) {
        if (bufferObject->handle == static_cast<int>(boHandle)) {
            bufferObject->reference();
            return bufferObject;
        }
    }
    return nullptr;
}
|
|
|
|
|
|
|
|
// Builds a BufferObject around an imported GEM handle, binding it to a newly
// acquired GPU address range (32-bit heap when requireSpecificBitness applies).
BufferObject *DrmMemoryManager::createSharedBufferObject(int boHandle, size_t size, bool requireSpecificBitness) {
    uint64_t gpuRange = 0llu;
    StorageAllocatorType storageType = UNKNOWN_ALLOCATOR;

    gpuRange = acquireGpuRange(size, storageType, requireSpecificBitness);
    DEBUG_BREAK_IF(gpuRange == reinterpret_cast<uint64_t>(MAP_FAILED));

    auto bo = new (std::nothrow) BufferObject(this->drm, boHandle, true);
    if (!bo) {
        // Fix: release the acquired GPU address range instead of leaking it
        // when the BufferObject cannot be allocated.
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), size, storageType);
        return nullptr;
    }

    bo->size = size;
    bo->gpuAddress = gpuRange;
    // unmapSize > 0 makes unreference() return the range to its allocator.
    bo->setUnmapSize(size);
    bo->setAllocationType(storageType);
    return bo;
}
|
|
|
|
|
2019-04-01 20:04:50 +08:00
|
|
|
// Imports a PRIME/dma-buf fd: converts it to a GEM handle and either reuses
// an already-imported buffer object (adding a reference) or creates and
// registers a new shared one. For images, queries the tiling mode and
// attaches a Gmm.
GraphicsAllocation *DrmMemoryManager::createGraphicsAllocationFromSharedHandle(osHandle handle, const AllocationProperties &properties, bool requireSpecificBitness) {
    // Guards lookup + insertion in sharingBufferObjects against concurrent
    // unreference() of shared objects.
    std::unique_lock<std::mutex> lock(mtx);

    drm_prime_handle openFd = {0, 0, 0};
    openFd.fd = handle;

    auto ret = this->drm->ioctl(DRM_IOCTL_PRIME_FD_TO_HANDLE, &openFd);

    if (ret != 0) {
        int err = errno;
        printDebugString(DebugManager.flags.PrintDebugMessages.get(), stderr, "ioctl(PRIME_FD_TO_HANDLE) failed with %d. errno=%d(%s)\n", ret, err, strerror(err));
        DEBUG_BREAK_IF(ret != 0);
        ((void)(ret));
        return nullptr;
    }

    auto boHandle = openFd.handle;
    auto bo = findAndReferenceSharedBufferObject(boHandle);

    if (bo == nullptr) {
        // Seeking to the end of the dma-buf fd yields its size.
        size_t size = lseekFunction(handle, 0, SEEK_END);
        bo = createSharedBufferObject(boHandle, size, requireSpecificBitness);

        if (!bo) {
            return nullptr;
        }

        pushSharedBufferObject(bo);
    }

    lock.unlock();

    auto drmAllocation = new DrmAllocation(properties.allocationType, bo, reinterpret_cast<void *>(bo->gpuAddress), bo->size,
                                           handle, MemoryPool::SystemCpuInaccessible, false);

    if (requireSpecificBitness && this->force32bitAllocations) {
        drmAllocation->set32BitAllocation(true);
        drmAllocation->setGpuBaseAddress(getExternalHeapBaseAddress());
    } else if (this->limitedGpuAddressRangeAllocator.get()) {
        drmAllocation->setGpuBaseAddress(this->limitedGpuAddressRangeAllocator->getBase());
    }

    if (properties.imgInfo) {
        // Imported images carry a tiling mode set by the exporter; query it.
        drm_i915_gem_get_tiling getTiling = {0};
        getTiling.handle = boHandle;
        ret = this->drm->ioctl(DRM_IOCTL_I915_GEM_GET_TILING, &getTiling);

        DEBUG_BREAK_IF(ret != 0);
        ((void)(ret));

        properties.imgInfo->tilingMode = TilingModeHelper::convert(getTiling.tiling_mode);
        Gmm *gmm = new Gmm(*properties.imgInfo);
        drmAllocation->setDefaultGmm(gmm);
    }
    return drmAllocation;
}
|
|
|
|
|
|
|
|
// Creates a padded copy of an allocation: re-wraps the source host memory in
// a new userptr bo bound to a larger, freshly acquired GPU address range.
GraphicsAllocation *DrmMemoryManager::createPaddedAllocation(GraphicsAllocation *inputGraphicsAllocation, size_t sizeWithPadding) {
    uint64_t gpuRange = 0llu;
    StorageAllocatorType storageType = UNKNOWN_ALLOCATOR;

    gpuRange = acquireGpuRange(sizeWithPadding, storageType, false);

    auto srcPtr = inputGraphicsAllocation->getUnderlyingBuffer();
    auto srcSize = inputGraphicsAllocation->getUnderlyingBufferSize();
    auto alignedSrcSize = alignUp(srcSize, MemoryConstants::pageSize);
    auto alignedPtr = (uintptr_t)alignDown(srcPtr, MemoryConstants::pageSize);
    auto offset = (uintptr_t)srcPtr - alignedPtr;

    BufferObject *bo = allocUserptr(alignedPtr, alignedSrcSize, 0);
    if (!bo) {
        // Fix: previously the acquired GPU address range leaked here; return
        // it to its allocator before bailing out.
        releaseGpuRange(reinterpret_cast<void *>(gpuRange), sizeWithPadding, storageType);
        return nullptr;
    }
    bo->gpuAddress = gpuRange;
    bo->setUnmapSize(sizeWithPadding);
    bo->setAllocationType(storageType);
    return new DrmAllocation(inputGraphicsAllocation->getAllocationType(), bo, srcPtr, ptrOffset(gpuRange, offset), sizeWithPadding,
                             inputGraphicsAllocation->getMemoryPool(), false);
}
|
|
|
|
|
2018-05-08 16:00:23 +08:00
|
|
|
// Registers the allocation's underlying memory as a driver-owned fragment so
// host-pointer lookups can resolve it.
void DrmMemoryManager::addAllocationToHostPtrManager(GraphicsAllocation *gfxAllocation) {
    auto drmAllocation = static_cast<DrmAllocation *>(gfxAllocation);

    FragmentStorage fragment = {};
    fragment.driverAllocation = true;
    fragment.fragmentCpuPointer = gfxAllocation->getUnderlyingBuffer();
    fragment.fragmentSize = alignUp(gfxAllocation->getUnderlyingBufferSize(), MemoryConstants::pageSize);
    fragment.osInternalStorage = new OsHandle();
    fragment.residency = new ResidencyData();
    fragment.osInternalStorage->bo = drmAllocation->getBO();
    hostPtrManager->storeFragment(fragment);
}
|
|
|
|
|
|
|
|
// Unregisters a driver-owned fragment created by addAllocationToHostPtrManager.
// The OS storage and residency data are deleted only once the host-ptr manager
// reports the last reference is gone.
void DrmMemoryManager::removeAllocationFromHostPtrManager(GraphicsAllocation *gfxAllocation) {
    auto cpuPtr = gfxAllocation->getUnderlyingBuffer();
    auto fragment = hostPtrManager->getFragment(cpuPtr);
    if (fragment == nullptr || !fragment->driverAllocation) {
        return;
    }
    // Capture before releaseHostPtr() may invalidate the fragment.
    OsHandle *osStorageToRelease = fragment->osInternalStorage;
    ResidencyData *residencyDataToRelease = fragment->residency;
    if (hostPtrManager->releaseHostPtr(cpuPtr)) {
        delete osStorageToRelease;
        delete residencyDataToRelease;
    }
}
|
|
|
|
|
2017-12-21 07:45:38 +08:00
|
|
|
// Releases a DRM allocation: its Gmms, any driver-allocated host memory,
// host-ptr fragments or shared handle, any reserved CPU address range, and
// finally the buffer object reference.
void DrmMemoryManager::freeGraphicsMemoryImpl(GraphicsAllocation *gfxAllocation) {
    DrmAllocation *input;
    input = static_cast<DrmAllocation *>(gfxAllocation);
    for (auto handleId = 0u; handleId < maxHandleCount; handleId++) {
        if (gfxAllocation->getGmm(handleId)) {
            delete gfxAllocation->getGmm(handleId);
        }
    }

    // nullptr when no driver-side host buffer was attached (safe to pass).
    alignedFreeWrapper(gfxAllocation->getDriverAllocatedCpuPtr());

    if (gfxAllocation->fragmentsStorage.fragmentCount) {
        // Fragment-backed (host-ptr) allocations have no single bo to release.
        cleanGraphicsMemoryCreatedFromHostPtr(gfxAllocation);
        delete gfxAllocation;
        return;
    }

    // Keep the bo pointer alive past the allocation's deletion.
    BufferObject *search = input->getBO();

    if (gfxAllocation->peekSharedHandle() != Sharing::nonSharedResource) {
        closeFunction(gfxAllocation->peekSharedHandle());
    }
    void *reserveAddress = gfxAllocation->getReservedAddressPtr();
    if (reserveAddress) {
        releaseReservedCpuAddressRange(reserveAddress, gfxAllocation->getReservedAddressSize());
    }
    delete gfxAllocation;

    unreference(search);
}
|
2017-12-21 07:45:38 +08:00
|
|
|
|
2019-03-04 21:50:26 +08:00
|
|
|
void DrmMemoryManager::handleFenceCompletion(GraphicsAllocation *allocation) {
    // Block indefinitely (-1 timeout) until the GPU is done with this BO.
    auto drmAllocation = static_cast<DrmAllocation *>(allocation);
    drmAllocation->getBO()->wait(-1);
}
|
|
|
|
|
2017-12-21 07:45:38 +08:00
|
|
|
uint64_t DrmMemoryManager::getSystemSharedMemory() {
|
|
|
|
uint64_t hostMemorySize = MemoryConstants::pageSize * (uint64_t)(sysconf(_SC_PHYS_PAGES));
|
|
|
|
|
2018-03-15 22:54:28 +08:00
|
|
|
drm_i915_gem_context_param getContextParam = {};
|
|
|
|
getContextParam.param = I915_CONTEXT_PARAM_GTT_SIZE;
|
|
|
|
auto ret = drm->ioctl(DRM_IOCTL_I915_GEM_CONTEXT_GETPARAM, &getContextParam);
|
2017-12-21 07:45:38 +08:00
|
|
|
DEBUG_BREAK_IF(ret != 0);
|
|
|
|
((void)(ret));
|
2018-03-15 22:54:28 +08:00
|
|
|
uint64_t gpuMemorySize = getContextParam.value;
|
2017-12-21 07:45:38 +08:00
|
|
|
|
|
|
|
return std::min(hostMemorySize, gpuMemorySize);
|
|
|
|
}
|
|
|
|
|
|
|
|
uint64_t DrmMemoryManager::getMaxApplicationAddress() {
|
2019-03-08 19:21:29 +08:00
|
|
|
return is64bit ? MemoryConstants::max64BitAppAddress : MemoryConstants::max32BitAppAddress;
|
2017-12-21 07:45:38 +08:00
|
|
|
}
|
|
|
|
|
2018-03-12 22:24:46 +08:00
|
|
|
uint64_t DrmMemoryManager::getInternalHeapBaseAddress() {
|
2019-04-25 16:32:56 +08:00
|
|
|
return this->internal32bitAllocator->getBase();
|
2018-03-12 22:24:46 +08:00
|
|
|
}
|
|
|
|
|
2019-04-16 17:47:47 +08:00
|
|
|
uint64_t DrmMemoryManager::getExternalHeapBaseAddress() {
|
2019-04-25 16:32:56 +08:00
|
|
|
return this->allocator32Bit->getBase();
|
2019-04-16 17:47:47 +08:00
|
|
|
}
|
|
|
|
|
2018-02-28 19:09:48 +08:00
|
|
|
MemoryManager::AllocationStatus DrmMemoryManager::populateOsHandles(OsHandleStorage &handleStorage) {
    BufferObject *allocatedBos[maxFragmentsCount];
    uint32_t numberOfBosAllocated = 0;
    uint32_t indexesOfAllocatedBos[maxFragmentsCount];

    for (unsigned int i = 0; i < maxFragmentsCount; i++) {
        // If there is no fragment it means it already exists.
        if (!handleStorage.fragmentStorageData[i].osHandleStorage && handleStorage.fragmentStorageData[i].fragmentSize) {
            handleStorage.fragmentStorageData[i].osHandleStorage = new OsHandle();
            handleStorage.fragmentStorageData[i].residency = new ResidencyData();

            // Wrap the host pointer range of this fragment in a userptr BO.
            handleStorage.fragmentStorageData[i].osHandleStorage->bo = allocUserptr((uintptr_t)handleStorage.fragmentStorageData[i].cpuPtr,
                                                                                   handleStorage.fragmentStorageData[i].fragmentSize,
                                                                                   0);
            if (!handleStorage.fragmentStorageData[i].osHandleStorage->bo) {
                handleStorage.fragmentStorageData[i].freeTheFragment = true;
                // Also mark the fragments created earlier in this call:
                // they are not stored in hostPtrManager yet, so unless
                // cleanOsHandles() frees them they would leak.
                for (uint32_t j = 0; j < numberOfBosAllocated; j++) {
                    handleStorage.fragmentStorageData[indexesOfAllocatedBos[j]].freeTheFragment = true;
                }
                return AllocationStatus::Error;
            }

            allocatedBos[numberOfBosAllocated] = handleStorage.fragmentStorageData[i].osHandleStorage->bo;
            indexesOfAllocatedBos[numberOfBosAllocated] = i;
            numberOfBosAllocated++;
        }
    }

    if (validateHostPtrMemory) {
        // Validate the user pointers by pinning the freshly created BOs.
        auto &osContextLinux = static_cast<OsContextLinux &>(getDefaultCommandStreamReceiver(0)->getOsContext());
        int result = pinBB->pin(allocatedBos, numberOfBosAllocated, osContextLinux.getDrmContextId());

        if (result == EFAULT) {
            // EFAULT means the host pointer itself is invalid; hand the
            // fragments back to cleanOsHandles() for release.
            for (uint32_t i = 0; i < numberOfBosAllocated; i++) {
                handleStorage.fragmentStorageData[indexesOfAllocatedBos[i]].freeTheFragment = true;
            }
            return AllocationStatus::InvalidHostPointer;
        } else if (result != 0) {
            // Any other pin failure: still mark the fragments created in this
            // call for release so they are not leaked on the error path.
            for (uint32_t i = 0; i < numberOfBosAllocated; i++) {
                handleStorage.fragmentStorageData[indexesOfAllocatedBos[i]].freeTheFragment = true;
            }
            return AllocationStatus::Error;
        }
    }

    // Success: publish the new fragments to the host pointer manager.
    for (uint32_t i = 0; i < numberOfBosAllocated; i++) {
        hostPtrManager->storeFragment(handleStorage.fragmentStorageData[indexesOfAllocatedBos[i]]);
    }
    return AllocationStatus::Success;
}
|
2018-03-27 20:01:04 +08:00
|
|
|
|
2017-12-21 07:45:38 +08:00
|
|
|
void DrmMemoryManager::cleanOsHandles(OsHandleStorage &handleStorage) {
    // Tear down only the fragments that were flagged for release.
    for (unsigned int i = 0; i < maxFragmentsCount; i++) {
        auto &fragmentData = handleStorage.fragmentStorageData[i];
        if (!fragmentData.freeTheFragment) {
            continue;
        }
        BufferObject *bo = fragmentData.osHandleStorage->bo;
        if (bo != nullptr) {
            // Wait for the GPU before dropping what is expected to be
            // the last reference to the BO.
            bo->wait(-1);
            auto refCount = unreference(bo, true);
            DEBUG_BREAK_IF(refCount != 1u);
            ((void)(refCount));
        }
        delete fragmentData.osHandleStorage;
        fragmentData.osHandleStorage = nullptr;
        delete fragmentData.residency;
        fragmentData.residency = nullptr;
    }
}
|
|
|
|
|
|
|
|
// Returns the buffer object used to pin/validate host-pointer fragments
// (see populateOsHandles). Non-owning accessor.
BufferObject *DrmMemoryManager::getPinBB() const {
    return pinBB;
}
|
2018-02-27 06:23:43 +08:00
|
|
|
|
|
|
|
bool DrmMemoryManager::setDomainCpu(GraphicsAllocation &graphicsAllocation, bool writeEnable) {
    DEBUG_BREAK_IF(writeEnable); //unsupported path (for CPU writes call SW_FINISH ioctl in unlockResource)

    auto bo = static_cast<DrmAllocation *>(&graphicsAllocation)->getBO();
    if (bo == nullptr) {
        return false;
    }

    // Move the buffer object to the CPU read (and optionally write) domain,
    // including waiting on flushes to occur.
    drm_i915_gem_set_domain setDomainArgs = {};
    setDomainArgs.handle = bo->peekHandle();
    setDomainArgs.read_domains = I915_GEM_DOMAIN_CPU;
    setDomainArgs.write_domain = writeEnable ? I915_GEM_DOMAIN_CPU : 0;

    auto ioctlResult = drm->ioctl(DRM_IOCTL_I915_GEM_SET_DOMAIN, &setDomainArgs);
    return ioctlResult == 0;
}
|
|
|
|
|
2019-01-24 18:51:33 +08:00
|
|
|
void *DrmMemoryManager::lockResourceImpl(GraphicsAllocation &graphicsAllocation) {
    // Fast path: the allocation already has a CPU pointer; just move the
    // BO to the CPU domain and hand that pointer back.
    auto cpuPtr = graphicsAllocation.getUnderlyingBuffer();
    if (cpuPtr != nullptr) {
        auto success = setDomainCpu(graphicsAllocation, false);
        DEBUG_BREAK_IF(!success);
        (void)success;
        return cpuPtr;
    }

    auto bo = static_cast<DrmAllocation &>(graphicsAllocation).getBO();
    if (bo == nullptr) {
        return nullptr;
    }

    // No existing mapping: ask the kernel to mmap the BO for CPU access.
    drm_i915_gem_mmap mmapArgs = {};
    mmapArgs.handle = bo->peekHandle();
    mmapArgs.size = bo->peekSize();
    if (drm->ioctl(DRM_IOCTL_I915_GEM_MMAP, &mmapArgs) != 0) {
        return nullptr;
    }

    // Remember the mapping on the BO so unlockResourceImpl can undo it.
    bo->setLockedAddress(reinterpret_cast<void *>(mmapArgs.addr_ptr));

    auto success = setDomainCpu(graphicsAllocation, false);
    DEBUG_BREAK_IF(!success);
    (void)success;

    return bo->peekLockedAddress();
}
|
|
|
|
|
2019-01-24 18:51:33 +08:00
|
|
|
void DrmMemoryManager::unlockResourceImpl(GraphicsAllocation &graphicsAllocation) {
    // Allocations with a persistent CPU pointer were never mmapped by
    // lockResourceImpl, so there is nothing to undo.
    if (graphicsAllocation.getUnderlyingBuffer() != nullptr) {
        return;
    }

    auto bo = static_cast<DrmAllocation &>(graphicsAllocation).getBO();
    if (bo == nullptr) {
        return;
    }

    // Unmap the CPU mapping created during lock and clear it on the BO.
    releaseReservedCpuAddressRange(bo->peekLockedAddress(), bo->peekSize());
    bo->setLockedAddress(nullptr);
}
|
2019-03-13 22:31:46 +08:00
|
|
|
void *DrmMemoryManager::reserveCpuAddressRange(size_t size) {
|
|
|
|
void *reservePtr = mmapFunction(nullptr, size, PROT_NONE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
|
|
|
|
return reservePtr;
|
|
|
|
}
|
|
|
|
|
|
|
|
// Returns a previously reserved CPU virtual address range to the OS.
// `reserved` and `size` must match what reserveCpuAddressRange (or the
// kernel mmap in lockResourceImpl) produced.
void DrmMemoryManager::releaseReservedCpuAddressRange(void *reserved, size_t size) {
    munmapFunction(reserved, size);
}
|
2019-03-26 18:59:46 +08:00
|
|
|
} // namespace NEO
|