performance: Reuse GPU timestamp instead of KMD escape

This can be enabled only if the related debug flag is set.
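
The idea, roughly: pay the expensive KMD escape once to sample a GPU/CPU
timestamp pair, then synthesize later GPU timestamps from the cheap CPU
clock delta. Below is a minimal sketch of that scheme, not the NEO
implementation; GpuTimestampCache, kmdQuery, gpuTicksPerNs, and the
reuseEnabled flag are all hypothetical names, with reuseEnabled standing
in for whatever debug flag gates the feature.

    // Hedged sketch of GPU timestamp reuse; names are illustrative, not NEO's.
    #include <chrono>
    #include <cstdint>
    #include <functional>
    #include <optional>

    class GpuTimestampCache { // hypothetical helper, for illustration only
      public:
        GpuTimestampCache(std::function<uint64_t()> kmdQuery, double gpuTicksPerNs, bool reuseEnabled)
            : kmdQuery(std::move(kmdQuery)), gpuTicksPerNs(gpuTicksPerNs), reuseEnabled(reuseEnabled) {}

        uint64_t getGpuTimestamp() {
            if (reuseEnabled && cached) {
                // Fast path: skip the KMD escape; extrapolate from elapsed CPU time.
                auto elapsedNs = std::chrono::duration_cast<std::chrono::nanoseconds>(
                                     std::chrono::steady_clock::now() - cached->cpuTime)
                                     .count();
                return cached->gpuTicks + static_cast<uint64_t>(elapsedNs * gpuTicksPerNs);
            }
            // Slow path: one real KMD escape; remember the GPU/CPU pair for reuse.
            uint64_t ticks = kmdQuery();
            cached = Sample{ticks, std::chrono::steady_clock::now()};
            return ticks;
        }

      private:
        struct Sample {
            uint64_t gpuTicks;                             // GPU timestamp from the escape call
            std::chrono::steady_clock::time_point cpuTime; // CPU time captured alongside it
        };

        std::function<uint64_t()> kmdQuery; // stand-in for the real KMD escape
        double gpuTicksPerNs;               // device timer resolution
        bool reuseEnabled;                  // would mirror the debug flag mentioned above
        std::optional<Sample> cached;
    };

Presumably the extrapolated value can drift from the real GPU clock,
which would explain keeping the feature opt-in behind a debug flag so
the default path stays exact.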

Related-To: NEO-10615

Signed-off-by: Szymon Morek <szymon.morek@intel.com>
Author: Szymon Morek
Date: 2024-04-30 10:59:04 +00:00
Committed-by: Compute-Runtime-Automation
Parent: c1004b77bf
Commit: 83e8ae4a20

17 changed files with 601 additions and 74 deletions


@@ -50,11 +50,11 @@ const char *MockDevice::getProductAbbrev() const {
 MockDevice::MockDevice(ExecutionEnvironment *executionEnvironment, uint32_t rootDeviceIndex)
     : RootDevice(executionEnvironment, rootDeviceIndex) {
     UltDeviceFactory::initializeMemoryManager(*executionEnvironment);
+    auto &hwInfo = getHardwareInfo();
     if (!getOSTime()) {
         getRootDeviceEnvironmentRef().osTime = MockOSTime::create();
+        getRootDeviceEnvironmentRef().osTime->setDeviceTimerResolution(hwInfo);
     }
-    auto &hwInfo = getHardwareInfo();
     executionEnvironment->rootDeviceEnvironments[rootDeviceIndex]->setHwInfoAndInitHelpers(&hwInfo);
     executionEnvironment->rootDeviceEnvironments[rootDeviceIndex]->initGmm();
     if (!executionEnvironment->rootDeviceEnvironments[rootDeviceIndex]->memoryOperationsInterface) {