Mirror of https://github.com/intel/compute-runtime.git
performance: Reuse GPU timestamps by default on Windows
Related-To: NEO-10615
Signed-off-by: Szymon Morek <szymon.morek@intel.com>
committed by Compute-Runtime-Automation
parent 1c98420518
commit bca3fecaa0
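Background for the test changes below: with reuse enabled (now the default on Windows), getGpuCpuTime does not escape to the kernel-mode driver (KMD) on every call; a recently queried GPU/CPU timestamp pair is reused within a refresh timeout and the GPU value is extrapolated from the CPU clock, which is what the EnableReusingGpuTimestamps debug flag gates. The following is a minimal, self-contained sketch of that caching pattern; class and function names are illustrative only and are not the NEO API.

// Illustrative sketch (hypothetical names, not the NEO implementation):
// reuse a cached GPU/CPU timestamp pair within a refresh timeout instead of
// querying the KMD on every call.
#include <chrono>
#include <cstdint>
#include <functional>

struct TimestampPair {
    uint64_t gpuTicks = 0;  // raw GPU timestamp (device ticks)
    uint64_t cpuTimeNs = 0; // CPU time in nanoseconds
};

class CachedGpuCpuTimer {
  public:
    CachedGpuCpuTimer(std::function<TimestampPair()> kmdQuery, double gpuTicksPerNs,
                      uint64_t refreshTimeoutNs, bool reuseEnabled)
        : kmdQuery(std::move(kmdQuery)), gpuTicksPerNs(gpuTicksPerNs),
          refreshTimeoutNs(refreshTimeoutNs), reuseEnabled(reuseEnabled) {}

    TimestampPair get() {
        const uint64_t nowNs = cpuNowNs();
        if (!reuseEnabled || !hasCache || nowNs - cached.cpuTimeNs >= refreshTimeoutNs) {
            cached = kmdQuery(); // expensive escape to the kernel-mode driver
            hasCache = true;
            ++kmdCalls;
            return cached;
        }
        // Reuse path: extrapolate the GPU timestamp from the elapsed CPU time using
        // the device timer resolution (why the tests now call setDeviceTimerResolution).
        const uint64_t elapsedNs = nowNs - cached.cpuTimeNs;
        return {cached.gpuTicks + static_cast<uint64_t>(elapsedNs * gpuTicksPerNs), nowNs};
    }

    uint64_t kmdCallCount() const { return kmdCalls; }

  private:
    static uint64_t cpuNowNs() {
        using namespace std::chrono;
        return static_cast<uint64_t>(
            duration_cast<nanoseconds>(steady_clock::now().time_since_epoch()).count());
    }

    std::function<TimestampPair()> kmdQuery;
    double gpuTicksPerNs;
    uint64_t refreshTimeoutNs;
    bool reuseEnabled;
    TimestampPair cached{};
    bool hasCache = false;
    uint64_t kmdCalls = 0;
};

With reuse disabled the cache is bypassed and every call reaches the KMD, which is the behavior the renamed tests in the diff assert via the getGpuCpuTimeImplCalled counter.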
@@ -390,7 +390,7 @@ TEST_F(DrmTimeTest, whenGettingMaxGpuTimeStampValueWhenForceFlagSetThenCallToKmd
     EXPECT_EQ(deviceTime->getGpuCpuTimeImplCalled, 2u);
 }
 
-TEST_F(DrmTimeTest, givenReusingTimestampsDisabledWhenGetTimestampRefreshTimeoutThenReturnCorrectValue) {
+TEST_F(DrmTimeTest, givenReusingTimestampsDisabledWhenGetGpuCpuTimeThenAlwaysCallKmd) {
     DebugManagerStateRestore restore;
     debugManager.flags.EnableReusingGpuTimestamps.set(0);
     // Recreate mock to apply debug flag
@@ -398,5 +398,11 @@ TEST_F(DrmTimeTest, givenReusingTimestampsDisabledWhenGetTimestampRefreshTimeout
     osTime = MockOSTimeLinux::create(*rootDeviceEnvironment.osInterface);
     osTime->setResolutionFunc(resolutionFuncTrue);
     osTime->setGetTimeFunc(getTimeFuncTrue);
-    EXPECT_EQ(0ul, osTime->getTimestampRefreshTimeout());
+    auto deviceTime = osTime->getDeviceTime();
+    TimeStampData gpuCpuTime;
+    osTime->getGpuCpuTime(&gpuCpuTime);
+    EXPECT_EQ(deviceTime->getGpuCpuTimeImplCalled, 1u);
+
+    osTime->getGpuCpuTime(&gpuCpuTime);
+    EXPECT_EQ(deviceTime->getGpuCpuTimeImplCalled, 2u);
 }
@@ -53,6 +53,8 @@ struct OSTimeWinTest : public ::testing::Test {
         rootDeviceEnvironment.osInterface = std::make_unique<OSInterface>();
         rootDeviceEnvironment.osInterface->setDriverModel(std::unique_ptr<DriverModel>(wddm));
         osTime = std::unique_ptr<MockOSTimeWin>(new MockOSTimeWin(*rootDeviceEnvironment.osInterface));
+        auto hwInfo = rootDeviceEnvironment.getHardwareInfo();
+        osTime->setDeviceTimerResolution(*hwInfo);
     }
 
     void TearDown() override {
@@ -62,8 +64,11 @@ struct OSTimeWinTest : public ::testing::Test {
 };
 
 TEST_F(OSTimeWinTest, given36BitGpuTimeStampWhenGpuTimeStampOverflowThenGpuTimeDoesNotDecrease) {
+    auto &rootDeviceEnvironment = *executionEnvironment.rootDeviceEnvironments[0];
+    auto hwInfo = rootDeviceEnvironment.getHardwareInfo();
     auto deviceTime = new MockDeviceTimeWin();
     osTime->deviceTime.reset(deviceTime);
+    osTime->setDeviceTimerResolution(*hwInfo);
 
     TimeStampData gpuCpuTime = {0ull, 0ull};
 
@@ -95,8 +100,11 @@ TEST_F(OSTimeWinTest, given36BitGpuTimeStampWhenGpuTimeStampOverflowThenGpuTimeD
 }
 
 TEST_F(OSTimeWinTest, given64BitGpuTimeStampWhenGpuTimeStampOverflowThenOverflowsAreNotDetected) {
+    auto &rootDeviceEnvironment = *executionEnvironment.rootDeviceEnvironments[0];
+    auto hwInfo = rootDeviceEnvironment.getHardwareInfo();
     auto deviceTime = new MockDeviceTimeWin();
     osTime->deviceTime.reset(deviceTime);
+    osTime->setDeviceTimerResolution(*hwInfo);
 
     TimeStampData gpuCpuTime = {0ull, 0ull};
 
@@ -183,9 +191,12 @@ TEST(OSTimeWinTests, givenOSInterfaceWhenGetGpuCpuTimeThenReturnsSuccess) {
     auto wddm = new WddmMock(rootDeviceEnvironment);
     TimeStampData gpuCpuTime01 = {};
     TimeStampData gpuCpuTime02 = {};
-    std::unique_ptr<OSInterface> osInterface(new OSInterface());
-    osInterface->setDriverModel(std::unique_ptr<DriverModel>(wddm));
-    auto osTime = OSTime::create(osInterface.get());
+    rootDeviceEnvironment.osInterface = std::make_unique<OSInterface>();
+    rootDeviceEnvironment.osInterface->setDriverModel(std::unique_ptr<DriverModel>(wddm));
+    wddm->init();
+    auto osTime = OSTime::create(rootDeviceEnvironment.osInterface.get());
+    auto hwInfo = rootDeviceEnvironment.getHardwareInfo();
+    osTime->setDeviceTimerResolution(*hwInfo);
     auto success = osTime->getGpuCpuTime(&gpuCpuTime01);
     EXPECT_TRUE(success);
     EXPECT_NE(0u, gpuCpuTime01.cpuTimeinNS);
@@ -208,8 +219,6 @@ TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueThenHwInfoBasedValueIsRetur
 }
 
 TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueWithinIntervalThenReuseFromPreviousCall) {
-    DebugManagerStateRestore restore;
-    debugManager.flags.EnableReusingGpuTimestamps.set(true);
     osTime->overrideQueryPerformanceCounterFunction(queryPerformanceCounterMock);
     LARGE_INTEGER frequency = {};
     frequency.QuadPart = NSEC_PER_SEC;
@@ -245,8 +254,6 @@ TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueWithinIntervalThenReuseFrom
 }
 
 TEST_F(OSTimeWinTest, whenGettingGpuTimeStampValueAfterIntervalThenCallToKmdAndAdaptTimeout) {
-    DebugManagerStateRestore restore;
-    debugManager.flags.EnableReusingGpuTimestamps.set(true);
     osTime->overrideQueryPerformanceCounterFunction(queryPerformanceCounterMock);
     LARGE_INTEGER frequency = {};
     frequency.QuadPart = NSEC_PER_SEC;
@@ -304,8 +311,6 @@ TEST_F(OSTimeWinTest, whenGetGpuCpuTimeFailedThenReturnFalse) {
 }
 
 TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueAfterFlagSetThenCallToKmd) {
-    DebugManagerStateRestore restore;
-    debugManager.flags.EnableReusingGpuTimestamps.set(true);
     TimeStampData gpuCpuTime;
     auto &rootDeviceEnvironment = *executionEnvironment.rootDeviceEnvironments[0];
     auto hwInfo = rootDeviceEnvironment.getHardwareInfo();
@@ -324,8 +329,6 @@ TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueAfterFlagSetThenCallToKmd)
 }
 
 TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueWhenForceFlagSetThenCallToKmd) {
-    DebugManagerStateRestore restore;
-    debugManager.flags.EnableReusingGpuTimestamps.set(true);
     osTime->overrideQueryPerformanceCounterFunction(queryPerformanceCounterMock);
     LARGE_INTEGER frequency = {};
     frequency.QuadPart = NSEC_PER_SEC;
@@ -349,7 +352,7 @@ TEST_F(OSTimeWinTest, whenGettingMaxGpuTimeStampValueWhenForceFlagSetThenCallToK
     EXPECT_EQ(deviceTime->getGpuCpuTimeImplCalled, 2u);
 }
 
-TEST_F(OSTimeWinTest, givenReusingTimestampsDisabledWhenGetTimestampRefreshTimeoutThenReturnCorrectValue) {
+TEST_F(OSTimeWinTest, givenReusingTimestampsDisabledWhenGetGpuCpuTimeThenAlwaysCallKmd) {
     DebugManagerStateRestore restore;
     debugManager.flags.EnableReusingGpuTimestamps.set(0);
     auto &rootDeviceEnvironment = *executionEnvironment.rootDeviceEnvironments[0];
@@ -357,5 +360,10 @@ TEST_F(OSTimeWinTest, givenReusingTimestampsDisabledWhenGetTimestampRefreshTimeo
     auto deviceTime = new MockDeviceTimeWin();
     osTime->deviceTime.reset(deviceTime);
     osTime->setDeviceTimerResolution(*hwInfo);
-    EXPECT_EQ(0ul, osTime->getTimestampRefreshTimeout());
+    TimeStampData gpuCpuTime;
+    osTime->getGpuCpuTime(&gpuCpuTime);
+    EXPECT_EQ(deviceTime->getGpuCpuTimeImplCalled, 1u);
+
+    osTime->getGpuCpuTime(&gpuCpuTime);
+    EXPECT_EQ(deviceTime->getGpuCpuTimeImplCalled, 2u);
 }
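Both renamed tests, Linux and Windows, pin down the same contract: with EnableReusingGpuTimestamps set to 0, every getGpuCpuTime call escapes to the KMD, so the mock's call counter advances once per call. Below is a hedged usage sketch of that contract against the illustrative CachedGpuCpuTimer defined above the diff; again, hypothetical names rather than the NEO mocks.

#include <cassert>

int main() {
    uint64_t fakeGpuTicks = 0;
    auto fakeKmdQuery = [&] {
        fakeGpuTicks += 100; // pretend the GPU clock advanced since the last escape
        return TimestampPair{fakeGpuTicks, 0};
    };

    // Reuse disabled: the refresh timeout is irrelevant and each get() must hit the KMD.
    CachedGpuCpuTimer timer(fakeKmdQuery, /*gpuTicksPerNs=*/1.0,
                            /*refreshTimeoutNs=*/1'000'000, /*reuseEnabled=*/false);
    timer.get();
    assert(timer.kmdCallCount() == 1); // mirrors EXPECT_EQ(getGpuCpuTimeImplCalled, 1u)
    timer.get();
    assert(timer.kmdCallCount() == 2); // mirrors EXPECT_EQ(getGpuCpuTimeImplCalled, 2u)
    return 0;
}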