C/C++ example: measuring a thread's CPU time (kernel/user) vs. wall-clock time on Windows
FILETIME ct; FILETIME et; FILETIME kt; FILETIME ut; __int64 ktStart = 0 ; __int64 utStart = 0 ; __int64 ktStop = 0 ; __int64 utStop = 0 ; LARGE_INTEGER ss,ee,fq; HANDLE cur; int i,j,dump; cur = GetCurrentThread(); QueryPerformanceCounter( & ss); // QueryPerformanceCounter的单位应该是cpu的时间周期时间,1/cpu频率 GetThreadTimes(cur, & ct, & et, & kt, & ut); // 起始时的内核时间和用户时间。不过精度也就10ms级 ktStart = (((__int64)kt.dwHighDateTime) << 32 ) + (__int64)kt.dwLowDateTime; utStart = (((__int64)ut.dwHighDateTime) << 32 ) + (__int64)ut.dwLowDateTime; // put your code here, such as: Sleep( 10000 ); // sleep消耗的不是程序所用的时间。scanf函数等待的时间等也一样不是程序消耗的时间 for (i = 0 ;i < 10000 ;i ++ ) { dump = 0 ; for (j = 0 ;j < 10000 ;j ++ ) { dump += j * j; } } // finish GetThreadTimes(cur, & ct, & et, & kt, & ut); // 结束时的内核时间和用户时间 ktStop = (((__int64)kt.dwHighDateTime) << 32 ) + (__int64)kt.dwLowDateTime; utStop = (((__int64)ut.dwHighDateTime) << 32 ) + (__int64)ut.dwLowDateTime; printf( " Kernel Time: %llu ms/n " , (ktStop - ktStart) / 10000 ); // FILETIME的单位是100ns,除10000变ms printf( " User Time: %llu ms/n " , (utStop - utStart) / 10000 ); QueryPerformanceCounter( & ee); QueryPerformanceFrequency( & fq); // QueryPerformanceCounter高精度的,但记得并不是代码实际消耗的时间。会把sleep的那10秒也计入。 printf( " Time: %llu us/n " , (ee.QuadPart - ss.QuadPart) / (fq.QuadPart / 1000000 ));