#include <iostream>
#include <time.h>
// Measure elapsed CPU time with clock() from <time.h>.
// NOTE: the original used unqualified cout/endl with no <iostream>
// include and no using-directive, so it did not compile.
int main() {
    clock_t begin_time = clock();
    // ... place the code to be timed here ...
    clock_t end_time = clock();   // was inconsistently named end_clock

    // clock() returns CPU ticks; divide by CLOCKS_PER_SEC for seconds.
    const double elapsed_sec =
        static_cast<double>(end_time - begin_time) / CLOCKS_PER_SEC;

    std::cout << "Running time is: " << elapsed_sec << "s" << std::endl;          // seconds
    std::cout << "Running time is: " << elapsed_sec * 1000 << "ms" << std::endl;  // milliseconds
}
#include <iostream>
#include <time.h>
using namespace std;
int main() {
    // Print the current calendar time shifted to UTC+8, first as the raw
    // time_t value and then as a formatted date-time string.
    time_t now;
    time(&now);
    now += 8 * 3600;              // shift from UTC to UTC+8 (time-zone hack)
    tm* shifted = gmtime(&now);   // interpret the shifted value as if it were UTC

    cout << now << endl;          // raw (already-shifted) epoch seconds
    printf("%d-%02d-%02d %02d:%02d:%02d\n",
           shifted->tm_year + 1900,   // tm_year counts years since 1900
           shifted->tm_mon + 1,       // tm_mon is 0-based
           shifted->tm_mday,
           shifted->tm_hour,
           shifted->tm_min,
           shifted->tm_sec);
    return 0;
}
Linux (POSIX gettimeofday, microsecond resolution):
#include <sys/time.h>
// Placeholder for the workload being timed; the gettimeofday() calls in
// main() bracket this call.
void f()
{
//...
}
int main()
{
    // Time f() with gettimeofday(): wall-clock timestamps with
    // microsecond fields, taken immediately before and after the call.
    struct timeval start, stop;
    gettimeofday(&start, NULL);
    f();
    gettimeofday(&stop, NULL);
    // Elapsed time of f() in microseconds:
    // deltaT = (stop.tv_sec - start.tv_sec) * 1000000 + stop.tv_usec - start.tv_usec
    return 0;
}
Linux (clock_gettime with CLOCK_MONOTONIC, nanosecond resolution):
#include <time.h>
// Placeholder for the workload being timed; the clock_gettime() calls in
// main() bracket this call.
void f()
{
//...
}
int main()
{
    // Time f() with clock_gettime() on CLOCK_MONOTONIC: a clock that is
    // not affected by wall-clock adjustments, with nanosecond fields.
    timespec begin, finish;
    clock_gettime(CLOCK_MONOTONIC, &begin);
    f();
    clock_gettime(CLOCK_MONOTONIC, &finish);
    // Elapsed time of f() in nanoseconds:
    // deltaT = (finish.tv_sec - begin.tv_sec) * 1000000000 + finish.tv_nsec - begin.tv_nsec
    return 0;
}