testGetPerformance: Gbit is 10^9 and not 2^30
@@ -203,7 +203,7 @@ public:
         duration = seconds + nseconds/1000000.0;
 
         double getPerSec = iterations*channels/duration;
-        double gbit = getPerSec*arraySize*sizeof(double)*8/(1024*1024*1024); // * bits / giga
+        double gbit = getPerSec*arraySize*sizeof(double)*8/(1000*1000*1000); // * bits / giga; NO, it's really 1000 and not 1024
         if (verbose)
             printf("%5.6f seconds, %.3f (x %d = %.3f) gets/s, data throughput %5.3f Gbits/s\n",
                 duration, iterations/duration, channels, getPerSec, gbit);
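For reference, a minimal standalone sketch (not part of the commit; the sample rate and variable names are illustrative only) showing how the decimal divisor (Gbit, 10^9) and the binary divisor (Gibit, 2^30) diverge for the same raw bit rate:

```cpp
#include <cstdio>

int main() {
    // Illustrative raw throughput: 12.5 * 10^9 bits per second.
    double bitsPerSec = 12.5e9;

    // Decimal gigabit, as used after this commit.
    double gbit  = bitsPerSec / (1000.0 * 1000.0 * 1000.0);

    // Binary gibibit, as the old code effectively computed.
    double gibit = bitsPerSec / (1024.0 * 1024.0 * 1024.0);

    // The two differ by about 7%, which is the error the fix removes.
    printf("%.3f Gbit/s vs %.3f Gibit/s\n", gbit, gibit);
    return 0;
}
```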