How long a 1,000,000,000-iteration loop takes in several languages

First, a disclaimer: this is not meant to show which language is better or worse; at most, the numbers reflect how aggressively each compiler or runtime optimizes the loop.

C:

#include <stdio.h>
#include <time.h>

int main(void) {
    long a = 0;
    clock_t start, stop;
    double duration;

    start = clock();
    for (int i = 0; i < 1000000000; i++) {
        a++;
    }
    stop = clock();

    duration = ((double)(stop - start)) / CLOCKS_PER_SEC;
    printf("%f", duration);

    return 0;
}

 

Result: 2.9 s
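
That figure assumes the loop actually runs. With optimization enabled (for example gcc -O2), the compiler can drop the loop entirely, since a is never read afterwards, and the reported time falls to almost zero. Below is a minimal sketch, not from the original post, that keeps the loop alive under optimization by marking the counter volatile:

#include <stdio.h>
#include <time.h>

int main(void) {
    volatile long a = 0;   /* volatile: the compiler must perform every increment */
    clock_t start = clock();
    for (int i = 0; i < 1000000000; i++) {
        a++;
    }
    clock_t stop = clock();
    printf("%f\n", (double)(stop - start) / CLOCKS_PER_SEC);
    return 0;
}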

 

C++:

 

#include <iostream>
#include <time.h>
using namespace std;

int main() {
    long a = 0;
    clock_t start, ends;
    double duration;

    start = clock();
    for (int i = 0; i < 1000000000; i++) {
        a++;
    }
    ends = clock();

    cout << "Running Time : " << (double)(ends - start) / CLOCKS_PER_SEC << endl;

    return 0;
}

 

Result: 3.0 s

PHP 7:

<?php

$t1 = microtime(true);
for ($i = 0; $i < 1000000000; $i++) {
}
$t2 = microtime(true);
echo 'Elapsed: ' . round($t2 - $t1, 3) . ' s';

?>

 

Result: 11 s

Java 1.8:

 

import java.io.*;
import java.lang.*;
import java.util.*;

class test {
    public static void main(String[] args) throws java.lang.Exception {
        long start = System.currentTimeMillis();
        long a = 0;
        for (long i = 0; i < 1000000000; i++) {
            a++;
        }
        long end = System.currentTimeMillis();

        System.out.println(a);
        System.out.println(end - start);
    }
}

 

Result: 0.449 s

Node.js:

 

var start = new Date().getTime();

var a = 0;
for (var i = 0; i < 1000000000; i++) {
    a++;
}

var end = new Date().getTime();

console.log((end - start) + "ms");

 

Result: 1.17 s

C#:

 

using System;
using System.Diagnostics;

public class Test
{
    public static void Main()
    {
        Stopwatch sw = new Stopwatch();

        long a = 0;
        sw.Start();

        for (long i = 0; i < 1000000000; i++) {
            a++;
        }

        sw.Stop();
        TimeSpan ts2 = sw.Elapsed;
        Console.WriteLine("Stopwatch took {0} ms in total.", ts2.TotalMilliseconds);
    }
}

 

Result: 0.541 s

Groovy:

 

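A minimal sketch of the equivalent Groovy benchmark (assumed, not the author's original listing; Groovy accepts the same Java-style for loop as the Java version above):

long start = System.currentTimeMillis()
long a = 0
for (long i = 0; i < 1000000000; i++) {
    a++
}
long end = System.currentTimeMillis()

println a
println "${end - start} ms"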

 

Result: 6.2 s

Python 3:

#!/usr/bin/env python
# _*_ coding:utf-8 _*_

import time
# from numba import jit
#
#
# @jit
def test():
    a = 0

    start = time.time()
    while a < 1000000000:
        a = a + 1
    end = time.time()

    print(end - start)
    print(a)

if __name__ == "__main__":
    test()

Result: 90 s

However, if the jit lines in the Python code above are uncommented so the loop is JIT-compiled with numba, the measured time drops to 0.035 s.
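
For reference, a minimal sketch of that JIT variant (assuming numba is installed; unlike the code above, it keeps the timing outside the compiled function and warms it up first, so compilation cost is not included in the measurement):

import time
from numba import jit

@jit(nopython=True)          # compile the counting loop to machine code
def count():
    a = 0
    while a < 1000000000:
        a = a + 1
    return a

count()                      # first call triggers compilation (warm-up)
start = time.time()
total = count()
end = time.time()

print(end - start)
print(total)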

 
