Calculate the runtime of a sorting algorithm in C

I have a question about how to measure the runtime of just the sorting step. I searched a lot on the internet and only found superficial material, nothing that helps with what I need.

I have an assignment where I have to measure only the time the algorithm takes to sort data read from an external file (for this test I am using the numbers 1 to 1000, shuffled and without repetition, in teste.txt). The code runs, but whenever I test with a teste.txt containing fewer than 1000 unsorted values, the reported runtime is zero. When a classmate told the teacher he was testing with 10,000 values, the teacher smiled and said to test with 10, 20, 50, 100, 150, 200... and so on. Is there any way to time the algorithm so that it works both for small inputs and for larger ones?

#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include <sys/time.h>

#define TAM 1000

void SelectionSort_1(int vetor[]);


int main(void){

    int iCont, vetor[TAM];

    FILE *ent;

    ent = fopen("teste.txt", "r");

    if(ent == NULL){
        printf("Erro! Nao consegui abrir o arquivo...\n");
        exit(1);
    }

    for(iCont = 0; iCont < TAM; iCont++){
        //printf("Reading position %d\n", iCont);
        if(fscanf(ent, "%d", &vetor[iCont]) != 1){
            printf("Oops, tried to read past the end of the file...\n");
            break;
        }
    }

    SelectionSort_1(vetor);


    fclose(ent);

    printf("\n\nOrdenado: \n\n");
    for(iCont = 0; iCont < TAM; iCont++){
        printf("%d ", vetor[iCont]);
    }

    printf("\n\n\n\n");

    return 0;
}

void SelectionSort_1(int vetor[]){
    int iCont, jCont, min, aux;

    struct timeval tv1, tv2;
    gettimeofday(&tv1, NULL);   /* timestamp taken just before sorting starts */

    for(iCont = 0; iCont < TAM - 1; iCont++){
        /* find the index of the smallest remaining element */
        min = iCont;
        for(jCont = iCont + 1; jCont < TAM; jCont++){
            if(vetor[jCont] < vetor[min])
                min = jCont;
        }
        /* swap it into position iCont */
        if(iCont != min){
            aux = vetor[iCont];
            vetor[iCont] = vetor[min];
            vetor[min] = aux;
        }
    }

    gettimeofday(&tv2, NULL);   /* timestamp taken right after sorting ends */
    printf("Total time = %.8f seconds\n",
           (double) (tv2.tv_usec - tv1.tv_usec) / 1000000 +
           (double) (tv2.tv_sec - tv1.tv_sec));
}
Author: Jefferson Quesado, 2018-07-13

1 answer

There is the clock() function from the time.h library; with it you get the number of processor clock ticks :)

Using it is easy: capture the clock right before the sort starts and again as soon as it finishes; to get the elapsed time, subtract the two values and divide by the constant CLOCKS_PER_SEC, much like in this code.

/* clock example: frequency of primes */
#include <stdio.h>      /* printf */
#include <time.h>       /* clock_t, clock, CLOCKS_PER_SEC */
#include <math.h>       /* sqrt */

int frequency_of_primes (int n) {
  int i,j;
  int freq=n-1;
  for (i=2; i<=n; ++i) for (j=sqrt(i);j>1;--j) if (i%j==0) {--freq; break;}
  return freq;
}

int main ()
{
  clock_t t;
  int f;
  t = clock();
  printf ("Calculating...\n");
  f = frequency_of_primes (99999);
  printf ("The number of primes lower than 100,000 is: %d\n",f);
  t = clock() - t;
  printf ("It took me %d clicks (%f seconds).\n",t,((float)t)/CLOCKS_PER_SEC);
  return 0;
}
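
As for why your measurement comes out as zero on small files: both clock() and gettimeofday() advance in discrete ticks, and sorting a few hundred integers can finish between two ticks. The usual workaround is to repeat the sort many times on a fresh copy of the data and divide the total by the number of repetitions. Below is a minimal sketch of that idea, not a drop-in patch for your program: it assumes your SelectionSort_1 is stripped of its internal timing code (so it only sorts), and the helper name time_sort and the REPS value are my own choices.

#include <stdio.h>
#include <string.h>     /* memcpy */
#include <time.h>       /* clock_t, clock, CLOCKS_PER_SEC */

#define TAM  1000
#define REPS 1000       /* arbitrary; raise it until the total is well above one timer tick */

void SelectionSort_1(int vetor[]);  /* your sort, without its own timing code */

/* Returns the average time of one sort, measured over REPS runs. */
double time_sort(const int original[]) {
    int copia[TAM];
    clock_t t0, t1;
    int r;

    t0 = clock();
    for (r = 0; r < REPS; r++) {
        /* re-copy every run: re-sorting already-sorted data would skew the result */
        memcpy(copia, original, sizeof copia);
        SelectionSort_1(copia);
    }
    t1 = clock();

    /* the O(n) memcpy is negligible next to the O(n^2) sort */
    return ((double)(t1 - t0) / CLOCKS_PER_SEC) / REPS;
}

Call it after reading the file, e.g. printf("Average: %.8f s\n", time_sort(vetor));. Two things to keep in mind: clock() measures CPU time rather than wall-clock time, which is usually what you want when comparing algorithms; and on POSIX systems clock_gettime(CLOCK_MONOTONIC, ...) offers much finer resolution if you prefer a single-run measurement.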
Author: Luiz Filipe, 2018-07-15 14:07:41