Feat: run benchmarks on internal tests

This commit is contained in:
Dorian Zedler 2022-07-07 17:45:33 +02:00
parent 44bff07e02
commit 2d255e851a
Signed by: dorian
GPG key ID: 989DE36109AFA354
3 changed files with 64 additions and 40 deletions

View file

@ -20,8 +20,9 @@ enum argumentParseResult {
struct configuration {
char* filename;
int implementationToUse;
int doBenchmark;
bool doBenchmark;
int benchmarkingCycles;
bool runTests;
};
/**

View file

@ -18,9 +18,12 @@ int stringToInt(char *string) {
}
void help(char *progname) {
fprintf(
stderr,
"usage: %s [-V implementation] [-B [repetitons]] [-h] [--help] file\n",
fprintf(stdout,
"Usage: %s [options...] <file>\n\
-V, --version <version> use a specific implementation\n\
-B, --benchmark [repetitions] benchmark the execution and optionally repeat it\n\
-T, --test run self-test. If set, no file is needed\n\
-h, --help print this help page\n",
progname);
}
@ -31,30 +34,34 @@ enum argumentParseResult parseArguments(int argc, char **argv,
}
c->implementationToUse = 0;
c->doBenchmark = 0;
c->doBenchmark = false;
c->benchmarkingCycles = 1;
c->runTests = false;
int opt;
while (1) {
static struct option longOptions[] = {{"help", no_argument, NULL, 'h'},
static struct option longOptions[] = {
{"version", required_argument, NULL, 'V'},
{"benchmark", optional_argument, NULL, 'B'},
{"test", optional_argument, NULL, 'T'},
{"help", no_argument, NULL, 'h'},
{NULL}};
int longOptionIndex = 0;
opt = getopt_long(argc, argv, "V:B::h", longOptions, &longOptionIndex);
opt = getopt_long(argc, argv, "V:B::Th", longOptions, &longOptionIndex);
if (-1 == opt) break;
switch (opt) {
case 'B':
c->doBenchmark = 1;
c->doBenchmark = true;
if (optarg != NULL) {
c->benchmarkingCycles = stringToInt(optarg);
if (errno == 0) break;
fprintf(stderr,
"%s: invalid argument, has to be int -- 'B' got '%s'\n",
argv[0], optarg);
help(argv[0]);
return RESULT_EXIT_FAILURE;
}
break;
@ -63,24 +70,25 @@ enum argumentParseResult parseArguments(int argc, char **argv,
if (errno == 0) break;
fprintf(stderr, "%s: invalid argument, has to be int -- 'V' got '%s'\n",
argv[0], optarg);
help(argv[0]);
return RESULT_EXIT_FAILURE;
case 'T':
c->runTests = true;
break;
case 'h':
help(argv[0]);
return RESULT_EXIT_SUCCESS;
default:
help(argv[0]);
return RESULT_EXIT_FAILURE;
}
}
if (argc > optind) {
c->filename = argv[optind];
} else {
} else if (!c->runTests) {
fprintf(stderr, "%s: missing positional argument -- 'file'\n", argv[0]);
help(argv[0]);
return RESULT_EXIT_FAILURE;
}

View file

@ -5,37 +5,51 @@
#include "../lib/md2.h"
// Returns true when val is approx. equal to exp.
static bool runTest(const char* message, const char* expectedHash) {
static bool runTest(struct configuration* c, const char* message,
const char* expectedHash) {
uint8_t out[16];
double duration =
run_benchmark(1, md2_hash, strlen(message), (uint8_t*)message, out);
double duration = 0.0;
if (c->doBenchmark) {
duration = run_benchmark(c->benchmarkingCycles, md2_hash, strlen(message),
(uint8_t*)message, out);
} else {
md2_hash(strlen(message), (uint8_t*)message, out);
}
char hash[32];
md2_encode_hash(out, hash);
bool ok = !strncmp(hash, expectedHash, 32);
printf("%s: md2(%s) %s == %s, took: %f\n", "not ok" + (4 * ok), message, hash,
expectedHash, duration);
printf("%s: md2(%s) %s == %s", "not ok" + (4 * ok), message, hash,
expectedHash);
if (c->doBenchmark) {
printf("took: %f", duration);
}
printf("\n");
return ok;
}
unsigned runTests(void) {
unsigned runTests(struct configuration* c) {
unsigned failed = 0;
// src: https://datatracker.ietf.org/doc/html/rfc1319#appendix-A.5
failed += !runTest("", "8350e5a3e24c153df2275c9f80692773");
failed += !runTest("a", "32ec01ec4a6dac72c0ab96fb34c0b5d1");
failed += !runTest("abc", "da853b0d3f88d99b30283a69e6ded6bb");
failed += !runTest("message digest", "ab4f496bfb2a530b219ff33031fe06b0");
failed += !runTest("jebdjcslfhwfdig", "e1b69085c6f6e36cb8fe8d98ed3f2c35");
failed += !runTest("0123456789abcde", "d95629645108a20ab4d70e8545e0723b");
failed += !runTest("0123456789abcdef", "12c8dfa285f14e1af8c5254e7092d0d3");
failed += !runTest("0123456789abcdefg", "e4d0efded5ef7b6843a5ba47e1171347");
failed += !runTest("abcdefghijklmnopqrstuvwxyz",
"4e8ddff3650292ab5a4108c3aa47940b");
failed += !runTest(c, "", "8350e5a3e24c153df2275c9f80692773");
failed += !runTest(c, "a", "32ec01ec4a6dac72c0ab96fb34c0b5d1");
failed += !runTest(c, "abc", "da853b0d3f88d99b30283a69e6ded6bb");
failed += !runTest(c, "message digest", "ab4f496bfb2a530b219ff33031fe06b0");
failed += !runTest(c, "jebdjcslfhwfdig", "e1b69085c6f6e36cb8fe8d98ed3f2c35");
failed += !runTest(c, "0123456789abcde", "d95629645108a20ab4d70e8545e0723b");
failed += !runTest(c, "0123456789abcdef", "12c8dfa285f14e1af8c5254e7092d0d3");
failed +=
!runTest("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
!runTest(c, "0123456789abcdefg", "e4d0efded5ef7b6843a5ba47e1171347");
failed += !runTest(c, "abcdefghijklmnopqrstuvwxyz",
"4e8ddff3650292ab5a4108c3aa47940b");
failed += !runTest(
c, "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
"da33def2a42df13975352846c30338cd");
failed += !runTest(
c,
"123456789012345678901234567890123456789012345678901234567890123456789012"
"34567890",
"d5976f79d83d3a0dc9806c3c66f3efd8");
@ -69,12 +83,16 @@ int main(int argc, char** argv) {
}
printf(
"Hashing file: %s\nUsing implementation: %d, doing benchmark: %d, "
"Using implementation: %d, doing benchmark: %d, "
"benchmark cycles: %d\n",
c.filename, c.implementationToUse, c.doBenchmark, c.benchmarkingCycles);
c.implementationToUse, c.doBenchmark, c.benchmarkingCycles);
// runTests();
// return 0;
if (c.runTests) {
printf("Running tests...\n\n");
return runTests(&c);
}
printf("Hashing file %s...\n\n", c.filename);
size_t len;
uint8_t* data = read_file(c.filename, &len);
if (data == NULL) {
@ -82,9 +100,6 @@ int main(int argc, char** argv) {
return EXIT_FAILURE;
}
printf("File read with size: %zu\n", len);
printf("\n");
uint8_t out[16];
char hash[32];
if (c.doBenchmark) {