| author | mzuenni <mzuenni@users.noreply.github.com> | 2024-07-28 22:54:40 +0200 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2024-07-28 22:54:40 +0200 |
| commit | 8d11c6c8213f46f0fa19826917c255edd5d43cb1 (patch) | |
| tree | 96d75baff33d5a04b5a60f1a41f514a26c716874 /test/graph/havelHakimi.cpp | |
| parent | 8c33b4e0d3030cfed17fc64b4fe41133339f6d87 (diff) | |
Test (#4)
* update
* moved content in subdir
* rename file
* add test setup
* add test setup
* add github action
* automatically test all cpp files
* timeout after 10s
* set ulimit and don't zero memory
* test build pdf
* install latexmk
* update
* update
* ngerman
* fonts
* removed old code
* add first test
* added tests
* test in sorted order
* more tests
* simplified test
* more tests
* fix suffix tree
* fixes and improvements
* done ust lst directly
* fix swap
* add links to pdf
* fix constants
* add primorial
* add comment
* various improvements
* more tests
* added missing stuff
* more tests
* fix tests
* more tests
* more tests
* more tests
* fix recursion?
* test trie
* more tests
* only use python temporarily for listings
* only use python temporarily for listings
* more tests
* fix longestCommonSubstring
* more tests
* more tests
* made code more similar
* fix?
* more tests
* more tests
* more tests
* add ahoCorasick test + limit 4GB stack size
* more tests
* fix test
* add additional test
* more tests
* more tests
* fix?
* better fix
* fix virtual tree
* more tests
* more tests
* recursive closest pair
* more tests
* decrease limit
* new tests
* more tests
* fix name
* more tests
* add test
* new test
* more tests
* more tests
* more tests
* more tests
* new test and content
* new code
* new code
* larger tests
* fix and test
* new test
* new test
* update pdf
* remove comments
* new test
* more tests
* more testcases
* more tests
* increased limit
* more tests
* more tests
* more tests
* new tests
* more tests
* shortened code
* new test
* add basic tests for bigint
* more tests
* removed old files
* new test
* ignore some files
* more auto more ccw
* fix test
* more tests
* fix
* new tests
* more tests
* more tests
* stronger test
* actually verify delaunay...
* more tests
* fix header
* more tests
* run tests in parallel?
* test parallel?
* add --missing
* separate workflows
* test
* is the pdf checked?
* separate workflows
* fix workflow
* more workflows
---------
Co-authored-by: Yidi <noob999noob999@gmail.com>
Diffstat (limited to 'test/graph/havelHakimi.cpp')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | test/graph/havelHakimi.cpp | 65 |
1 file changed, 65 insertions, 0 deletions
diff --git a/test/graph/havelHakimi.cpp b/test/graph/havelHakimi.cpp
new file mode 100644
index 0000000..71476ec
--- /dev/null
+++ b/test/graph/havelHakimi.cpp
@@ -0,0 +1,65 @@
+#include "../util.h"
+#include <graph/havelHakimi.cpp>
+
+void stress_test() {
+	ll queries = 0;
+	for (int tries = 0; tries < 200'000; tries++) {
+		int n = Random::integer<int>(1, 30);
+		int m = Random::integer<int>(0, n*(n-1) / 2 + 1);
+		Graph g(n);
+		g.erdosRenyi(m);
+
+		vector<int> expected(n);
+		for (int i = 0; i < n; i++) expected[i] = g.deg(i);
+
+		auto res = havelHakimi(expected);
+		if (sz(res) != n) cerr << "error: wrong number of nodes" << FAIL;
+		vector<vector<int>> rev(n);
+		vector<int> got(n);
+		for (int i = 0; i < n; i++) {
+			got[i] = sz(res[i]);
+			for (int j : res[i]) {
+				if (j < 0 || j >= n) cerr << "error: invalid edge" << FAIL;
+				rev[j].push_back(i);
+			}
+		}
+
+		for (int i = 0; i < n; i++) {
+			sort(all(res[i]));
+			sort(all(rev[i]));
+			if (res[i] != rev[i]) cerr << "error: graph is directed" << FAIL;
+			for (int j : res[i]) if (j == i) cerr << "error: graph has loop" << FAIL;
+			for (int j = 1; j < sz(res[i]); j++) {
+				if (res[i][j] == res[i][j-1]) cerr << "error: multiedge" << FAIL;
+			}
+		}
+
+		if (expected != got) cerr << "error" << FAIL;
+		queries += n;
+	}
+	cerr << "tested random queries: " << queries << endl;
+}
+
+constexpr int N = 200'000;
+constexpr int M = 1'000'000;
+void performance_test() {
+	timer t;
+	Graph g(N);
+	g.erdosRenyi(M);
+
+	vector<int> expected(N);
+	for (int i = 0; i < N; i++) expected[i] = g.deg(i);
+
+	t.start();
+	auto res = havelHakimi(expected);
+	t.stop();
+	hash_t hash = 0;
+	for (auto& v : res) hash += sz(v);
+	if (t.time > 500) cerr << "too slow: " << t.time << FAIL;
+	cerr << "tested performance: " << t.time << "ms (hash: " << hash << ")" << endl;
+}
+
+int main() {
+	stress_test();
+	performance_test();
+}
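
For context, the routine under test, `graph/havelHakimi.cpp`, is not part of this diff; the test only relies on its interface (a degree sequence goes in, one adjacency list per vertex comes out). The sketch below is a minimal, assumed implementation of the classic Havel–Hakimi construction against that interface, not the repository's code; the function name `havelHakimiSketch` and the convention of returning an empty result for a non-graphical sequence are illustrative assumptions.

```cpp
#include <bits/stdc++.h>
using namespace std;

// Hedged sketch of the Havel–Hakimi construction (not the code under test).
// Greedily take the vertex with the largest remaining degree d and connect it
// to the d other vertices with the largest remaining degrees; by the
// Havel–Hakimi theorem this succeeds exactly when the sequence is graphical.
vector<vector<int>> havelHakimiSketch(vector<int> deg) {
	int n = (int)deg.size();
	vector<vector<int>> adj(n);
	priority_queue<pair<int, int>> pq;            // (remaining degree, vertex)
	for (int i = 0; i < n; i++) pq.push({deg[i], i});
	while (!pq.empty()) {
		auto [d, u] = pq.top();
		pq.pop();
		if (d == 0) continue;                     // vertex already satisfied
		if ((int)pq.size() < d) return {};        // too few partners -> not graphical (assumed convention)
		vector<pair<int, int>> rest;
		for (int k = 0; k < d; k++) {             // connect u to the d largest remaining degrees
			auto [dv, v] = pq.top();
			pq.pop();
			if (dv == 0) return {};               // only degree-0 partners left -> not graphical
			adj[u].push_back(v);
			adj[v].push_back(u);
			rest.push_back({dv - 1, v});
		}
		for (auto& p : rest) pq.push(p);          // reinsert partners with decreased degrees
	}
	return adj;
}

int main() {
	// Degree sequence of a path on four vertices (1-2-2-1).
	auto g = havelHakimiSketch({1, 2, 2, 1});
	for (int u = 0; u < (int)g.size(); u++) {
		cout << u << ":";
		for (int v : g[u]) cout << " " << v;
		cout << "\n";
	}
}
```

Note that the stress test above only feeds degree sequences taken from actual Erdős–Rényi graphs, so every queried sequence is graphical and the non-graphical branch is never exercised there; the checks focus on the output being a simple undirected graph with exactly the requested degrees.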
