Reduce the size of hashmap_nodes.h file
Instead of keeping the whole array of test_node_t objects, just keep the
hash values and generate the rest of each test_node_t on the fly. The
test still works this way, and the file size has been reduced from 2M to
90k.
(cherry picked from commit 2310c322c0)
This commit is contained in:
committed by
Nicki Křížek
parent
4f01e8d33c
commit
95faca929e
File diff suppressed because it is too large
Load Diff
@@ -233,17 +233,15 @@ test_hashmap_iterator(bool random_data) {
|
||||
isc_hashmap_create(mctx, HASHMAP_MIN_BITS, &hashmap);
|
||||
assert_non_null(hashmap);
|
||||
|
||||
if (random_data) {
|
||||
for (size_t i = 0; i < count; i++) {
|
||||
/* short keys */
|
||||
snprintf((char *)nodes[i].key, 16, "%u",
|
||||
(unsigned int)i);
|
||||
strlcat((char *)nodes[i].key, " key of a raw hashmap!!",
|
||||
16);
|
||||
for (size_t i = 0; i < count; i++) {
|
||||
/* short keys */
|
||||
snprintf((char *)nodes[i].key, 16, "%u", (unsigned int)i);
|
||||
strlcat((char *)nodes[i].key, " key of a raw hashmap!!", 16);
|
||||
if (random_data) {
|
||||
nodes[i].hashval = isc_hash32(nodes[i].key, 16, true);
|
||||
} else {
|
||||
nodes[i].hashval = test_hashvals[i];
|
||||
}
|
||||
} else {
|
||||
memmove(nodes, test_nodes, sizeof(test_nodes));
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < count; i++) {
|
||||
|
||||
Reference in New Issue
Block a user