Add function to generate uthash after write
authorStefano Babic <sbabic@denx.de>
Fri, 10 Sep 2021 20:42:33 +0000 (22:42 +0200)
committerStefano Babic <sbabic@denx.de>
Tue, 12 Oct 2021 09:11:47 +0000 (11:11 +0200)
The uthash tables are generated only when a zck is read, while its
index is parsed. When a comparison is done between a parsed zck and a
freshly generated one, both need a uthash database to look up hashes,
so add a function that creates the database after a zck is created.
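
A minimal sketch of the intended call sequence is below (not part of
the patch): zck_create(), zck_init_write() and zck_close() are the
existing zchunk write entry points assumed here, write_one_chunk() is
only an illustrative helper, and zck_generate_hashdb() is the new call.
Invoking it right after zck_close() is an assumption; any point after
the index has been built should do.

    #include <stddef.h>
    #include <zck.h>

    /* Write a single chunk and return a context whose uthash database
       is ready for comparison; cleanup on error is omitted for brevity. */
    static zckCtx *write_one_chunk(int dst_fd, const char *buf, size_t len)
    {
        zckCtx *zck = zck_create();
        if (!zck || !zck_init_write(zck, dst_fd))
            return NULL;
        if (zck_write(zck, buf, len) < 0 || zck_end_chunk(zck) < 0)
            return NULL;
        if (!zck_close(zck))
            return NULL;
        /* new in this patch: build the uthash tables of the written index */
        if (!zck_generate_hashdb(zck))
            return NULL;
        return zck;
    }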

Add a function to assemble a zck with chunks taken from another zck
(the source). The resulting zck will reuse every chunk from the source
whose hash matches, comparing the uncompressed digests when the two
files use different compressors.
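
Roughly how the two new functions are meant to work together (a sketch,
not part of the patch; zck_create(), zck_init_read() and zck_free() are
the existing read/cleanup entry points assumed here, and
mark_reusable_chunks() is an illustrative helper):

    #include <stdbool.h>
    #include <zck.h>

    /*
     * tgt is parsed from the index of the file to assemble; zck_init_read()
     * fills its uthash tables while parsing. gen is a zck generated locally
     * by writing, so its tables must be created explicitly before the
     * comparison.
     */
    static bool mark_reusable_chunks(int tgt_fd, zckCtx *gen, zckCtx **tgt_out)
    {
        zckCtx *tgt = zck_create();
        if (!tgt || !zck_init_read(tgt, tgt_fd))
            goto fail;
        /* new: build the hash tables of the generated zck */
        if (!zck_generate_hashdb(gen))
            goto fail;
        /* new: chunks of tgt with a matching digest in gen are marked
           valid and linked to the source chunk; the others point back
           at themselves */
        if (!zck_find_matching_chunks(gen, tgt))
            goto fail;
        *tgt_out = tgt;
        return true;
    fail:
        zck_free(&tgt);
        return false;
    }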

Signed-off-by: Stefano Babic <sbabic@denx.de>
include/zck.h.in
src/lib/dl/dl.c
src/lib/index/index_common.c
src/lib/index/index_read.c

index 3be82ff2d44b17b212f03a369572edb83a3e231d..64e2ba1266642269fd09f40da505943c248367f5 100644 (file)
@@ -86,7 +86,8 @@ ssize_t zck_write(zckCtx *zck, const char *src, const size_t src_size)
 /* Create a chunk boundary */
 ssize_t zck_end_chunk(zckCtx *zck)
     __attribute__ ((warn_unused_result));
-
+/* Create the uthash database if not present (done automatically when reading) */
+bool zck_generate_hashdb(zckCtx *zck);
 
 /*******************************************************************
  * Common functions for finishing a zchunk file
@@ -174,7 +175,8 @@ int zck_failed_chunks(zckCtx *zck)
     __attribute__ ((warn_unused_result));
 /* Reset failed chunks to become missing */
 void zck_reset_failed_chunks(zckCtx *zck);
-
+/* Find chunks in tgt that are already present in src */
+bool zck_find_matching_chunks(zckCtx *src, zckCtx *tgt);
 
 /*******************************************************************
  * The functions should be all you need to read and write a zchunk
index 1242669302ab6a6b91ec722d2b526bbb214aec94..303b42b2ee87d53995db07151846476f8ebdaa24 100644 (file)
@@ -256,6 +256,47 @@ bool PUBLIC zck_copy_chunks(zckCtx *src, zckCtx *tgt) {
     return true;
 }
 
+bool PUBLIC zck_find_matching_chunks(zckCtx *src, zckCtx *tgt) {
+
+    if (!src || !tgt)
+        return false;
+
+    zckIndex *src_info = &(src->index);
+    zckIndex *tgt_info = &(tgt->index);
+    zckChunk *tgt_idx = tgt_info->first;
+    while(tgt_idx) {
+        zckChunk *f = NULL;
+        /*
+         * This function can be called multiple times with different
+         * zckCtx *src, so the resulting tgt can end up with chunks
+         * coming from several sources.  Skip chunks that an earlier
+         * comparison already marked as valid.
+         */
+        if (tgt_idx->valid) {
+            tgt_idx = tgt_idx->next;
+            continue;
+        }
+        /*
+         * Compare the digests of the compressed data if the same
+         * compressor was used
+         */
+        if (src->comp.type == tgt->comp.type) {
+            HASH_FIND(hh, src_info->ht, tgt_idx->digest, tgt_idx->digest_size, f);
+        } else if (src->has_uncompressed_source && tgt->has_uncompressed_source) {
+            HASH_FIND(hhuncomp, src_info->htuncomp, tgt_idx->digest_uncompressed, tgt_idx->digest_size, f);
+        }
+        if(f && f->length == tgt_idx->length) {
+            tgt_idx->valid = 1;
+            tgt_idx->src = f;
+        } else {
+            tgt_idx->src = tgt_idx;
+        }
+        tgt_idx = tgt_idx->next;
+    }
+
+    return true;
+}
+
 ssize_t PUBLIC zck_dl_get_bytes_downloaded(zckDL *dl) {
     ALLOCD_INT(NULL, dl);
 
index b20b713e999494fc54d4a2c6f21aab1e42dcd45f..28bb267a433d77ea5ee2fde680eee8b984679bb4 100644 (file)
@@ -48,6 +48,7 @@ void index_clean(zckIndex *index) {
         return;
 
     HASH_CLEAR(hh, index->ht);
+    HASH_CLEAR(hhuncomp, index->htuncomp);
     if(index->first) {
         zckChunk *next;
         zckChunk *tmp=index->first;
index 36357da1d40505f1af9b6c3e25512f3e90f1d2e5..db2e86e47892431136dff60524e23c82fab3ec87 100644 (file)
@@ -302,3 +302,29 @@ void PUBLIC zck_reset_failed_chunks(zckCtx *zck) {
             idx->valid = 0;
     return;
 }
+
+bool PUBLIC zck_generate_hashdb(zckCtx *zck) {
+    if (zck->index.ht || zck->index.htuncomp) {
+        zck_log(ZCK_LOG_ERROR, "Hash DB already present, refusing to recreate it");
+        return false;
+    }
+
+    for(zckChunk *idx = zck->index.first; idx; idx=idx->next) {
+        zckChunk *tmp = NULL;
+        HASH_FIND(hh, zck->index.ht, idx->digest, idx->digest_size, tmp);
+        if(!tmp)
+            HASH_ADD_KEYPTR(hh, zck->index.ht, idx->digest, idx->digest_size,
+                            idx);
+        /*
+         * Do the same if there is an uncompressed digest
+         */
+        if (zck->has_uncompressed_source && idx->digest_uncompressed) {
+            HASH_FIND(hhuncomp, zck->index.htuncomp, idx->digest_uncompressed, idx->digest_size, tmp);
+            if(!tmp)
+                HASH_ADD_KEYPTR(hhuncomp, zck->index.htuncomp, idx->digest_uncompressed,
+                                idx->digest_size, idx);
+        }
+    }
+
+    return true;
+}