From: Wayne Davison
Date: Fri, 30 Oct 2009 00:35:50 +0000 (-0700)
Subject: Fix a hang when dealing with really large numbers of files
X-Git-Url: https://mattmccutchen.net/rsync/rsync.git/commitdiff_plain/fe16d9a67db9aeaa424bd35976eefa2a11861a3b

Fix a hang when dealing with really large numbers of files in an
incremental recursion scan.
---

diff --git a/flist.c b/flist.c
index 70abe611..ac0a903f 100644
--- a/flist.c
+++ b/flist.c
@@ -2349,6 +2349,7 @@ struct file_list *send_file_list(int f, int argc, char *argv[])
 		flist->sorted = flist->files;
 	flist_sort_and_clean(flist, 0);
 	file_total += flist->used;
+	file_old_total += flist->used;
 
 	if (numeric_ids <= 0 && !inc_recurse)
 		send_id_list(f);
diff --git a/sender.c b/sender.c
index 98612965..7aadfcb6 100644
--- a/sender.c
+++ b/sender.c
@@ -42,6 +42,7 @@ extern int make_backups;
 extern int inplace;
 extern int batch_fd;
 extern int write_batch;
+extern int file_old_total;
 extern struct stats stats;
 extern struct file_list *cur_flist, *first_flist, *dir_flist;
 
@@ -197,8 +198,11 @@ void send_files(int f_in, int f_out)
 				end_progress(0);
 			}
 			if (inc_recurse && first_flist) {
+				file_old_total -= first_flist->used;
 				flist_free(first_flist);
 				if (first_flist) {
+					if (first_flist == cur_flist)
+						file_old_total = cur_flist->used;
 					write_ndx(f_out, NDX_DONE);
 					continue;
 				}
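
The bookkeeping the patch adds can be illustrated in isolation. The sketch below is not rsync code: the struct layout, the finish_scan()/free_oldest() helpers, and the main() driver are simplified assumptions made for the example; only the counter names (file_total, file_old_total) and the adjust-on-free pattern mirror the hunks above. Presumably these counters are compared elsewhere to keep the incremental scan flowing; the patch only shows the bookkeeping, so the sketch sticks to that.

/*
 * Minimal sketch (NOT rsync code) of the pattern added by this patch:
 * keep a running count of entries belonging to already-scanned ("old")
 * file lists, and adjust it whenever the oldest list is freed.
 */
#include <stdio.h>
#include <stdlib.h>

struct file_list {
	struct file_list *next;
	int used;		/* number of entries in this list */
};

static struct file_list *first_flist, *cur_flist;
static int file_total;		/* entries seen across all lists */
static int file_old_total;	/* entries in lists already scanned */

/* A list has been fully scanned (cf. the flist.c hunk). */
static void finish_scan(struct file_list *flist)
{
	file_total += flist->used;
	file_old_total += flist->used;
}

/* Free the oldest list, keeping the counter consistent
 * (cf. the sender.c hunk). */
static void free_oldest(void)
{
	struct file_list *old = first_flist;

	if (!old)
		return;
	file_old_total -= old->used;
	first_flist = old->next;
	free(old);

	/* If the new head is the list still being processed, only its
	 * own entries remain "old". */
	if (first_flist && first_flist == cur_flist)
		file_old_total = cur_flist->used;
}

int main(void)
{
	struct file_list *a = calloc(1, sizeof *a);
	struct file_list *b = calloc(1, sizeof *b);

	a->used = 1000;
	b->used = 250;
	a->next = b;
	first_flist = a;
	cur_flist = b;

	finish_scan(a);
	finish_scan(b);
	printf("total=%d old=%d\n", file_total, file_old_total);  /* 1250 1250 */

	free_oldest();
	printf("total=%d old=%d\n", file_total, file_old_total);  /* 1250 250 */

	free_oldest();
	printf("total=%d old=%d\n", file_total, file_old_total);  /* 1250 0 */
	return 0;
}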