From ba1c55b3d101581710d4b7c0671bf972ddf8ea2a Mon Sep 17 00:00:00 2001
From: Nick White
Date: Fri, 2 May 2008 14:32:27 +0000
Subject: Fixed resume bug, improved download output

Fixed bug in resume which caused forced redownloads to create invalid pdfs
Changed download output to ensure one line per download (rather than a separate one for progress)
Changed download output to display name of file rather than url downloading
Removed obsolete testfile variable
Changed variable name of filename->filepath

git-archimport-id: getht@sv.gnu.org/getht--mainline--0.1--patch-46
---
 src/download.c | 26 ++++++++++++++------------
 src/getht.c    |  2 +-
 2 files changed, 15 insertions(+), 13 deletions(-)

diff --git a/src/download.c b/src/download.c
index 77e65bd..b8fd086 100644
--- a/src/download.c
+++ b/src/download.c
@@ -43,17 +43,18 @@ extern char proxy_user[STR_MAX];
 extern char proxy_pass[STR_MAX];
 extern CURL *main_curl_handle;
 
-int save_file(CURL *curl_handle, char *uri, char *filepath, long resume_offset)
+int save_file(CURL *curl_handle, char *uri, char *filepath, char *filetitle, long resume_offset)
 /* Save the file *uri to *filepath */
 {
-	printf("Downloading %s\n",uri);
+	printf("Downloading %s ",filetitle);
+	fflush(stdout);
 
 	if(!curl_handle)
 		curl_handle = main_curl_handle;
 	if(curl_handle) {
 		FILE *file;
-		file = fopen(filepath, "a");
+		file = fopen(filepath, resume_offset?"a":"w");
 		if(!file)
 		{
 			fprintf(stderr,"Error: cannot open file %s for writing.\n",filepath);
@@ -100,7 +101,7 @@ int save_file(CURL *curl_handle, char *uri, char *filepath, long resume_offset)
 
 		fclose(file);
 
-		printf("\n");
+		printf("\rDownloaded %s \n",filetitle);
 	}
 	else {
 		fprintf(stderr,"Error: curl failed to initialise.\n");
@@ -126,7 +127,7 @@ int update_progress(void *data, double dltotal, double dlnow,
 	else
 		frac = 0;
 
-	printf("\rDownload progress: %3.0lf%% ", frac);
+	printf("\b\b\b\b\b\b\b: %3.0lf%% ", frac);
 	fflush(stdout);
 
 	return 0;
@@ -185,12 +186,12 @@ void downloadissue(CURL *curl_handle, char * directory, iss * issue, int force)
 	sec * cur_section;
 	char newdir[STR_MAX];
 	char filename[STR_MAX];
-	FILE * testfile;
+	char filepath[STR_MAX];
 
 	snprintf(newdir,STR_MAX,"%s/%i_%i-%i",directory,
 		issue->date.year,issue->date.firstmonth,issue->date.lastmonth);
 
-	printf("Downloading issue entitled '%s' to '%s'\n",issue->title,newdir);
+	printf("Downloading %s to %s\n",issue->title, newdir);
 
 	if(!opendir(newdir))
 		if(mkdir(newdir, S_IRWXU | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH))
@@ -204,12 +205,13 @@ void downloadissue(CURL *curl_handle, char * directory, iss * issue, int force)
 	{
 		cur_section = issue->section[count];
 
-		snprintf(filename,STR_MAX,"%s/section_%i.pdf", newdir, cur_section->number);
+		snprintf(filename,STR_MAX,"section_%i.pdf", cur_section->number);
+		snprintf(filepath,STR_MAX,"%s/%s", newdir, filename);
 		if(!force){
 			struct stat fileinfo;
 			/* see if local file exists */
-			if(stat(filename, &fileinfo))
-				save_file(curl_handle, cur_section->uri, filename, 0);
+			if(stat(filepath, &fileinfo))
+				save_file(curl_handle, cur_section->uri, filepath, filename, 0);
 			else
 			{
 				/* get size of local file */
@@ -222,12 +224,12 @@ void downloadissue(CURL *curl_handle, char * directory, iss * issue, int force)
 
 				/* if size of local file != size of remote file, resume */
 				if(remotesize > 0 && localsize < remotesize)
-					save_file(curl_handle, cur_section->uri, filename, localsize);
+					save_file(curl_handle, cur_section->uri, filepath, filename, localsize);
 				else
 					printf("Skipping download of completed section %i\n", cur_section->number);
 			}
 		}
 		else
-			save_file(curl_handle, cur_section->uri, filename, 0);
+			save_file(curl_handle, cur_section->uri, filepath, filename, 0);
 	}
 }
 
diff --git a/src/getht.c b/src/getht.c
index 5502552..63637bf 100644
--- a/src/getht.c
+++ b/src/getht.c
@@ -203,7 +203,7 @@ int main(int argc, char *argv[])
 int update_contents_files()
 /* Returns 0 on success, 1 on failure */
 {
-	if(save_file(NULL, issue_uri, issue_xml, 0))
+	if(save_file(NULL, issue_uri, issue_xml, "contents", 0))
 		return 1;
 	else
 		return 0;
--
cgit v1.2.3
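
Editor's note (not part of the patch): the resume bug fixed above comes from always opening the output file in append mode, so a forced redownload appended a second copy of the PDF to the stale file. Below is a minimal standalone sketch of the same append-vs-truncate decision using libcurl; the function name fetch_to_file and its arguments are illustrative only and do not appear in getht.

/* Sketch: open in append mode only when genuinely resuming, and ask the
 * server for the missing bytes; otherwise truncate and start clean. */
#include <stdio.h>
#include <curl/curl.h>

static int fetch_to_file(CURL *curl, const char *uri,
                         const char *path, long resume_offset)
{
	/* "ab" keeps the existing partial file; "wb" discards it. */
	FILE *out = fopen(path, resume_offset ? "ab" : "wb");
	if(!out)
		return 1;

	curl_easy_setopt(curl, CURLOPT_URL, uri);
	curl_easy_setopt(curl, CURLOPT_WRITEDATA, out);
	/* Request only the bytes beyond what is already on disk. */
	curl_easy_setopt(curl, CURLOPT_RESUME_FROM, resume_offset);

	CURLcode res = curl_easy_perform(curl);
	fclose(out);
	return res == CURLE_OK ? 0 : 1;
}

This mirrors the fopen(filepath, resume_offset?"a":"w") change in save_file(): the caller passes a nonzero offset only when the local file is smaller than the remote one, so forced redownloads always truncate.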