curl-library

Re: Problem saving to file using multi

From: Peter Roomer <proomer_at_mail.com>
Date: Wed, 01 Oct 2014 15:41:18 +0300

Thanks, that cleared up a few things. I made some changes and now it
writes to the file, but I also get error 23 (CURLE_WRITE_ERROR) on some
downloads. Could it be that the pagefile is blocked and that is what
causes the error? Do I need to use 10 pagefiles for the error to go away,
or could I just change the file name for each download? (A rough sketch
of what I mean is at the end of this mail.) Any help is appreciated. The
code now looks as follows:

#include <stdio.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#ifndef WIN32
# include <unistd.h>
#endif
#include <curl/multi.h>

static const char *urls[] = {
   "http://www.microsoft.com",
   "http://www.opensource.org",
   "http://www.google.com",
   "http://www.yahoo.com",
   "http://www.ibm.com",
   "http://www.mysql.com",
   "http://www.oracle.com",
   "http://www.ripe.net",
   "http://www.iana.org",
   "http://www.amazon.com",
   "http://www.netcraft.com",
   "http://www.heise.de",
   "http://www.chip.de",
   "http://www.ca.com",
   "http://www.cnet.com",
   "http://www.news.com",
   "http://www.cnn.com",
   "http://www.wikipedia.org",
   "http://www.dell.com",
   "http://www.hp.com",
   "http://www.cert.org",
   "http://www.mit.edu",
   "http://www.nist.gov",
   "http://www.ebay.com",
   "http://www.playstation.com",
   "http://www.uefa.com",
   "http://www.ieee.org",
   "http://www.apple.com",
   "http://www.nokia.com",
   "http://www.symantec.com",
   "http://www.zdnet.com",
   "http://www.fujitsu.com",
   "http://www.supermicro.com",
   "http://www.hotmail.com",
   "http://www.ecma.com",
   "http://www.bbc.co.uk",
   "http://news.google.com",
   "http://www.foxnews.com",
   "http://www.msn.com",
   "http://www.wired.com",
   "http://www.sky.com",
   "http://www.usatoday.com",
   "http://www.cbs.com",
   "http://www.nbc.com",
   "http://slashdot.org",
   "http://www.bloglines.com",
   "http://www.techweb.com",
   "http://www.newslink.org",
   "http://www.un.org",
};

#define MAX 10 /* number of simultaneous transfers */
#define CNT (sizeof(urls)/sizeof(char *)) /* total number of transfers to do */

/* write callback: append the received body data to the FILE * given via
   CURLOPT_WRITEDATA */
static size_t write_data(void *ptr, size_t size, size_t nmemb, void *stream)
{
   size_t written = fwrite(ptr, size, nmemb, (FILE *)stream);
   return written;
}

static void init(CURLM *cm, FILE* pagefile, int i)
{
   CURL *eh = curl_easy_init();

   /* send all data to this function */
   curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_data);
   curl_easy_setopt(eh, CURLOPT_HEADER, 0L);
   curl_easy_setopt(eh, CURLOPT_URL, urls[i]);
   curl_easy_setopt(eh, CURLOPT_PRIVATE, urls[i]); /* keep the URL for the DONE message */
   curl_easy_setopt(eh, CURLOPT_WRITEDATA, pagefile);
   curl_easy_setopt(eh, CURLOPT_VERBOSE, 0L);
   curl_easy_setopt(eh, CURLOPT_NOPROGRESS, 1L);

   curl_multi_add_handle(cm, eh);
}

int main(void)
{
   CURLM *cm;
   CURLMsg *msg;
   long L;
   unsigned int C=0;
   int M, Q, U = -1;
   fd_set R, W, E;
   struct timeval T;

   static const char *pagefilename = "page.txt";
   FILE *pagefile;

   curl_global_init(CURL_GLOBAL_ALL);

   cm = curl_multi_init();

   /* we can optionally limit the total number of connections this multi
      handle uses */
   curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX);

   for (C = 0; C < MAX; ++C) {
     init(cm, pagefile, C);
   }

   while (U) {
       /* open the file */
       pagefile = fopen(pagefilename, "a+");
       if (pagefile) {

         /* write the page body to this file handle */
         //curl_easy_setopt(cm, CURLOPT_WRITEDATA, pagefile);

         /* get it! */
         curl_multi_perform(cm, &U);

         /* close the header file */
         fclose(pagefile);
       } else fprintf(stderr, "Cannot open file!\n");

     if (U) {
       FD_ZERO(&R);
       FD_ZERO(&W);
       FD_ZERO(&E);

       if (curl_multi_fdset(cm, &R, &W, &E, &M)) {
         fprintf(stderr, "E: curl_multi_fdset\n");
         return EXIT_FAILURE;
       }

       if (curl_multi_timeout(cm, &L)) {
         fprintf(stderr, "E: curl_multi_timeout\n");
         return EXIT_FAILURE;
       }
       if (L == -1)
         L = 100;

       if (M == -1) {
#ifdef WIN32
         Sleep(L);
#else
         sleep(L / 1000);
#endif
       } else {
         T.tv_sec = L/1000;
         T.tv_usec = (L%1000)*1000;

         if (0 > select(M+1, &R, &W, &E, &T)) {
           fprintf(stderr, "E: select(%i,,,,%li): %i: %s\n", M+1, L,
errno, strerror(errno));
           return EXIT_FAILURE;
         }
       }
     }

     while ((msg = curl_multi_info_read(cm, &Q))) {
       if (msg->msg == CURLMSG_DONE) {
         char *url;
         CURL *e = msg->easy_handle;
         curl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &url);
         fprintf(stderr, "R: %d - %s <%s>\n", msg->data.result,
curl_easy_strerror(msg->data.result), url);
         curl_multi_remove_handle(cm, e);
         curl_easy_cleanup(e);
       }
       else {
         fprintf(stderr, "E: CURLMsg (%d)\n", msg->msg);
       }
       if (C < CNT) {
         init(cm, pagefile, C++);
         U++; /* just to prevent it from remaining at 0 if there are more
                 URLs to get */
       }
     }
   }

   curl_multi_cleanup(cm);
   curl_global_cleanup();

   return EXIT_SUCCESS;
}
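
To show what I mean by changing the file name for each download, here is a
rough, untested sketch: every easy handle gets its own FILE * (made-up names
download_0.txt, download_1.txt, ...), the same pointer is stored in
CURLOPT_PRIVATE, and the file is only closed when that transfer reports
CURLMSG_DONE. I used curl_multi_wait() (needs libcurl 7.28.0 or later)
instead of the select() loop just to keep the sketch short:

#include <stdio.h>
#include <stdlib.h>
#include <curl/curl.h>

static const char *sketch_urls[] = {  /* just two URLs to keep the sketch short */
   "http://www.google.com",
   "http://www.wikipedia.org",
};
#define NURLS (sizeof(sketch_urls)/sizeof(sketch_urls[0]))

static size_t write_data(void *ptr, size_t size, size_t nmemb, void *stream)
{
   return fwrite(ptr, size, nmemb, (FILE *)stream);
}

int main(void)
{
   CURLM *cm;
   unsigned int i;
   int running;

   curl_global_init(CURL_GLOBAL_ALL);
   cm = curl_multi_init();

   for (i = 0; i < NURLS; i++) {
     char name[64];
     FILE *out;
     CURL *eh = curl_easy_init();

     /* made-up naming scheme: one output file per download */
     snprintf(name, sizeof(name), "download_%u.txt", i);
     out = fopen(name, "wb");
     if (!out) {
       fprintf(stderr, "Cannot open %s!\n", name);
       curl_easy_cleanup(eh);
       continue;
     }
     curl_easy_setopt(eh, CURLOPT_URL, sketch_urls[i]);
     curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_data);
     curl_easy_setopt(eh, CURLOPT_WRITEDATA, out);  /* this handle's own file */
     curl_easy_setopt(eh, CURLOPT_PRIVATE, out);    /* so it can be closed later */
     curl_multi_add_handle(cm, eh);
   }

   do {
     CURLMsg *msg;
     int queued;

     curl_multi_perform(cm, &running);

     while ((msg = curl_multi_info_read(cm, &queued))) {
       if (msg->msg == CURLMSG_DONE) {
         char *priv;
         CURL *e = msg->easy_handle;
         curl_easy_getinfo(e, CURLINFO_PRIVATE, &priv);
         fclose((FILE *)priv);          /* transfer finished, safe to close now */
         curl_multi_remove_handle(cm, e);
         curl_easy_cleanup(e);
       }
     }
     if (running)
       curl_multi_wait(cm, NULL, 0, 100, NULL);  /* wait up to 100 ms for activity */
   } while (running);

   curl_multi_cleanup(cm);
   curl_global_cleanup();
   return EXIT_SUCCESS;
}

The idea is that each FILE * stays open until its own transfer has finished,
so the handles never share one file. Would that be a sensible way to do it?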