/* 10-at-a-time.c */
  1. /***************************************************************************
  2. * _ _ ____ _
  3. * Project ___| | | | _ \| |
  4. * / __| | | | |_) | |
  5. * | (__| |_| | _ <| |___
  6. * \___|\___/|_| \_\_____|
  7. *
  8. * Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
  9. *
  10. * This software is licensed as described in the file COPYING, which
  11. * you should have received as part of this distribution. The terms
  12. * are also available at https://curl.se/docs/copyright.html.
  13. *
  14. * You may opt to use, copy, modify, merge, publish, distribute and/or sell
  15. * copies of the Software, and permit persons to whom the Software is
  16. * furnished to do so, under the terms of the COPYING file.
  17. *
  18. * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
  19. * KIND, either express or implied.
  20. *
  21. * SPDX-License-Identifier: curl
  22. *
  23. ***************************************************************************/
  24. /* <DESC>
  25. * Download many files in parallel, in the same thread.
  26. * </DESC>
  27. */
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#ifndef WIN32
#  include <unistd.h>
#endif
#include <curl/curl.h>
/* The list of URLs to download; transfers are started in array order. */
static const char *urls[] = {
"https://www.microsoft.com",
"https://opensource.org",
"https://www.google.com",
"https://www.yahoo.com",
"https://www.ibm.com",
"https://www.mysql.com",
"https://www.oracle.com",
"https://www.ripe.net",
"https://www.iana.org",
"https://www.amazon.com",
"https://www.netcraft.com",
"https://www.heise.de",
"https://www.chip.de",
"https://www.ca.com",
"https://www.cnet.com",
"https://www.mozilla.org",
"https://www.cnn.com",
"https://www.wikipedia.org",
"https://www.dell.com",
"https://www.hp.com",
"https://www.cert.org",
"https://www.mit.edu",
"https://www.nist.gov",
"https://www.ebay.com",
"https://www.playstation.com",
"https://www.uefa.com",
"https://www.ieee.org",
"https://www.apple.com",
"https://www.symantec.com",
"https://www.zdnet.com",
"https://www.fujitsu.com/global/",
"https://www.supermicro.com",
"https://www.hotmail.com",
"https://www.ietf.org",
"https://www.bbc.co.uk",
"https://news.google.com",
"https://www.foxnews.com",
"https://www.msn.com",
"https://www.wired.com",
"https://www.sky.com",
"https://www.usatoday.com",
"https://www.cbs.com",
"https://www.nbc.com/",
"https://slashdot.org",
"https://www.informationweek.com",
"https://apache.org",
"https://www.un.org",
};
#define MAX_PARALLEL 10 /* number of simultaneous transfers */
/* element count of the urls array above (only valid on a true array) */
#define NUM_URLS sizeof(urls)/sizeof(char *)
  86. static size_t write_cb(char *data, size_t n, size_t l, void *userp)
  87. {
  88. /* take care of the data here, ignored in this example */
  89. (void)data;
  90. (void)userp;
  91. return n*l;
  92. }
  93. static void add_transfer(CURLM *cm, int i, int *left)
  94. {
  95. CURL *eh = curl_easy_init();
  96. curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_cb);
  97. curl_easy_setopt(eh, CURLOPT_URL, urls[i]);
  98. curl_easy_setopt(eh, CURLOPT_PRIVATE, urls[i]);
  99. curl_multi_add_handle(cm, eh);
  100. (*left)++;
  101. }
  102. int main(void)
  103. {
  104. CURLM *cm;
  105. CURLMsg *msg;
  106. unsigned int transfers = 0;
  107. int msgs_left = -1;
  108. int left = 0;
  109. curl_global_init(CURL_GLOBAL_ALL);
  110. cm = curl_multi_init();
  111. /* Limit the amount of simultaneous connections curl should allow: */
  112. curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX_PARALLEL);
  113. for(transfers = 0; transfers < MAX_PARALLEL && transfers < NUM_URLS;
  114. transfers++)
  115. add_transfer(cm, transfers, &left);
  116. do {
  117. int still_alive = 1;
  118. curl_multi_perform(cm, &still_alive);
  119. while((msg = curl_multi_info_read(cm, &msgs_left))) {
  120. if(msg->msg == CURLMSG_DONE) {
  121. char *url;
  122. CURL *e = msg->easy_handle;
  123. curl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &url);
  124. fprintf(stderr, "R: %d - %s <%s>\n",
  125. msg->data.result, curl_easy_strerror(msg->data.result), url);
  126. curl_multi_remove_handle(cm, e);
  127. curl_easy_cleanup(e);
  128. left--;
  129. }
  130. else {
  131. fprintf(stderr, "E: CURLMsg (%d)\n", msg->msg);
  132. }
  133. if(transfers < NUM_URLS)
  134. add_transfer(cm, transfers++, &left);
  135. }
  136. if(left)
  137. curl_multi_wait(cm, NULL, 0, 1000, NULL);
  138. } while(left);
  139. curl_multi_cleanup(cm);
  140. curl_global_cleanup();
  141. return EXIT_SUCCESS;
  142. }