From b74c9672c2413714a6b601ce6ef12ad7765e6fa9 Mon Sep 17 00:00:00 2001
From: Glenn Morris
Date: Thu, 9 Feb 2012 22:23:47 -0500
Subject: [PATCH] Document url-queue-retrieve

* doc/misc/url.texi (Retrieving URLs): Update url-retrieve arguments.
Mention url-queue-retrieve.

* lisp/url/url-queue.el (url-queue-retrieve): Doc fix.

* etc/NEWS: Edits.
---
 doc/misc/ChangeLog    |  5 +++++
 doc/misc/url.texi     | 14 +++++++++++++-
 etc/NEWS              |  6 ++++--
 lisp/url/ChangeLog    |  4 ++++
 lisp/url/url-queue.el |  9 +++++----
 5 files changed, 31 insertions(+), 7 deletions(-)

diff --git a/doc/misc/ChangeLog b/doc/misc/ChangeLog
index ff71a6857af..d287b340542 100644
--- a/doc/misc/ChangeLog
+++ b/doc/misc/ChangeLog
@@ -1,3 +1,8 @@
+2012-02-10  Glenn Morris
+
+	* url.texi (Retrieving URLs): Update url-retrieve arguments.
+	Mention url-queue-retrieve.
+
 2012-02-09  Glenn Morris
 
 	* sem-user.texi (Semantic mode user commands): Typo fix.
diff --git a/doc/misc/url.texi b/doc/misc/url.texi
index 6464fba53b4..e60aebb665b 100644
--- a/doc/misc/url.texi
+++ b/doc/misc/url.texi
@@ -201,13 +201,25 @@ data.  @var{url} is either a string or a parsed URL structure.  Return
 info, or mailto URLs that need no further processing).
 @end defun
 
-@defun url-retrieve url callback &optional cbargs
+@defun url-retrieve url callback &optional cbargs silent no-cookies
 Retrieve @var{url} asynchronously and call @var{callback} with args
 @var{cbargs} when finished.  The callback is called when the object
 has been completely retrieved, with the current buffer containing the
 object and any MIME headers associated with it.  @var{url} is either a
 string or a parsed URL structure.  Returns the buffer @var{url} will
 load into, or @code{nil} if the process has already completed.
+If the optional argument @var{silent} is non-@code{nil}, suppress
+progress messages.  If the optional argument @var{no-cookies} is
+non-@code{nil}, do not store or send cookies.
+@end defun
+
+@vindex url-queue-parallel-processes
+@vindex url-queue-timeout
+@defun url-queue-retrieve url callback &optional cbargs silent no-cookies
+This acts like the @code{url-retrieve} function, but downloads in
+parallel.  The option @code{url-queue-parallel-processes} controls the
+number of concurrent processes, and the option @code{url-queue-timeout}
+sets a timeout in seconds.
 @end defun
 
 @node Supported URL Types
diff --git a/etc/NEWS b/etc/NEWS
index df79f5580db..4be250dcf9a 100644
--- a/etc/NEWS
+++ b/etc/NEWS
@@ -858,8 +858,9 @@ sql-list-all and sql-list-table.
 *** The option `ange-ftp-binary-file-name-regexp' has changed its
 default value to "".
 
-** `url-queue-retrieve' downloads web pages asynchronously, but allow
-controlling the degree of parallelism.
++++
+** New function, url-queue-retrieve, fetches URLs asynchronously like
+url-retrieve does, but in parallel.
 
 ** VC and related modes
 
@@ -921,6 +922,7 @@ You can get a comparable behavior with:
 ---
 *** pc-mode.el is obsolete (CUA mode is much more comprehensive).
 
+[gnus.texi, message.texi need updating]
 *** pgg is obsolete (use EasyPG instead)
 
 ---
diff --git a/lisp/url/ChangeLog b/lisp/url/ChangeLog
index 179148a089d..d7a22b72123 100644
--- a/lisp/url/ChangeLog
+++ b/lisp/url/ChangeLog
@@ -1,3 +1,7 @@
+2012-02-10  Glenn Morris
+
+	* url-queue.el (url-queue-retrieve): Doc fix.
+
 2012-02-08  Lars Ingebrigtsen
 
 	* url-parse.el (url): Add the `use-cookies' slot to the URL struct
diff --git a/lisp/url/url-queue.el b/lisp/url/url-queue.el
index c9c6c4fe69e..62e5e2f84d4 100644
--- a/lisp/url/url-queue.el
+++ b/lisp/url/url-queue.el
@@ -56,9 +56,10 @@
 
 ;;;###autoload
 (defun url-queue-retrieve (url callback &optional cbargs silent inhibit-cookies)
   "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
-Like `url-retrieve' (which see for details of the arguments), but
-controls the level of parallelism via the
-`url-queue-parallel-processes' variable."
+This is like `url-retrieve' (which see for details of the arguments),
+but downloads in parallel.  The variable `url-queue-parallel-processes'
+sets the number of concurrent processes.  The variable `url-queue-timeout'
+sets a timeout."
   (setq url-queue (append url-queue
 			  (list (make-url-queue :url url
@@ -127,7 +128,7 @@ controls the level of parallelism via the
 	(push job jobs)))
     (dolist (job jobs)
      (setq url-queue (delq job url-queue)))))
- 
+
 (defun url-queue-start-retrieve (job)
   (setf (url-queue-buffer job)
 	(ignore-errors
-- 
2.39.2
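For reviewers, a minimal usage sketch of the API documented above; the example
URLs, the message text, and the buffer cleanup are illustrative only, and the
callback convention follows `url-retrieve', which calls CALLBACK with STATUS
followed by CBARGS:

(require 'url-queue)

;; Queue several fetches; url-queue-retrieve runs at most
;; `url-queue-parallel-processes' of them concurrently and gives up on a
;; job after `url-queue-timeout' seconds.
(dolist (url '("https://www.gnu.org/" "https://www.gnu.org/software/emacs/"))
  (url-queue-retrieve
   url
   (lambda (status fetched-url)
     ;; Called with the response (headers and body) in the current buffer.
     (message "Fetched %s: %d bytes" fetched-url (buffer-size))
     (kill-buffer (current-buffer)))
   (list url)   ; CBARGS, passed to the callback after STATUS
   t            ; SILENT: suppress progress messages
   t))          ; INHIBIT-COOKIES: do not store or send cookies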