| author | Lars Ingebrigtsen | 2012-02-06 02:13:24 +0100 |
|---|---|---|
| committer | Lars Ingebrigtsen | 2012-02-06 02:13:24 +0100 |
| commit | b6ea20f39c292cb135656f6b014e087f25eaf682 (patch) | |
| tree | df0a04d974b63bd219b63f506698ab3c6f01fc04 | |
| parent | e7bc51d012a620268da85763e8bc233a1132ff3b (diff) | |
Try to mitigate DNS failures when downloading stuff asynchronously
* url-queue.el (url-queue-setup-runners): New function that uses
`run-with-idle-timer' for extra asynchronicity.
(url-queue-remove-jobs-from-host): New function.
(url-queue-callback-function): Remove jobs from the same host if
connection failed.
| -rw-r--r-- | lisp/url/ChangeLog    |  8 |
| -rw-r--r-- | lisp/url/url-queue.el | 45 |

2 files changed, 51 insertions(+), 2 deletions(-)
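For context, this is roughly how the queue is driven from Lisp. A minimal, hypothetical usage sketch (the URL and messages are made up; the signature of `url-queue-retrieve` is the one shown in the patch below):

```elisp
(require 'url-queue)

;; Queue an asynchronous fetch; the callback receives the STATUS list
;; that the new error handling in this patch inspects.
(url-queue-retrieve
 "http://example.org/image.png"            ; hypothetical URL
 (lambda (status)
   (if (plist-get status :error)
       (message "fetch failed: %S" (plist-get status :error))
     (message "fetched into %s" (current-buffer))))
 nil t)                                    ; no extra callback args, be silent
```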
```diff
diff --git a/lisp/url/ChangeLog b/lisp/url/ChangeLog
index 7c92fc33490..9285961fb32 100644
--- a/lisp/url/ChangeLog
+++ b/lisp/url/ChangeLog
@@ -1,3 +1,11 @@
+2012-02-06  Lars Ingebrigtsen  <larsi@gnus.org>
+
+        * url-queue.el (url-queue-setup-runners): New function that uses
+        `run-with-idle-timer' for extra asynchronicity.
+        (url-queue-remove-jobs-from-host): New function.
+        (url-queue-callback-function): Remove jobs from the same host if
+        connection failed.
+
 2012-01-12  Glenn Morris  <rgm@gnu.org>
 
         * url-auth.el (url-basic-auth, url-digest-auth):
```
```diff
diff --git a/lisp/url/url-queue.el b/lisp/url/url-queue.el
index 534c94b4d52..976a26635cd 100644
--- a/lisp/url/url-queue.el
+++ b/lisp/url/url-queue.el
@@ -30,6 +30,7 @@
 
 (eval-when-compile (require 'cl))
 (require 'browse-url)
+(require 'url-parse)
 
 (defcustom url-queue-parallel-processes 6
   "The number of concurrent processes."
```
```diff
@@ -49,7 +50,7 @@
 
 (defstruct url-queue
   url callback cbargs silentp
-  buffer start-time)
+  buffer start-time pre-triggered)
 
 ;;;###autoload
 (defun url-queue-retrieve (url callback &optional cbargs silent)
@@ -63,7 +64,30 @@ controls the level of parallelism via the
                       :callback callback
                       :cbargs cbargs
                       :silentp silent))))
-  (url-queue-run-queue))
+  (url-queue-setup-runners))
+
+;; To ensure asynch behaviour, we start the required number of queue
+;; runners from `run-with-idle-timer'. So we're basically going
+;; through the queue in two ways: 1) synchronously when a program
+;; calls `url-queue-retrieve' (which will then start the required
+;; number of queue runners), and 2) at the exit of each job, which
+;; will then not start any further threads, but just reuse the
+;; previous "slot".
+
+(defun url-queue-setup-runners ()
+  (let ((running 0)
+        waiting)
+    (dolist (entry url-queue)
+      (cond
+       ((or (url-queue-start-time entry)
+            (url-queue-pre-triggered entry))
+        (incf running))
+       ((not waiting)
+        (setq waiting entry))))
+    (when (and waiting
+               (< running url-queue-parallel-processes))
+      (setf (url-queue-pre-triggered waiting) t)
+      (run-with-idle-timer 0.01 nil 'url-queue-run-queue))))
 
 (defun url-queue-run-queue ()
   (url-queue-prune-old-entries)
```
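The comment in the hunk above describes the pattern: the fetch slots are kicked off from a short idle timer so that `url-queue-retrieve` itself returns immediately. A tiny stand-alone sketch of that pattern; the `message` body is just a stand-in for `url-queue-run-queue`:

```elisp
;; (run-with-idle-timer SECS REPEAT FUNCTION ...): with REPEAT nil the
;; function runs once, the next time Emacs has been idle for SECS
;; seconds, so the caller is not blocked.
(run-with-idle-timer 0.01 nil
                     (lambda ()
                       (message "a queue-runner slot would start here")))
```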
```diff
@@ -81,10 +105,27 @@ controls the level of parallelism via the
       (url-queue-start-retrieve waiting))))
 
 (defun url-queue-callback-function (status job)
+  (when (and (eq (car status) :error)
+             (eq (cadr (cadr status)) 'connection-failed))
+    ;; If we get a connection error, then flush all other jobs from
+    ;; the host from the queue.  This particularly makes sense if the
+    ;; error really is a DNS resolver issue, which happens
+    ;; synchronously and totally halts Emacs.
+    (url-queue-remove-jobs-from-host
+     (plist-get (nthcdr 3 (cadr status)) :host)))
   (setq url-queue (delq job url-queue))
   (url-queue-run-queue)
   (apply (url-queue-callback job) (cons status (url-queue-cbargs job))))
 
+(defun url-queue-remove-jobs-from-host (host)
+  (let ((jobs nil))
+    (dolist (job url-queue)
+      (when (equal (url-host (url-generic-parse-url (url-queue-url job)))
+                   host)
+        (push job jobs)))
+    (dolist (job jobs)
+      (setq url-queue (delq job url-queue)))))
+
 (defun url-queue-start-retrieve (job)
   (setf (url-queue-buffer job)
         (ignore-errors
```
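To make the new error check concrete: on a failed asynchronous connection the callback is handed a STATUS list of roughly the shape sketched below. The host, port, and message are invented for illustration; only the general shape matters, and it is what the accessors in `url-queue-callback-function` pick apart:

```elisp
;; Hypothetical STATUS value for a failed connection (illustration only).
(let ((status '(:error (error connection-failed "failed"
                              :host "example.org" :service 80))))
  (list (eq (car status) :error)                      ; => t
        (cadr (cadr status))                          ; => connection-failed
        (plist-get (nthcdr 3 (cadr status)) :host)))  ; => "example.org"
;; => (t connection-failed "example.org")
```

Flushing every queued job for that host avoids repeating a DNS lookup that fails synchronously and would stall Emacs once per job.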