commit     db2e8ff4fd52d6a06cef414787fd031cc26d43fa (patch)
author     Joakim Verona  2012-02-10 08:29:52 +0100
committer  Joakim Verona  2012-02-10 08:29:52 +0100
tree       c0e6fe54a70be21c9efa1f34040ce08499754e74  /lisp/url
parent     8c5c7f5afa968d06efb6788cf680d5463c389d85 (diff)
parent     667ced3a2d224b0f2ab3f2da26468791252c234a (diff)
download   emacs-db2e8ff4fd52d6a06cef414787fd031cc26d43fa.tar.gz
           emacs-db2e8ff4fd52d6a06cef414787fd031cc26d43fa.zip

    upstream
Diffstat (limited to 'lisp/url')

 -rw-r--r--  lisp/url/ChangeLog     | 16
 -rw-r--r--  lisp/url/url-cache.el  | 28
 -rw-r--r--  lisp/url/url-queue.el  | 45
 -rw-r--r--  lisp/url/url.el        |  7

 4 files changed, 94 insertions, 2 deletions
diff --git a/lisp/url/ChangeLog b/lisp/url/ChangeLog
index 7c92fc33490..f4cca618b49 100644
--- a/lisp/url/ChangeLog
+++ b/lisp/url/ChangeLog
@@ -1,3 +1,19 @@
+2012-02-06  Lars Ingebrigtsen  <larsi@gnus.org>
+
+	* url-cache.el (url-cache-prune-cache): New function.
+	(url-cache-prune-cache): Check that the directory exists before
+	trying to delete it.
+
+	* url.el (url-retrieve-number-of-calls): New variable.
+	(url-retrieve-internal): Use it to expire the cache once in a
+	while.
+
+	* url-queue.el (url-queue-setup-runners): New function that uses
+	`run-with-idle-timer' for extra asynchronicity.
+	(url-queue-remove-jobs-from-host): New function.
+	(url-queue-callback-function): Remove jobs from the same host if
+	connection failed.
+
 2012-01-12  Glenn Morris  <rgm@gnu.org>
 
 	* url-auth.el (url-basic-auth, url-digest-auth):
diff --git a/lisp/url/url-cache.el b/lisp/url/url-cache.el
index 20602a2f8ef..6559de4deb7 100644
--- a/lisp/url/url-cache.el
+++ b/lisp/url/url-cache.el
@@ -209,6 +209,34 @@ If `url-standalone-mode' is non-nil, cached items never expire."
 		   (seconds-to-time (or expire-time url-cache-expire-time)))
 		  (current-time))))))
 
+(defun url-cache-prune-cache (&optional directory)
+  "Remove all expired files from the cache.
+`url-cache-expire-time' says how old a file has to be to be
+considered \"expired\"."
+  (let ((current-time (current-time))
+	(total-files 0)
+	(deleted-files 0))
+    (setq directory (or directory url-cache-directory))
+    (when (file-exists-p directory)
+      (dolist (file (directory-files directory t))
+	(unless (member (file-name-nondirectory file) '("." ".."))
+	  (setq total-files (1+ total-files))
+	  (cond
+	   ((file-directory-p file)
+	    (when (url-cache-prune-cache file)
+	      (setq deleted-files (1+ deleted-files))))
+	   ((time-less-p
+	     (time-add
+	      (nth 5 (file-attributes file))
+	      (seconds-to-time url-cache-expire-time))
+	     current-time)
+	    (delete-file file)
+	    (setq deleted-files (1+ deleted-files))))))
+      (if (< deleted-files total-files)
+	  nil
+	(delete-directory directory)
+	t))))
+
 (provide 'url-cache)
 
 ;;; url-cache.el ends here
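
For context, a minimal sketch of how the new `url-cache-prune-cache' entry point might be invoked by hand, separate from the automatic expiry wired into url.el further down in this commit. The explicit directory and the idle-timer interval are illustrative assumptions, not part of the change:

    (require 'url-cache)

    ;; Prune expired files under the default `url-cache-directory' now.
    (url-cache-prune-cache)

    ;; DIRECTORY is optional; this explicit path is only an example.
    (url-cache-prune-cache "~/.emacs.d/url/cache/")

    ;; Illustrative only: prune whenever Emacs has been idle for an hour.
    (run-with-idle-timer 3600 t #'url-cache-prune-cache)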
diff --git a/lisp/url/url-queue.el b/lisp/url/url-queue.el
index 534c94b4d52..976a26635cd 100644
--- a/lisp/url/url-queue.el
+++ b/lisp/url/url-queue.el
@@ -30,6 +30,7 @@
 
 (eval-when-compile (require 'cl))
 (require 'browse-url)
+(require 'url-parse)
 
 (defcustom url-queue-parallel-processes 6
   "The number of concurrent processes."
@@ -49,7 +50,7 @@
 
 (defstruct url-queue
   url callback cbargs silentp
-  buffer start-time)
+  buffer start-time pre-triggered)
 
 ;;;###autoload
 (defun url-queue-retrieve (url callback &optional cbargs silent)
@@ -63,7 +64,30 @@ controls the level of parallelism via the
 				      :callback callback
 				      :cbargs cbargs
 				      :silentp silent))))
-  (url-queue-run-queue))
+  (url-queue-setup-runners))
+
+;; To ensure asynch behaviour, we start the required number of queue
+;; runners from `run-with-idle-timer'.  So we're basically going
+;; through the queue in two ways: 1) synchronously when a program
+;; calls `url-queue-retrieve' (which will then start the required
+;; number of queue runners), and 2) at the exit of each job, which
+;; will then not start any further threads, but just reuse the
+;; previous "slot".
+
+(defun url-queue-setup-runners ()
+  (let ((running 0)
+	waiting)
+    (dolist (entry url-queue)
+      (cond
+       ((or (url-queue-start-time entry)
+	    (url-queue-pre-triggered entry))
+	(incf running))
+       ((not waiting)
+	(setq waiting entry))))
+    (when (and waiting
+	       (< running url-queue-parallel-processes))
+      (setf (url-queue-pre-triggered waiting) t)
+      (run-with-idle-timer 0.01 nil 'url-queue-run-queue))))
 
 (defun url-queue-run-queue ()
   (url-queue-prune-old-entries)
@@ -81,10 +105,27 @@ controls the level of parallelism via the
       (url-queue-start-retrieve waiting))))
 
 (defun url-queue-callback-function (status job)
+  (when (and (eq (car status) :error)
+	     (eq (cadr (cadr status)) 'connection-failed))
+    ;; If we get a connection error, then flush all other jobs from
+    ;; the host from the queue.  This particularly makes sense if the
+    ;; error really is a DNS resolver issue, which happens
+    ;; synchronously and totally halts Emacs.
+    (url-queue-remove-jobs-from-host
+     (plist-get (nthcdr 3 (cadr status)) :host)))
   (setq url-queue (delq job url-queue))
   (url-queue-run-queue)
   (apply (url-queue-callback job) (cons status (url-queue-cbargs job))))
 
+(defun url-queue-remove-jobs-from-host (host)
+  (let ((jobs nil))
+    (dolist (job url-queue)
+      (when (equal (url-host (url-generic-parse-url (url-queue-url job)))
+		   host)
+	(push job jobs)))
+    (dolist (job jobs)
+      (setq url-queue (delq job url-queue)))))
+
 (defun url-queue-start-retrieve (job)
   (setf (url-queue-buffer job)
 	(ignore-errors
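
For reference, a minimal usage sketch of the queue entry point whose internals change above; the URL, the callback body, and the SILENT flag are illustrative only:

    (require 'url-queue)

    ;; Queue an asynchronous fetch.  The callback is run with the STATUS
    ;; plist (followed by any CBARGS) once the retrieval finishes.
    (url-queue-retrieve "http://www.example.com/"
                        (lambda (status)
                          (if (plist-get status :error)
                              (message "Fetch failed: %S" (plist-get status :error))
                            (message "Fetched %d bytes" (buffer-size))))
                        nil t)          ; no extra callback args, be silent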
diff --git a/lisp/url/url.el b/lisp/url/url.el
index 883e1a0c765..03b66b15232 100644
--- a/lisp/url/url.el
+++ b/lisp/url/url.el
@@ -119,6 +119,9 @@ Sometimes while retrieving a URL, the URL library needs to use another buffer
 than the one returned initially by `url-retrieve'.  In this case, it sets this
 variable in the original buffer as a forwarding pointer.")
 
+(defvar url-retrieve-number-of-calls 0)
+(autoload 'url-cache-prune-cache "url-cache")
+
 ;;;###autoload
 (defun url-retrieve (url callback &optional cbargs silent)
   "Retrieve URL asynchronously and call CALLBACK with CBARGS when finished.
@@ -174,6 +177,10 @@ If SILENT, don't message progress reports and the like."
   (unless (url-type url)
     (error "Bad url: %s" (url-recreate-url url)))
   (setf (url-silent url) silent)
+  ;; Once in a while, remove old entries from the URL cache.
+  (when (zerop (% url-retrieve-number-of-calls 1000))
+    (url-cache-prune-cache))
+  (setq url-retrieve-number-of-calls (1+ url-retrieve-number-of-calls))
   (let ((loader (url-scheme-get-property (url-type url) 'loader))
	(url-using-proxy (if (url-host url)
			     (url-find-proxy-for-url url (url-host url))))
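
A short sketch of the cache-expiry cadence added above. The zero check runs before the counter is incremented, so the prune fires on calls 0, 1000, 2000, and so on; resetting the counter by hand, as in the last line, is only an illustration of forcing a prune on the next retrieval:

    (zerop (% 0 1000))     ; => t    (the very first retrieval prunes)
    (zerop (% 999 1000))   ; => nil
    (zerop (% 1000 1000))  ; => t    (and every 1000th call after that)

    ;; Illustrative only: make the next `url-retrieve' prune the cache.
    (setq url-retrieve-number-of-calls 0)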