+2001-12-09 Katsumi Yamaoka <yamaoka@jpl.org>
+
+ * lisp/nnheader.el (mm-multibyte-string-p): Alias to
+ `multibyte-string-p' or `ignore'.
+
2001-12-09 TSUCHIYA Masatoshi <tsuchiya@namazu.org>
* lisp/gnus-namazu.el (gnus-namazu-summary-buffer-name): New
+2001-12-09 08:00:00 ShengHuo ZHU <zsh@cs.rochester.edu>
+
+ * gnus-sum.el (gnus-summary-buffer-name): Decode group name.
+ * gnus-group.el (gnus-group-name-decode): Decode unibyte
+ strings only.
+ From TSUCHIYA Masatoshi <tsuchiya@namazu.org>
+
+2001-12-08 Nevin Kapur <nevin@jhu.edu>
+
+ * nnmail.el (nnmail-fancy-expiry-targets): New variable.
+ (nnmail-fancy-expiry-target): Use it.
+ Suggestions from Simon Josefsson <jas@extundo.com>.
+
+2001-12-07 14:00:00 ShengHuo ZHU <zsh@cs.rochester.edu>
+
+ * gnus-sum.el (gnus-summary-show-article): Recount lines if not exist.
+
+2001-12-07 10:00:00 ShengHuo ZHU <zsh@cs.rochester.edu>
+
+ * nnwfm.el (nnwfm-create-mapping): Use gnus-url-unhex-string.
+
+ * gnus-util.el (gnus-url-unhex-string): Move here.
+
+2001-12-07 09:00:00 ShengHuo ZHU <zsh@cs.rochester.edu>
+
+ * nnrss.el (nnrss-decode-entities-unibyte-string): Use
+ mm-url-decode-entities-nbsp.
+
+ * nnlistserv.el, nnultimate.el, nnwarchive.el, nnweb.el,
+ webmail.el, nnwfm.el: Use mm-url.
+
+ * mm-url.el (mm-url-fetch-form): Move from nnweb.
+ (mm-url-remove-markup): Move from nnweb.
+ (mm-url-fetch-simple): Move from webmail.
+
+ * nnslashdot.el (nnslashdot-request-post): mm-url-fetch-form.
+
2001-12-07 01:00:00 ShengHuo ZHU <zsh@cs.rochester.edu>
* gnus-sum.el (gnus-summary-print-truncate-and-quote): New.
(progn (require 'shimbun) nil)
(error '("nnshimbun.el")))
(unless (or (condition-case code
- (require 'w3-forms)
+ (require 'w3-parse)
(error
(message "No w3: %s %s retrying..." code
- (locate-library "w3-forms"))
+ (locate-library "w3-parse"))
nil))
;; Maybe mis-configured Makefile is used (e.g.
;; configured for FSFmacs but XEmacs is running).
(copy-sequence load-path))))
(if (let ((load-path lp))
(condition-case nil
- (require 'w3-forms)
+ (require 'w3-parse)
(error nil)))
;; If success, fix `load-path' for compiling.
(progn
(setq load-path lp)
(message " => fixed; W3DIR=%s"
(file-name-directory
- (locate-library "w3-forms")))
+ (locate-library "w3-parse")))
t)
(message " => ignored")
nil)))
- '("nnweb.el" "nnlistserv.el" "nnultimate.el"
- "nnwarchive.el" "webmail.el" "nnwfm.el"))
+ '("nnultimate.el" "webmail.el" "nnwfm.el"))
(condition-case code
(progn (require 'mh-e) nil)
(error
(setq retval (cons (list key val) retval)))))
retval))
-(defun gnus-url-unhex (x)
- (if (> x ?9)
- (if (>= x ?a)
- (+ 10 (- x ?a))
- (+ 10 (- x ?A)))
- (- x ?0)))
-
-(defun gnus-url-unhex-string (str &optional allow-newlines)
- "Remove %XXX embedded spaces, etc in a url.
-If optional second argument ALLOW-NEWLINES is non-nil, then allow the
-decoding of carriage returns and line feeds in the string, which is normally
-forbidden in URL encoding."
- (setq str (or (nnheader-replace-chars-in-string str ?+ ? ) ""))
- (let ((tmp "")
- (case-fold-search t))
- (while (string-match "%[0-9a-f][0-9a-f]" str)
- (let* ((start (match-beginning 0))
- (ch1 (gnus-url-unhex (elt str (+ start 1))))
- (code (+ (* 16 ch1)
- (gnus-url-unhex (elt str (+ start 2))))))
- (setq tmp (concat
- tmp (substring str 0 start)
- (cond
- (allow-newlines
- (char-to-string code))
- ((or (= code ?\n) (= code ?\r))
- " ")
- (t (char-to-string code))))
- str (substring str (match-end 0)))))
- (setq tmp (concat tmp str))
- tmp))
-
(defun gnus-url-mailto (url)
;; Send mail to someone
(when (string-match "mailto:/*\\(.*\\)" url)
result)))
(defun gnus-group-name-decode (string charset)
- (if (and string charset (featurep 'mule))
+ (if (and string charset (featurep 'mule)
+ (not (mm-multibyte-string-p string)))
(decode-coding-string string charset)
string))
(defun gnus-summary-buffer-name (group)
"Return the summary buffer name of GROUP."
- (concat "*Summary " group "*"))
+ (concat "*Summary " (gnus-group-decoded-name group) "*"))
(defun gnus-summary-setup-buffer (group)
"Initialize summary buffer."
(gnus-newsgroup-ignored-charsets 'gnus-all))
(gnus-summary-select-article nil 'force)
(let ((deps gnus-newsgroup-dependencies)
- head header)
+ head header lines)
(save-excursion
(set-buffer gnus-original-article-buffer)
(save-restriction
(message-narrow-to-head)
- (setq head (buffer-string)))
+ (setq head (buffer-string))
+ (goto-char (point-min))
+ (unless (re-search-forward "^lines:[ \t]\\([0-9]+\\)" nil t)
+ (goto-char (point-max))
+ (widen)
+ (setq lines (1- (count-lines (point) (point-max))))))
(with-temp-buffer
(insert (format "211 %d Article retrieved.\n"
(cdr gnus-article-current)))
(insert head)
+ (if lines (insert (format "Lines: %d\n" lines)))
(insert ".\n")
(let ((nntp-server-buffer (current-buffer)))
(setq header (car (gnus-get-newsgroup-headers deps t))))))
If you find some problem with the directory separator character, try
\"[/\\\\\]\" for some systems.")
+(defun gnus-url-unhex (x)
+ (if (> x ?9)
+ (if (>= x ?a)
+ (+ 10 (- x ?a))
+ (+ 10 (- x ?A)))
+ (- x ?0)))
+
+(defun gnus-url-unhex-string (str &optional allow-newlines)
+ "Remove %XXX embedded spaces, etc in a url.
+If optional second argument ALLOW-NEWLINES is non-nil, then allow the
+decoding of carriage returns and line feeds in the string, which is normally
+forbidden in URL encoding."
+ (setq str (or (mm-subst-char-in-string ?+ ? str) ""))
+ (let ((tmp "")
+ (case-fold-search t))
+ (while (string-match "%[0-9a-f][0-9a-f]" str)
+ (let* ((start (match-beginning 0))
+ (ch1 (gnus-url-unhex (elt str (+ start 1))))
+ (code (+ (* 16 ch1)
+ (gnus-url-unhex (elt str (+ start 2))))))
+ (setq tmp (concat
+ tmp (substring str 0 start)
+ (cond
+ (allow-newlines
+ (char-to-string code))
+ ((or (= code ?\n) (= code ?\r))
+ " ")
+ (t (char-to-string code))))
+ str (substring str (match-end 0)))))
+ (setq tmp (concat tmp str))
+ tmp))
+
(provide 'gnus-util)
;;; gnus-util.el ends here
(maybe-fbind '(babel-fetch
babel-wash create-image decode-coding-string display-graphic-p
+ replace-regexp-in-string
bbdb-complete-name
display-time-event-handler
find-image font-create-object gnus-mule-get-coding-system
make-overlay mouse-minibuffer-check mouse-movement-p
mouse-scroll-subr overlay-buffer overlay-end
overlay-get overlay-lists overlay-put
+ overlays-in
overlay-start posn-point posn-window
read-event read-event run-with-idle-timer
set-buffer-multibyte set-char-table-range
(let ((functions-variables
(cond
((featurep 'xemacs)
- '((replace-regexp-in-string)))
+ nil)
((>= emacs-major-version 21)
'((function-max-args smiley-encode-buffer)))
((boundp 'MULE)
'((coding-system-get
compose-mail file-name-extension
find-coding-systems-for-charsets find-coding-systems-region
- function-max-args get-charset-property replace-regexp-in-string
- shell-command-to-string smiley-encode-buffer)))
+ function-max-args get-charset-property shell-command-to-string
+ smiley-encode-buffer)))
(t
'((function-max-args smiley-encode-buffer))))))
(maybe-fbind (car functions-variables))
(if (string-match "^file:/+" url)
(insert-file-contents (substring url (1- (match-end 0))))
(mm-url-insert-file-contents-external url))
- (url-insert-file-contents url)))
+ (require 'url-handlers)
+ (let ((name buffer-file-name))
+ (prog1
+ (url-insert-file-contents url)
+ (setq buffer-file-name name)))))
(defun mm-url-insert-file-contents-external (url)
(let (program args)
(defun mm-url-insert (url &optional follow-refresh)
"Insert the contents from an URL in the current buffer.
If FOLLOW-REFRESH is non-nil, redirect refresh url in META."
- (let ((name buffer-file-name))
- (if follow-refresh
- (save-restriction
- (narrow-to-region (point) (point))
- (mm-url-insert-file-contents url)
- (goto-char (point-min))
- (when (re-search-forward
- "<meta[ \t\r\n]*http-equiv=\"Refresh\"[^>]*URL=\\([^\"]+\\)\"" nil t)
- (let ((url (match-string 1)))
- (delete-region (point-min) (point-max))
- (mm-url-insert url t))))
- (mm-url-insert-file-contents url))
- (setq buffer-file-name name)))
+ (if follow-refresh
+ (save-restriction
+ (narrow-to-region (point) (point))
+ (mm-url-insert-file-contents url)
+ (goto-char (point-min))
+ (when (re-search-forward
+ "<meta[ \t\r\n]*http-equiv=\"Refresh\"[^>]*URL=\\([^\"]+\\)\"" nil t)
+ (let ((url (match-string 1)))
+ (delete-region (point-min) (point-max))
+ (mm-url-insert url t))))
+ (mm-url-insert-file-contents url)))
(defun mm-url-decode-entities ()
"Decode all HTML entities."
(setq elem (char-to-string elem)))
(replace-match elem t t))))
+(defun mm-url-decode-entities-nbsp ()
+ "Decode all HTML entities and &nbsp; to a space."
+ (let ((mm-url-html-entities (cons '(nbsp . 32) mm-url-html-entities)))
+ (mm-url-decode-entities)))
+
(defun mm-url-decode-entities-string (string)
(with-temp-buffer
(insert string)
chunk)
""))
+(defun mm-url-encode-www-form-urlencoded (pairs)
+ "Return PAIRS encoded for forms."
+ (mapconcat
+ (lambda (data)
+ (concat (mm-url-form-encode-xwfu (car data)) "="
+ (mm-url-form-encode-xwfu (cdr data))))
+ pairs "&"))
+
+(defun mm-url-fetch-form (url pairs)
+ "Fetch a form from URL with PAIRS as the data using the POST method."
+ (require 'url-handlers)
+ (let ((url-request-data (mm-url-encode-www-form-urlencoded pairs))
+ (url-request-method "POST")
+ (url-request-extra-headers
+ '(("Content-type" . "application/x-www-form-urlencoded"))))
+ (url-insert-file-contents url)
+ (setq buffer-file-name nil))
+ t)
+
+(defun mm-url-fetch-simple (url content)
+ (require 'url-handlers)
+ (let ((url-request-data content)
+ (url-request-method "POST")
+ (url-request-extra-headers
+ '(("Content-type" . "application/x-www-form-urlencoded"))))
+ (url-insert-file-contents url)
+ (setq buffer-file-name nil))
+ t)
+
+(defun mm-url-remove-markup ()
+ "Remove all HTML markup, leaving just plain text."
+ (goto-char (point-min))
+ (while (search-forward "<!--" nil t)
+ (delete-region (match-beginning 0)
+ (or (search-forward "-->" nil t)
+ (point-max))))
+ (goto-char (point-min))
+ (while (re-search-forward "<[^>]+>" nil t)
+ (replace-match "" t t)))
+
(provide 'mm-url)
;;; mm-url.el ends here
(message "%s(Y/n) Yes" prompt)
t)))
-(defun nnheader-image-load-path (&optional package)
- (let (dir result)
- (dolist (path load-path (nreverse result))
- (if (file-directory-p
- (setq dir (concat (file-name-directory
- (directory-file-name path))
- "etc/" (or package "gnus/"))))
- (push dir result))
- (push path result))))
-(defalias 'mm-image-load-path 'nnheader-image-load-path)
-
-(defalias 'mm-read-coding-system
- (if (or (and (featurep 'xemacs)
- (<= (string-to-number emacs-version) 21.1))
- (boundp 'MULE))
- (lambda (prompt &optional default-coding-system)
- (read-coding-system prompt))
- 'read-coding-system))
+(eval-and-compile
+ (unless (featurep 'mm-util)
+ (defun nnheader-image-load-path (&optional package)
+ (let (dir result)
+ (dolist (path load-path (nreverse result))
+ (if (file-directory-p
+ (setq dir (concat (file-name-directory
+ (directory-file-name path))
+ "etc/" (or package "gnus/"))))
+ (push dir result))
+ (push path result))))
+ (defalias 'mm-image-load-path 'nnheader-image-load-path)
+
+ (defalias 'mm-read-coding-system
+ (if (or (and (featurep 'xemacs)
+ (<= (string-to-number emacs-version) 21.1))
+ (boundp 'MULE))
+ (lambda (prompt &optional default-coding-system)
+ (read-coding-system prompt))
+ 'read-coding-system))
+
+ (defalias 'mm-multibyte-string-p
+ (if (fboundp 'multibyte-string-p)
+ 'multibyte-string-p
+ 'ignore))))
(when (featurep 'xemacs)
(require 'nnheaderxm))
;;; Commentary:
-;; Note: You need to have `url' and `w3' installed for this
-;; backend to work.
-
;;; Code:
(eval-when-compile (require 'cl))
(require 'nnoo)
-(eval-when-compile
- (ignore-errors
- (require 'nnweb)) ; requires W3
- (autoload 'url-insert-file-contents "nnweb"))
+(require 'mm-url)
+(require 'nnweb)
(nnoo-declare nnlistserv
nnweb)
(when (funcall (nnweb-definition 'search) page)
;; Go through all the article hits on this page.
(goto-char (point-min))
- (nnweb-decode-entities)
+ (mm-url-decode-entities)
(goto-char (point-min))
(while (re-search-forward "^<li> *<a href=\"\\([^\"]+\\)\"><b>\\([^\\>]+\\)</b></a> *<[^>]+><i>\\([^>]+\\)<" nil t)
(setq url (match-string 1)
(let ((case-fold-search t)
(headers '(sent name email subject id))
sent name email subject id)
- (nnweb-decode-entities)
+ (mm-url-decode-entities)
(while headers
(goto-char (point-min))
(re-search-forward (format "<!-- %s=\"\\([^\"]+\\)" (car headers) nil t))
(goto-char (point-max))
(search-backward "<!-- body" nil t)
(delete-region (point-max) (progn (beginning-of-line) (point)))
- (nnweb-remove-markup)
+ (mm-url-remove-markup)
(goto-char (point-min))
(insert (format "From: %s <%s>\n" name email)
(format "Subject: %s\n" subject)
(format "Date: %s\n\n" sent))))
(defun nnlistserv-kk-search (search)
- (url-insert-file-contents
+ (mm-url-insert
(concat (format (nnweb-definition 'address) search)
(nnweb-definition 'index)))
t)
(function :format "%v" nnmail-)
string))
+(defcustom nnmail-fancy-expiry-targets nil
+ "Determine expiry target based on articles using fancy techniques.
+
+This is a list of (\"HEADER\" \"REGEXP\" \"TARGET\") entries. If
+`nnmail-expiry-target' is set to the function
+`nnmail-fancy-expiry-target' and HEADER of the article matches REGEXP,
+the message will be expired to a group determined by invoking
+`format-time-string' with TARGET used as the format string and the
+time extracted from the article's Date header (if missing the current
+time is used).
+
+In the special case that HEADER is the symbol `to-from', the regexp
+will try to match against both the From and the To header.
+
+Example:
+
+\(setq nnmail-fancy-expiry-targets
+ '((to-from \"boss\" \"nnfolder:Work\")
+ (\"Subject\" \"IMPORTANT\" \"nnfolder:IMPORTANT.%Y.%b\")
+ (\"from\" \".*\" \"nnfolder:Archive-%Y\")))
+
+In this case, articles containing the string \"boss\" in the To or the
+From header will be expired to the group \"nnfolder:Work\";
+articles containing the string \"IMPORTANT\" in the Subject header will
+be expired to the group \"nnfolder:IMPORTANT.YYYY.MMM\"; and
+everything else will be expired to \"nnfolder:Archive-YYYY\"."
+ :group 'nnmail-expire
+ :type '(repeat (list (choice :tag "Match against"
+ (string :tag "Header")
+ (const to-from))
+ regexp
+ (string :tag "Target group format string"))))
+
(defcustom nnmail-cache-accepted-message-ids nil
"If non-nil, put Message-IDs of Gcc'd articles into the duplicate cache.
If non-nil, also update the cache when copy or move articles."
(unless (eq target 'delete)
(gnus-request-accept-article target nil nil t))))
+(defun nnmail-fancy-expiry-target (group)
+ "Return a target expiry group determined by `nnmail-fancy-expiry-targets'."
+ (let* (header
+ (case-fold-search nil)
+ (from (or (message-fetch-field "from") ""))
+ (to (or (message-fetch-field "to") ""))
+ (date (date-to-time
+ (or (message-fetch-field "date") (current-time-string))))
+ (target 'delete))
+ (dolist (regexp-target-pair (reverse nnmail-fancy-expiry-targets) target)
+ (setq header (car regexp-target-pair))
+ (cond
+ ;; If the header is to-from then match against the
+ ;; To or From header
+ ((and (equal header 'to-from)
+ (or (string-match (cadr regexp-target-pair) from)
+ (and (string-match message-dont-reply-to-names from)
+ (string-match (cadr regexp-target-pair) to))))
+ (setq target (format-time-string (caddr regexp-target-pair) date)))
+ ((and (not (equal header 'to-from))
+ (string-match (cadr regexp-target-pair)
+ (message-fetch-field header)))
+ (setq target
+ (format-time-string (caddr regexp-target-pair) date)))))))
+
(defun nnmail-check-syntax ()
"Check (and modify) the syntax of the message in the current buffer."
(save-restriction
(defun nnrss-decode-entities-unibyte-string (string)
(mm-with-unibyte-buffer
(insert string)
- (mm-url-decode-entities)
+ (mm-url-decode-entities-nbsp)
(buffer-substring (point-min) (point-max))))
(defalias 'nnrss-insert 'nnrss-insert-w3)
;;; Snarf functions
(defun nnrss-check-group (group server)
- (let ((mm-url-html-entities (cons '(nbsp . 32) mm-url-html-entities))
- file xml subject url extra changed author date)
+ (let (file xml subject url extra changed author date)
(condition-case err
(mm-with-unibyte-buffer
(if (and nnrss-use-local
t)
(deffoo nnslashdot-request-post (&optional server)
- (require 'nnweb)
(nnslashdot-possibly-change-server nil server)
(let ((sid (nnslashdot-sid-strip (message-fetch-field "newsgroups")))
(subject (message-fetch-field "subject"))
(message-goto-body)
(setq body (buffer-substring (point) (point-max)))
(erase-buffer)
- (nnweb-fetch-form
+ (mm-url-fetch-form
"http://slashdot.org/comments.pl"
`(("sid" . ,sid)
("pid" . ,pid)
(require 'gnus)
(require 'nnmail)
(require 'mm-util)
-(eval-when-compile
- (ignore-errors
- (require 'nnweb)))
-;; Report failure to find w3 at load time if appropriate.
-(eval '(require 'nnweb))
+(require 'mm-url)
+(require 'nnweb)
+(autoload 'w3-parse-buffer "w3-parse")
(nnoo-declare nnultimate)
(setq subject (nth 2 (assq (car elem) topics)))
(setq href (nth 3 (assq (car elem) topics)))
(if (= current-page 1)
- (nnweb-insert href)
+ (mm-url-insert href)
(string-match "\\.html$" href)
- (nnweb-insert (concat (substring href 0 (match-beginning 0))
+ (mm-url-insert (concat (substring href 0 (match-beginning 0))
"-" (number-to-string current-page)
(match-string 0 href))))
(goto-char (point-min))
(deffoo nnultimate-request-list (&optional server)
(nnultimate-possibly-change-server nil server)
(mm-with-unibyte-buffer
- (nnweb-insert
+ (mm-url-insert
(if (string-match "/$" nnultimate-address)
(concat nnultimate-address "Ultimate.cgi")
nnultimate-address))
(mm-with-unibyte-buffer
(while furls
(erase-buffer)
- (nnweb-insert (pop furls))
+ (mm-url-insert (pop furls))
(goto-char (point-min))
(setq parse (w3-parse-buffer (current-buffer)))
(setq contents
;;; Commentary:
;; Note: You need to have `url' (w3 0.46) or greater version
-;; installed for this backend to work.
+;; installed for some functions of this backend to work.
;; Todo:
;; 1. To support more web archives.
(require 'nnmail)
(require 'mm-util)
(require 'mail-source)
-(eval-when-compile
- (ignore-errors
- (require 'w3)
- (require 'url)
- (require 'w3-forms)
- (require 'nnweb)))
-;; Report failure to find w3 at load time if appropriate.
-(eval '(progn
- (require 'w3)
- (require 'url)
- (require 'w3-forms)
- (require 'nnweb)))
+(require 'mm-url)
(nnoo-declare nnwarchive)
(format " *nnwarchive %s %s*" nnwarchive-type server)))))
(nnwarchive-set-default nnwarchive-type))
-(defun nnwarchive-encode-www-form-urlencoded (pairs)
- "Return PAIRS encoded for forms."
- (mapconcat
- (function
- (lambda (data)
- (concat (w3-form-encode-xwfu (car data)) "="
- (w3-form-encode-xwfu (cdr data)))))
- pairs "&"))
-
-(defun nnwarchive-fetch-form (url pairs)
- (let ((url-request-data (nnwarchive-encode-www-form-urlencoded pairs))
- (url-request-method "POST")
- (url-request-extra-headers
- '(("Content-type" . "application/x-www-form-urlencoded"))))
- (nnweb-insert url))
- t)
-
(defun nnwarchive-eval (expr)
(cond
((consp expr)
(defun nnwarchive-url (xurl)
(mm-with-unibyte-current-buffer
- (let ((url-confirmation-func 'identity)
+ (let ((url-confirmation-func 'identity) ;; Some hacks.
(url-cookie-multiple-line nil))
(cond
((eq (car xurl) 'post)
(pop xurl)
- (nnwarchive-fetch-form (car xurl) (nnwarchive-eval (cdr xurl))))
+ (mm-url-fetch-form (car xurl) (nnwarchive-eval (cdr xurl))))
(t
- (nnweb-insert (apply 'format (nnwarchive-eval xurl))))))))
+ (mm-url-insert (apply 'format (nnwarchive-eval xurl))))))))
(defun nnwarchive-generate-active ()
(save-excursion
article
(make-full-mail-header
article
- (nnweb-decode-entities-string subject)
- (nnweb-decode-entities-string from)
+ (mm-url-decode-entities-string subject)
+ (mm-url-decode-entities-string from)
date
(concat "<" group "%"
(number-to-string article)
(goto-char (point-min))
(while (re-search-forward "<a[^>]+>\\([^<]+\\)</a>" nil t)
(replace-match "\\1"))
- (nnweb-decode-entities)
+ (mm-url-decode-entities)
(buffer-string))
(defun nnwarchive-egroups-xover-files (group articles)
article
(make-full-mail-header
article
- (nnweb-decode-entities-string subject)
- (nnweb-decode-entities-string from)
+ (mm-url-decode-entities-string subject)
+ (mm-url-decode-entities-string from)
date
(format "<%05d%%%s>\n" (1- article) group)
""
(when (search-forward "X-Head-End" nil t)
(beginning-of-line)
(narrow-to-region (point-min) (point))
- (nnweb-decode-entities)
+ (mm-url-decode-entities)
(goto-char (point-min))
(while (search-forward "<!--X-" nil t)
(replace-match ""))
(search-forward "</ul>" nil t)
(end-of-line)
(narrow-to-region (point-min) (point))
- (nnweb-remove-markup)
- (nnweb-decode-entities)
+ (mm-url-remove-markup)
+ (mm-url-decode-entities)
(goto-char (point-min))
(delete-blank-lines)
(when from
(delete-region (match-beginning 0) (match-end 0))
(save-restriction
(narrow-to-region p (point))
- (nnweb-remove-markup)
- (nnweb-decode-entities)
+ (mm-url-remove-markup)
+ (mm-url-decode-entities)
(goto-char (point-max)))))
((looking-at "<P><A HREF=\"\\([^\"]+\\)")
(setq url (match-string 1))
;;; Commentary:
-;; Note: You need to have `url' and `w3' installed for this
-;; backend to work.
+;; Note: You need to have `w3' installed for some functions to work.
;;; Code:
(require 'gnus-util)
(require 'gnus)
(require 'nnmail)
-(eval-when-compile
- (ignore-errors
- (require 'w3)
- (require 'url)
- (require 'w3-forms)))
-
-;; Report failure to find w3 at load time if appropriate.
-(unless noninteractive
- (eval '(progn
- (require 'w3)
- (require 'url)
- (require 'w3-forms))))
+(require 'mm-util)
+(require 'mm-url)
+(autoload 'w3-parse-buffer "w3-parse")
(nnoo-declare nnweb)
(url (and header (mail-header-xref header))))
(when (or (and url
(mm-with-unibyte-current-buffer
- (nnweb-fetch-url url)))
+ (mm-url-insert url)))
(and (stringp article)
(nnweb-definition 'id t)
(let ((fetch (nnweb-definition 'id))
(when (and fetch art)
(setq url (format fetch art))
(mm-with-unibyte-current-buffer
- (nnweb-fetch-url url))
+ (mm-url-insert url))
(if (nnweb-definition 'reference t)
(setq article
(funcall (nnweb-definition
nnweb-type nnweb-search server))
(current-buffer))))))
-(defun nnweb-fetch-url (url)
- (let (buf)
- (save-excursion
- (if (not nnheader-callback-function)
- (progn
- (with-temp-buffer
- (mm-enable-multibyte)
- (let ((coding-system-for-read 'binary)
- (coding-system-for-write 'binary)
- (input-coding-system 'binary)
- (output-coding-system 'binary)
- (default-process-coding-system 'binary))
- (nnweb-insert url))
- (setq buf (buffer-string)))
- (erase-buffer)
- (insert buf)
- t)
- (nnweb-url-retrieve-asynch
- url 'nnweb-callback (current-buffer) nnheader-callback-function)
- t))))
-
-(defun nnweb-callback (buffer callback)
- (when (gnus-buffer-live-p url-working-buffer)
- (save-excursion
- (set-buffer url-working-buffer)
- (funcall (nnweb-definition 'article))
- (nnweb-decode-entities)
- (set-buffer buffer)
- (goto-char (point-max))
- (insert-buffer-substring url-working-buffer))
- (funcall callback t)
- (gnus-kill-buffer url-working-buffer)))
-
-(defun nnweb-url-retrieve-asynch (url callback &rest data)
- (let ((url-request-method "GET")
- (old-asynch url-be-asynchronous)
- (url-request-data nil)
- (url-request-extra-headers nil)
- (url-working-buffer (generate-new-buffer-name " *nnweb*")))
- (setq-default url-be-asynchronous t)
- (save-excursion
- (set-buffer (get-buffer-create url-working-buffer))
- (setq url-current-callback-data data
- url-be-asynchronous t
- url-current-callback-func callback)
- (url-retrieve url nil))
- (setq-default url-be-asynchronous old-asynch)))
-
-(if (fboundp 'url-retrieve-synchronously)
- (defun nnweb-url-retrieve-asynch (url callback &rest data)
- (url-retrieve url callback data)))
+;; (defun nnweb-fetch-url (url)
+;; (let (buf)
+;; (save-excursion
+;; (if (not nnheader-callback-function)
+;; (progn
+;; (with-temp-buffer
+;; (mm-enable-multibyte)
+;; (let ((coding-system-for-read 'binary)
+;; (coding-system-for-write 'binary)
+;; (input-coding-system 'binary)
+;; (output-coding-system 'binary)
+;; (default-process-coding-system 'binary))
+;; (nnweb-insert url))
+;; (setq buf (buffer-string)))
+;; (erase-buffer)
+;; (insert buf)
+;; t)
+;; (nnweb-url-retrieve-asynch
+;; url 'nnweb-callback (current-buffer) nnheader-callback-function)
+;; t))))
+
+;; (defun nnweb-callback (buffer callback)
+;; (when (gnus-buffer-live-p url-working-buffer)
+;; (save-excursion
+;; (set-buffer url-working-buffer)
+;; (funcall (nnweb-definition 'article))
+;; (nnweb-decode-entities)
+;; (set-buffer buffer)
+;; (goto-char (point-max))
+;; (insert-buffer-substring url-working-buffer))
+;; (funcall callback t)
+;; (gnus-kill-buffer url-working-buffer)))
+
+;; (defun nnweb-url-retrieve-asynch (url callback &rest data)
+;; (let ((url-request-method "GET")
+;; (old-asynch url-be-asynchronous)
+;; (url-request-data nil)
+;; (url-request-extra-headers nil)
+;; (url-working-buffer (generate-new-buffer-name " *nnweb*")))
+;; (setq-default url-be-asynchronous t)
+;; (save-excursion
+;; (set-buffer (get-buffer-create url-working-buffer))
+;; (setq url-current-callback-data data
+;; url-be-asynchronous t
+;; url-current-callback-func callback)
+;; (url-retrieve url nil))
+;; (setq-default url-be-asynchronous old-asynch)))
+
+;; (if (fboundp 'url-retrieve-synchronously)
+;; (defun nnweb-url-retrieve-asynch (url callback &rest data)
+;; (url-retrieve url callback data)))
;;;
;;; DejaNews functions.
;; Yup -- fetch it.
(setq more (match-string 1))
(erase-buffer)
- (url-insert-file-contents more)))
+ (mm-url-insert more)))
;; Return the articles in the right order.
(setq nnweb-articles
(sort (nconc nnweb-articles map) 'car-less-than-car))))))
(defun nnweb-dejanews-search (search)
- (nnweb-insert
+ (mm-url-insert
(concat
(nnweb-definition 'address)
"?"
- (nnweb-encode-www-form-urlencoded
+ (mm-url-encode-www-form-urlencoded
`(("ST" . "PS")
("svcclass" . "dnyr")
("QRY" . ,search)
("ageweight" . "1")))))
t)
-(defun nnweb-dejanewsold-search (search)
- (nnweb-fetch-form
- (nnweb-definition 'address)
- `(("query" . ,search)
- ("defaultOp" . "AND")
- ("svcclass" . "dnold")
- ("maxhits" . "100")
- ("format" . "verbose2")
- ("threaded" . "0")
- ("showsort" . "date")
- ("agesign" . "1")
- ("ageweight" . "1")))
- t)
+;; (defun nnweb-dejanewsold-search (search)
+;; (nnweb-fetch-form
+;; (nnweb-definition 'address)
+;; `(("query" . ,search)
+;; ("defaultOp" . "AND")
+;; ("svcclass" . "dnold")
+;; ("maxhits" . "100")
+;; ("format" . "verbose2")
+;; ("threaded" . "0")
+;; ("showsort" . "date")
+;; ("agesign" . "1")
+;; ("ageweight" . "1")))
+;; t)
(defun nnweb-dejanews-identity (url)
"Return an unique identifier based on URL."
(goto-char (point-min))
(when (looking-at ".*href=\"\\([^\"]+\\)\"")
(setq url (match-string 1)))
- (nnweb-remove-markup)
+ (mm-url-remove-markup)
(goto-char (point-min))
(while (search-forward "\t" nil t)
(replace-match " "))
(let ((body (point-marker)))
(search-forward "</pre>" nil t)
(delete-region (point) (point-max))
- (nnweb-remove-markup)
+ (mm-url-remove-markup)
(goto-char (point-min))
(while (looking-at " *$")
(gnus-delete-line))
(while (search-forward "," nil t)
(replace-match " " t t)))
(widen)
- (nnweb-decode-entities)
+ (mm-url-decode-entities)
(set-marker body nil))))
(defun nnweb-reference-search (search)
- (url-insert-file-contents
+ (mm-url-insert
(concat
(nnweb-definition 'address)
"?"
- (nnweb-encode-www-form-urlencoded
+ (mm-url-encode-www-form-urlencoded
`(("search" . "advanced")
("querytext" . ,search)
("subj" . "")
(goto-char (point-min))
(while (search-forward "<dt>" nil t)
(replace-match "\n<blubb>"))
- (nnweb-decode-entities)
+ (mm-url-decode-entities)
(goto-char (point-min))
(while (re-search-forward "<blubb>.*href=\"\\([^\"]+\\)\"><strong>\\([^>]*\\)</strong></a><dd>\\([^-]+\\)- <b>\\([^<]+\\)<.*href=\"news:\\([^\"]+\\)\">.*\">\\(.+\\)</a><P>"
nil t)
(while (re-search-forward "<A.*\\?id@\\([^\"]+\\)\">[0-9]+</A>" nil t)
(replace-match "<\\1> " t)))
(widen)
- (nnweb-remove-markup)
- (nnweb-decode-entities)))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities)))
(defun nnweb-altavista-search (search &optional part)
- (url-insert-file-contents
+ (mm-url-insert
(concat
(nnweb-definition 'address)
"?"
- (nnweb-encode-www-form-urlencoded
+ (mm-url-encode-www-form-urlencoded
`(("pg" . "aq")
("what" . "news")
,@(when part `(("stq" . ,(int-to-string (* part 30)))))
(goto-char (point-min))
(while (search-forward "<br>" nil t)
(replace-match "\n"))
- (nnweb-remove-markup)
+ (mm-url-remove-markup)
(goto-char (point-min))
(while (re-search-forward "^[ \t]*\n" nil t)
(replace-match ""))
(narrow-to-region (point) (point-max))
(search-forward "</pre>" nil t)
(delete-region (point) (point-max))
- (nnweb-remove-markup)
+ (mm-url-remove-markup)
(widen)))
(defun nnweb-google-parse-1 (&optional Message-ID)
"http://groups.google.com/groups?selm=%s&output=gplain" mid))
(narrow-to-region (search-forward ">" nil t)
(search-forward "</a>" nil t))
- (nnweb-remove-markup)
- (nnweb-decode-entities)
+ (mm-url-remove-markup)
+ (mm-url-decode-entities)
(setq Subject (buffer-string))
(goto-char (point-max))
(widen)
(skip-chars-forward " \t")
(narrow-to-region (point)
(search-forward "</a>" nil t))
- (nnweb-remove-markup)
- (nnweb-decode-entities)
+ (mm-url-remove-markup)
+ (mm-url-decode-entities)
(setq Newsgroups (buffer-string))
(goto-char (point-max))
(widen)
(nconc nnweb-articles map))
(when (setq header (cadar map))
(mm-with-unibyte-current-buffer
- (nnweb-fetch-url (mail-header-xref header)))
+ (mm-url-insert (mail-header-xref header)))
(caar map))))
(defun nnweb-google-create-mapping ()
(sort nnweb-articles 'car-less-than-car))))))
(defun nnweb-google-search (search)
- (nnweb-insert
+ (mm-url-insert
(concat
(nnweb-definition 'address)
"?"
- (nnweb-encode-www-form-urlencoded
+ (mm-url-encode-www-form-urlencoded
`(("q" . ,search)
("num". "100")
("hq" . "")
(mapcar 'nnweb-insert-html (nth 2 parse))
(insert "</" (symbol-name (car parse)) ">\n")))
-(defun nnweb-encode-www-form-urlencoded (pairs)
- "Return PAIRS encoded for forms."
- (mapconcat
- (function
- (lambda (data)
- (concat (w3-form-encode-xwfu (car data)) "="
- (w3-form-encode-xwfu (cdr data)))))
- pairs "&"))
-
-(defun nnweb-fetch-form (url pairs)
- "Fetch a form from URL with PAIRS as the data using the POST method."
- (let ((url-request-data (nnweb-encode-www-form-urlencoded pairs))
- (url-request-method "POST")
- (url-request-extra-headers
- '(("Content-type" . "application/x-www-form-urlencoded"))))
- (url-insert-file-contents url)
- (setq buffer-file-name nil))
- t)
-
-(defun nnweb-decode-entities ()
- "Decode all HTML entities."
- (goto-char (point-min))
- (while (re-search-forward "&\\(#[0-9]+\\|[a-z]+\\);" nil t)
- (let ((elem (if (eq (aref (match-string 1) 0) ?\#)
- (let ((c
- (string-to-number (substring
- (match-string 1) 1))))
- (if (mm-char-or-char-int-p c) c 32))
- (or (cdr (assq (intern (match-string 1))
- w3-html-entities))
- ?#))))
- (unless (stringp elem)
- (setq elem (char-to-string elem)))
- (replace-match elem t t))))
-
-(defun nnweb-decode-entities-string (string)
- (with-temp-buffer
- (insert string)
- (nnweb-decode-entities)
- (buffer-substring (point-min) (point-max))))
-
-(defun nnweb-remove-markup ()
- "Remove all HTML markup, leaving just plain text."
- (goto-char (point-min))
- (while (search-forward "<!--" nil t)
- (delete-region (match-beginning 0)
- (or (search-forward "-->" nil t)
- (point-max))))
- (goto-char (point-min))
- (while (re-search-forward "<[^>]+>" nil t)
- (replace-match "" t t)))
-
-(defun nnweb-insert (url &optional follow-refresh)
- "Insert the contents from an URL in the current buffer.
-If FOLLOW-REFRESH is non-nil, redirect refresh url in META."
- (let ((name buffer-file-name))
- (if follow-refresh
- (save-restriction
- (narrow-to-region (point) (point))
- (url-insert-file-contents url)
- (goto-char (point-min))
- (when (re-search-forward
- "<meta[ \t\r\n]*http-equiv=\"Refresh\"[^>]*URL=\\([^\"]+\\)\"" nil t)
- (let ((url (match-string 1)))
- (delete-region (point-min) (point-max))
- (nnweb-insert url t))))
- (url-insert-file-contents url))
- (setq buffer-file-name name)))
-
(defun nnweb-parse-find (type parse &optional maxdepth)
"Find the element of TYPE in PARSE."
(catch 'found
(require 'gnus)
(require 'nnmail)
(require 'mm-util)
-(eval-when-compile
- (ignore-errors
- (require 'nnweb)))
-;; Report failure to find w3 at load time if appropriate.
-(eval '(require 'nnweb))
+(require 'mm-url)
+(require 'nnweb)
+(autoload 'w3-parse-buffer "w3-parse")
(nnoo-declare nnwfm)
(erase-buffer)
(setq subject (nth 2 (assq (car elem) topics))
thread-id (nth 0 (assq (car elem) topics)))
- (nnweb-insert
+ (mm-url-insert
(concat nnwfm-address
(format "Item.asp?GroupID=%d&ThreadID=%d" sid
thread-id)))
(deffoo nnwfm-request-list (&optional server)
(nnwfm-possibly-change-server nil server)
(mm-with-unibyte-buffer
- (nnweb-insert
+ (mm-url-insert
(if (string-match "/$" nnwfm-address)
(concat nnwfm-address "Group.asp")
nnwfm-address))
(while furls
(erase-buffer)
(push (car furls) fetched-urls)
- (nnweb-insert (pop furls))
+ (mm-url-insert (pop furls))
(goto-char (point-min))
(while (re-search-forward " wr(" nil t)
(forward-char -1)
(when (re-search-forward "href=\"\\(Thread.*DateLast=\\([^\"]+\\)\\)"
nil t)
(setq url (match-string 1)
- time (nnwfm-date-to-time (url-unhex-string (match-string 2))))
+ time (nnwfm-date-to-time (gnus-url-unhex-string
+ (match-string 2))))
(when (and (nnwfm-new-threads-p group time)
(not (member
(setq url (concat
nnwfm-address
- (nnweb-decode-entities-string url)))
+ (mm-url-decode-entities-string url)))
fetched-urls)))
(push url furls))))
;; The main idea here is to map Gnus article numbers to
(require 'gnus)
(require 'nnmail)
(require 'mm-util)
+(require 'mm-url)
(require 'mml)
(eval-when-compile
(ignore-errors
- (require 'w3)
(require 'url)
- (require 'url-cookie)
- (require 'w3-forms)
- (require 'nnweb)))
+ (require 'url-cookie)))
;; Report failure to find w3 at load time if appropriate.
(eval '(progn
- (require 'w3)
(require 'url)
- (require 'url-cookie)
- (require 'w3-forms)
- (require 'nnweb)))
+ (require 'url-cookie)))
;;;
(set (intern (concat "webmail-" (symbol-name var))) (cdr pair))
(set (intern (concat "webmail-" (symbol-name var))) nil)))))
-(defun webmail-encode-www-form-urlencoded (pairs)
- "Return PAIRS encoded for forms."
- (mapconcat
- (function
- (lambda (data)
- (concat (w3-form-encode-xwfu (car data)) "="
- (w3-form-encode-xwfu (cdr data)))))
- pairs "&"))
-
-(defun webmail-fetch-simple (url content)
- (let ((url-request-data content)
- (url-request-method "POST")
- (url-request-extra-headers
- '(("Content-type" . "application/x-www-form-urlencoded"))))
- (nnweb-insert url))
- t)
-
-(defun webmail-fetch-form (url pairs)
- (let ((url-request-data (webmail-encode-www-form-urlencoded pairs))
- (url-request-method "POST")
- (url-request-extra-headers
- '(("Content-type" . "application/x-www-form-urlencoded"))))
- (nnweb-insert url))
- t)
-
(defun webmail-eval (expr)
(cond
((consp expr)
(cond
((eq (car xurl) 'content)
(pop xurl)
- (webmail-fetch-simple (if (stringp (car xurl))
+ (mm-url-fetch-simple (if (stringp (car xurl))
(car xurl)
(apply 'format (webmail-eval (car xurl))))
(apply 'format (webmail-eval (cdr xurl)))))
((eq (car xurl) 'post)
(pop xurl)
- (webmail-fetch-form (car xurl) (webmail-eval (cdr xurl))))
+ (mm-url-fetch-form (car xurl) (webmail-eval (cdr xurl))))
(t
- (nnweb-insert (apply 'format (webmail-eval xurl)))))))
+ (mm-url-insert (apply 'format (webmail-eval xurl)))))))
(defun webmail-init ()
"Initialize buffers and such."
(let ((url (match-string 1)))
(erase-buffer)
(mm-with-unibyte-current-buffer
- (nnweb-insert url)))
+ (mm-url-insert url)))
(goto-char (point-min))))
(defun webmail-fetch (file subtype user password)
(message "Fetching mail #%d..." (setq n (1+ n)))
(erase-buffer)
(mm-with-unibyte-current-buffer
- (nnweb-insert (cdr item)))
+ (mm-url-insert (cdr item)))
(setq id (car item))
(if webmail-article-snarf
(funcall webmail-article-snarf file id))
(if (not (search-forward "</pre>" nil t))
(webmail-error "article@3.1"))
(delete-region (match-beginning 0) (point-max))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(while (re-search-forward "\r\n?" nil t)
(replace-match "\n"))
(setq p (match-beginning 0))
(search-forward "</a>" nil t)
(delete-region p (match-end 0)))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(delete-blank-lines)
(goto-char (point-min))
(delete-region p (match-end 0))
(save-excursion
(set-buffer (generate-new-buffer " *webmail-att*"))
- (nnweb-insert attachment)
+ (mm-url-insert attachment)
(push (current-buffer) webmail-buffer-list)
(setq bufname (buffer-name)))
(setq mime t)
(goto-char (match-end 0))
(if (looking-at "$") (forward-char))
(delete-region (point-min) (point))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
nil)
(t
(setq mime t)
(setq p (match-beginning 0))
(search-forward "</a>" nil t)
(delete-region p (match-end 0)))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(delete-blank-lines)
(goto-char (point-max))
(if (not (search-forward "</table>" nil t))
(webmail-error "article@5"))
(narrow-to-region p (match-end 0))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(delete-blank-lines)
(setq ct (mail-fetch-field "content-type")
(widen)
(save-excursion
(set-buffer (generate-new-buffer " *webmail-att*"))
- (nnweb-insert (concat webmail-aux attachment))
+ (mm-url-insert (concat webmail-aux attachment))
(push (current-buffer) webmail-buffer-list)
(setq bufname (buffer-name)))
(insert "<#part")
(goto-char (point-min))
(while (re-search-forward "<br>" nil t)
(replace-match "\n"))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
nil)
(t
(insert "<#part type=\"text/html\" disposition=inline>")
(goto-char (point-min))
(while (search-forward "<b>" nil t)
(replace-match "\n"))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(delete-blank-lines)
(goto-char (point-min))
(let (bufname);; Attachment
(save-excursion
(set-buffer (generate-new-buffer " *webmail-att*"))
- (nnweb-insert (concat (car webmail-open-url) attachment))
+ (mm-url-insert (concat (car webmail-open-url) attachment))
(push (current-buffer) webmail-buffer-list)
(setq bufname (buffer-name)))
(insert "<#part type=" type)
(goto-char (point-min))
(while (search-forward "<b>" nil t)
(replace-match "\n"))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(delete-blank-lines)
(goto-char (point-min))
(let (bufname);; Attachment
(save-excursion
(set-buffer (generate-new-buffer " *webmail-att*"))
- (nnweb-insert (concat (car webmail-open-url) attachment))
+ (mm-url-insert (concat (car webmail-open-url) attachment))
(push (current-buffer) webmail-buffer-list)
(setq bufname (buffer-name)))
(insert "<#part type=" type)
(let ((url (match-string 1)))
(setq base (match-string 2))
(erase-buffer)
- (nnweb-insert url)))
+ (mm-url-insert url)))
(goto-char (point-min))
(when (re-search-forward
"(\\([0-9]+\\) Message.?-[^>]*\\([0-9]+\\) New"
(match-beginning 0)
(point-max)))
(goto-char (point-min))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-max))))
((looking-at "[\t\040\r\n]*<TABLE")
(save-restriction
(delete-region (point-min) (point-max))
(save-excursion
(set-buffer (generate-new-buffer " *webmail-att*"))
- (nnweb-insert url)
+ (mm-url-insert url)
(push (current-buffer) webmail-buffer-list)
(setq bufname (buffer-name)))
(insert "<#part type=\"" type "\"")
(narrow-to-region (point-min) (point))
(while (search-forward "\r\n" nil t)
(replace-match "\n"))
- (nnweb-remove-markup)
- (let ((w3-html-entities (cons '(nbsp . 32) w3-html-entities)))
- (nnweb-decode-entities))
+ (mm-url-remove-markup)
+ (mm-url-decode-entities-nbsp)
(goto-char (point-min))
(while (re-search-forward "\n\n+" nil t)
(replace-match "\n"))
+2001-12-09 Nevin Kapur <nevin@jhu.edu>
+
+ * gnus.texi (Expiring Mail): Add.
+
2001-12-05 Kai Gro\e,A_\e(Bjohann <Kai.Grossjohann@CS.Uni-Dortmund.DE>
* gnus.texi (Splitting in IMAP): Typo. From Colin Marquardt
(setq nnmail-expiry-target "nnml:expired")
@end lisp
+@findex nnmail-fancy-expiry-target
+@vindex nnmail-fancy-expiry-targets
+gnus \e$B$K$OJQ?t\e(B @code{nnmail-fancy-expiry-targets} \e$B$K=>$C$F%a!<%k$r4|8B@Z\e(B
+\e$B$l>C5n$9$k$?$a$N4X?t\e(B @code{nnmail-fancy-expiry-target} \e$B$,$"$j$^$9!#$3$l\e(B
+\e$B$ONc$G$9\e(B:
+
+@lisp
+ (setq nnmail-expiry-target 'nnmail-fancy-expiry-target
+ nnmail-fancy-expiry-targets
+ '((to-from "boss" "nnfolder:Work")
+ ("subject" "IMPORTANT" "nnfolder:IMPORTANT.%Y.%b")
+ ("from" ".*" "nnfolder:Archive-%Y")))
+@end lisp
+
+\e$B$3$N@_Dj$K$h$C$F!"BjL>$K\e(B @code{IMPORTANT} \e$B$r;}$A!"\e(B
+@code{YYYY} \e$BG/\e(B @code{MMM} \e$B7n$KH/?.$5$l$?$$$+$J$k%a!<%k$b!"%0%k!<\e(B
+\e$B%W\e(B @code{nnfolder:IMPORTANT.YYYY.MMM} \e$B$K4|8B@Z$l>C5n\e(B (\e$BLuCm\e(B: \e$B<B:]$NF0:n$O\e(B
+\e$BJ]B8\e(B) \e$B$5$l$^$9!#$b$7\e(B From \e$B$^$?$O\e(B To \e$B%X%C%@!<$,J8;zNs\e(B @code{boss} \e$B$r4^$s\e(B
+\e$B$G$$$?$i\e(B @code{nnfolder:Work} \e$B$K4|8B@Z$l>C5n$5$l$^$9!#$=$l0J30$N$9$Y$F$N\e(B
+\e$B%a!<%k$O\e(B @code{nnfolder:Archive-YYYY} \e$B$K4|8B@Z$l>C5n$5$l$^$9!#\e(B
+
@vindex nnmail-keep-last-article
@code{nnmail-keep-last-article} \e$B$,\e(B @code{nil} \e$B$G$J$$$H!"\e(Bgnus \e$B$O%a!<%k%K%e!<\e(B
\e$B%9%0%k!<%W$N:G8e$N5-;v$r7h$7$F4|8B@Z$l>C5n$7$^$;$s!#$3$l$O\e(B procmail \e$B$NMx\e(B
(setq nnmail-expiry-target "nnml:expired")
@end lisp
+@findex nnmail-fancy-expiry-target
+@vindex nnmail-fancy-expiry-targets
+Gnus provides a function @code{nnmail-fancy-expiry-target} which will
+expire mail to groups according to the variable
+@code{nnmail-fancy-expiry-targets}. Here's an example:
+
+@lisp
+ (setq nnmail-expiry-target 'nnmail-fancy-expiry-target
+ nnmail-fancy-expiry-targets
+ '((to-from "boss" "nnfolder:Work")
+ ("subject" "IMPORTANT" "nnfolder:IMPORTANT.%Y.%b")
+ ("from" ".*" "nnfolder:Archive-%Y")))
+@end lisp
+
+With this setup, any mail that has @code{IMPORTANT} in its Subject
+header and was sent in the year @code{YYYY} and month @code{MMM}, will
+get expired to the group @code{nnfolder:IMPORTANT.YYYY.MMM}. If its
+From or To header contains the string @code{boss}, it will get expired
+to @code{nnfolder:Work}. All other mail will get expired to
+@code{nnfolder:Archive-YYYY}.
@vindex nnmail-keep-last-article
If @code{nnmail-keep-last-article} is non-@code{nil}, Gnus will never