author     Elias Pipping <pipping@exherbo.org>  2013-02-17 13:29:02 +0100
committer  Elias Pipping <pipping@exherbo.org>  2013-02-17 13:29:02 +0100
commit     2f2ab5998fe6e78384a452cf28bf76b3b2ddced1 (patch)
tree       692e0d125aa1af1970679fba03e5aac33708777e
parent     28ebda4a089856a8b7942b8657d579b3682bb2db (diff)
download   emacs-2f2ab5998fe6e78384a452cf28bf76b3b2ddced1.tar.gz
           emacs-2f2ab5998fe6e78384a452cf28bf76b3b2ddced1.tar.xz
Use upstream's patch rather than mine
-rw-r--r--  packages/app-emacs/ebib/ebib-1.11.0-r2.exheres-0 (renamed from packages/app-emacs/ebib/ebib-1.11.0-r1.exheres-0)    0
-rw-r--r--  packages/app-emacs/ebib/files/ebib-doi.patch                                                                      118
2 files changed, 107 insertions, 11 deletions
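A minimal sketch of why the replacement matters, assuming a DOI in the angle-bracket style (the DOI below is made up, and old-regexp is the pre-patch value of ebib-url-regexp visible in the dropped hunk): the old regexp stops a URL match at `<', while upstream's version leaves ebib-url-regexp alone and has ebib-browse-doi hand the formatted DOI URL to ebib-call-browser without any regexp splitting.

;; Sketch only: `old-regexp' is the pre-patch `ebib-url-regexp';
;; the DOI below is a hypothetical example of the angle-bracket style.
(let ((old-regexp "\\\\url{\\(.*\\)}\\|https?://[^ '<>\"\n\t\f]+")
      (doi-url "https://dx.doi.org/10.1002/(SICI)1096-8644(1996)99:1<130::AID-AJPA8>3.0.CO;2-#"))
  (when (string-match old-regexp doi-url)
    (match-string 0 doi-url)))
;; => "https://dx.doi.org/10.1002/(SICI)1096-8644(1996)99:1", i.e. cut off at `<'.
;; With the upstream patch, ebib-browse-doi calls
;; (ebib-call-browser (format ebib-doi-url doi)) directly, so the full
;; URL reaches the browser and ebib-url-regexp only affects URL fields.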
diff --git a/packages/app-emacs/ebib/ebib-1.11.0-r1.exheres-0 b/packages/app-emacs/ebib/ebib-1.11.0-r2.exheres-0
index 54758a2..54758a2 100644
--- a/packages/app-emacs/ebib/ebib-1.11.0-r1.exheres-0
+++ b/packages/app-emacs/ebib/ebib-1.11.0-r2.exheres-0
diff --git a/packages/app-emacs/ebib/files/ebib-doi.patch b/packages/app-emacs/ebib/files/ebib-doi.patch
index 4a6e028..6c4e2bb 100644
--- a/packages/app-emacs/ebib/files/ebib-doi.patch
+++ b/packages/app-emacs/ebib/files/ebib-doi.patch
@@ -1,14 +1,110 @@
-Source: Elias Pipping <pipping@exherbo.org>
-Upstream: sent to the mailing list
+Source: https://github.com/joostkremers/ebib/commit/86a9173e489aebe4729cfacf0dc078c4851471a4.diff
+Upstream: yes
Reason: DOIs can contain the characters < and >.
+
+diff --git a/ebib.el b/ebib.el
+index cecbe70..f943a53 100644
--- a/ebib.el
+++ b/ebib.el
-@@ -198,7 +198,7 @@
- :group 'ebib
- :type 'symbol)
-
--(defcustom ebib-url-regexp "\\\\url{\\(.*\\)}\\|https?://[^ '<>\"\n\t\f]+"
-+(defcustom ebib-url-regexp "\\\\url{\\(.*\\)}\\|https?://[^ '\"\n\t\f]+"
- "*Regular expression to extract urls from a field."
- :group 'ebib
- :type 'string)
+@@ -3002,6 +3002,32 @@ Operates either on all entries or on the marked entries."
+ (ebib-fill-entry-buffer)
+ (ebib-fill-index-buffer)))))
+
++(defun ebib-split-urls (urls n)
++ "Split URLS and return the Nth one.
++URLs are split using EBIB-URL-REGEXP. The URL is cleaned up a bit
++before being returned, i.e., an enclosing \\url{...} or <...> is
++removed."
++ (let ((url (nth (1- n)
++ (let ((start 0)
++ (result nil))
++ (while (string-match ebib-url-regexp urls start)
++ (add-to-list 'result (match-string 0 urls) t)
++ (setq start (match-end 0)))
++ result))))
++ (if url
++ (cond
++ ;; first see if the url is contained in \url{...}
++ ((string-match "\\\\url{\\(.*?\\)}" url)
++ (setq url (match-string 1 url)))
++ ;; then check for http(s), or whatever the user customized
++ ((string-match ebib-url-regexp url)
++ (setq url (match-string 0 url)))
++ ;; this clause probably won't be reached, but just in case
++ (t (error "Not a URL: `%s'" url)))
++ ;; otherwise, we didn't find a url
++ (error "No URL found in `%s'" urls))
++ url))
++
+ (defun ebib-browse-url (num)
+ "Asks a browser to load the URL in the standard URL field.
+ The standard URL field (see user option EBIB-STANDARD-URL-FIELD)
+@@ -3014,7 +3040,7 @@ which URL to choose."
+ (let ((urls (to-raw (gethash ebib-standard-url-field
+ (ebib-retrieve-entry (ebib-cur-entry-key) ebib-cur-db)))))
+ (if urls
+- (ebib-call-browser urls num)
++ (ebib-call-browser (ebib-split-urls urls num))
+ (error "Field `%s' is empty" ebib-standard-url-field))))
+ ((default)
+ (beep))))
+@@ -3032,41 +3058,19 @@ sent to the browser."
+ (let ((doi (to-raw (gethash ebib-standard-doi-field
+ (ebib-retrieve-entry (ebib-cur-entry-key) ebib-cur-db)))))
+ (if doi
+- (ebib-call-browser (format ebib-doi-url doi) 1) ; '1' was 'num'
++ (ebib-call-browser (format ebib-doi-url doi))
+ (error "No DOI found in field `%s'" ebib-standard-doi-field))))
+ ((default)
+ (beep))))
+
+-(defun ebib-call-browser (urls n)
+- "Passes the Nth URL in URLS to a browser.
+-URLS must be a string of whitespace-separated URLs."
+- ;; first we extract all valid urls and then pick the nth one
+- (let ((url (nth (1- n)
+- (let ((start 0)
+- (result nil))
+- (while (string-match ebib-url-regexp urls start)
+- (add-to-list 'result (match-string 0 urls) t)
+- (setq start (match-end 0)))
+- result))))
+- (if url
+- (cond
+- ;; first see if the url is contained in \url{...}
+- ((string-match "\\\\url{\\(.*?\\)}" url)
+- (setq url (match-string 1 url)))
+- ;; then check for http(s), or whatever the user customized
+- ((string-match ebib-url-regexp url)
+- (setq url (match-string 0 url)))
+- ;; this clause probably won't be reached, but just in case
+- (t (error "Not a URL: `%s'" url)))
+- ;; otherwise, we didn't find a url
+- (error "No URL found in `%s'" urls))
+- (when url
+- (if (string= ebib-browser-command "")
+- (progn
+- (message "Calling BROWSE-URL on `%s'" url)
+- (browse-url url))
+- (message "Executing `%s %s'" ebib-browser-command url)
+- (start-process "Ebib-browser" nil ebib-browser-command url)))))
++(defun ebib-call-browser (url)
++ "Passes URL to a browser."
++ (if (string= ebib-browser-command "")
++ (progn
++ (message "Calling BROWSE-URL on `%s'" url)
++ (browse-url url))
++ (message "Executing `%s %s'" ebib-browser-command url)
++ (start-process "Ebib-browser" nil ebib-browser-command url)))
+
+ (defun ebib-view-file (num)
+ "Views a file in the standard file field.
+@@ -3617,7 +3621,7 @@ browser."
+ (interactive "p")
+ (let ((urls (to-raw (gethash ebib-current-field ebib-cur-entry-hash))))
+ (if urls
+- (ebib-call-browser urls num)
++ (ebib-call-browser (ebib-split-urls urls num))
+ (error "Field `%s' is empty" ebib-current-field))))
+
+ (defun ebib-view-file-in-field (num)