aboutsummaryrefslogtreecommitdiff
path: root/module/calp/server/routes.scm
diff options
context:
space:
mode:
Diffstat (limited to 'module/calp/server/routes.scm')
-rw-r--r--module/calp/server/routes.scm55
1 file changed, 39 insertions(+), 16 deletions(-)
diff --git a/module/calp/server/routes.scm b/module/calp/server/routes.scm
index f647b998..389941c7 100644
--- a/module/calp/server/routes.scm
+++ b/module/calp/server/routes.scm
@@ -11,6 +11,7 @@
:use-module ((web response) :select (build-response))
:use-module ((web uri) :select (build-relative-ref))
+ :use-module ((web uri-query) :select (encode-query-parameters))
:use-module (sxml simple)
:use-module (sxml xpath)
@@ -345,6 +346,21 @@
(return (build-response code: 404)
(format #f "No component with UID=~a found." uid))))
+ (GET "/search/text" (q)
+ (return (build-response
+ code: 302
+ headers:
+ `((location
+ . ,(build-relative-ref
+ path: "/search/"
+ query:
+ (encode-query-parameters
+ `((q . (regexp-exec
+ (make-regexp ,(->quoted-string q)
+ regexp/icase)
+ (prop event 'SUMMARY)))))
+ ))))))
+
(GET "/search" (q p)
(define search-term (and=> q prepare-string))
@@ -359,29 +375,36 @@
(define page (string->number (or p "0")))
- ;; TODO Propagate errors
+ (define error #f)
+
(define search-result
- (catch 'max-page
- ;; TODO Get-page only puts a time limiter per page, meaning that
- ;; if a user requests page 1000 the server is stuck trying to
- ;; find that page, which can take up to 1000 * timeslice = 500s = 8min+
- ;; A timeout here, and also an actual multithreaded server should
- ;; solve this.
- (lambda () (get-page paginator page))
- (lambda (err page-number)
- (define location
- (build-relative-ref
- path: r:path ; host: r:host port: r:port
- query: (format #f "~a&p=~a" q= page-number)))
- (return (build-response
- code: 307
- headers: `((location . ,location)))))))
+ (catch #t
+ (lambda ()
+ (catch 'max-page
+ ;; TODO Get-page only puts a time limiter per page, meaning that
+ ;; if a user requests page 1000 the server is stuck trying to
+ ;; find that page, which can take up to 1000 * timeslice = 500s = 8min+
+ ;; A timeout here, and also an actual multithreaded server should
+ ;; solve this.
+ (lambda () (get-page paginator page))
+ (lambda (err page-number)
+ (define location
+ (build-relative-ref
+ path: r:path ; host: r:host port: r:port
+ query: (format #f "~a&p=~a" q= page-number)))
+ (return (build-response
+ code: 307
+ headers: `((location . ,location)))))))
+ (lambda (err callee fmt arg data)
+ (set! error
+ (format #f "~?~%" fmt arg)))))
(return '((content-type application/xhtml+xml))
(with-output-to-string
(lambda ()
(sxml->xml
(search-result-page
+ error
q search-term search-result page paginator q=))))))
;; NOTE this only handles files with extensions. Limited, but since this