author    Hugo Hörnquist <hugo@lysator.liu.se> 2020-08-10 13:31:56 +0200
committer Hugo Hörnquist <hugo@lysator.liu.se> 2020-08-10 13:31:56 +0200
commit    70e87c195dd3600f91083ca0c6df0b3a043c3009 (patch)
tree      65244d428f55c51c4cdc426c7536ba151cf32116
parent    Got new timeslice limiter to work, document. (diff)
Add TODOs to server.
-rw-r--r--  module/entry-points/server.scm | 10
1 file changed, 10 insertions, 0 deletions
diff --git a/module/entry-points/server.scm b/module/entry-points/server.scm
index f0ebc3e0..5b819176 100644
--- a/module/entry-points/server.scm
+++ b/module/entry-points/server.scm
@@ -62,6 +62,10 @@
(cdr (scandir dir))))))
(define get-query-page
+ ;; A user of the website is able to fill up all of the host's memory by
+ ;; requesting many different searches, and forcing many result pages
+ ;; on each. Clean up this table from time to time, possibly by
+ ;; popularity rank.
(let ((query-pages (make-hash-table)))
(lambda (search-term)
(aif (hash-ref query-pages search-term)
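
The TODO above does not say how the query-pages table would actually be pruned. As a minimal sketch of one option in Guile (none of these names exist in calp; %max-cached-queries, query-hits, note-query-hit! and evict-unpopular! are illustrative only), each search term gets a request counter and the least requested entries are dropped once the table grows past a limit:

;; Hypothetical sketch, not part of this commit: bound the query-pages
;; table by counting requests per search term and evicting the least
;; requested entries when the table grows too large.
(use-modules (srfi srfi-1))

(define %max-cached-queries 100)
(define query-hits (make-hash-table))   ; search-term -> request count

(define (note-query-hit! term)
  (hash-set! query-hits term (1+ (or (hash-ref query-hits term) 0))))

(define (evict-unpopular! query-pages)
  ;; Keep at most %max-cached-queries entries, dropping the least
  ;; requested search terms first.
  (let ((entries (sort (hash-map->list cons query-hits)
                       (lambda (a b) (< (cdr a) (cdr b))))))
    (when (> (length entries) %max-cached-queries)
      (for-each (lambda (entry)
                  (hash-remove! query-pages (car entry))
                  (hash-remove! query-hits (car entry)))
                (take entries (- (length entries) %max-cached-queries))))))

note-query-hit! would be called from get-query-page on every lookup, and evict-unpopular! either periodically or whenever a new entry is added.
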
@@ -287,6 +291,7 @@
(return (build-response code: 404)
(format #f "No component with UID=~a found." uid))))
+ ;; TODO search without a query should work
(GET "/search" (q p)
(define search-term (prepare-string q))
@@ -302,6 +307,11 @@
;; TODO Propagate errors
(define search-result
(catch 'max-page
+ ;; TODO get-page only applies the time limit per page, meaning that
+ ;; if a user requests page 1000 the server is stuck trying to find
+ ;; that page, which can take up to 1000 * timeslice = 500 s (over
+ ;; 8 min) with the current 0.5 s timeslice. A timeout here, and an
+ ;; actual multithreaded server, should solve this.
(lambda () (get-page paginator page))
(lambda (err page-number)
(define location
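
For the timeout the last TODO asks for, one option in Guile is to run the page lookup in its own thread and give up after a deadline. A rough sketch, assuming the handler's get-page, paginator and page bindings; call-with-timeout is an illustrative helper, not an existing calp or Guile procedure:

;; Hypothetical sketch, not part of this commit: abandon a slow page
;; lookup after a fixed number of seconds.
(use-modules (ice-9 threads))

(define (call-with-timeout seconds thunk timeout-value)
  ;; Run THUNK in a fresh thread and return its value, or TIMEOUT-VALUE
  ;; if it has not finished within SECONDS seconds.  The worker thread
  ;; keeps running unless it is also cancelled with cancel-thread.
  (let ((worker (call-with-new-thread thunk)))
    (join-thread worker (+ (current-time) seconds) timeout-value)))

;; e.g. (call-with-timeout 5 (lambda () (get-page paginator page)) 'timed-out)

Wrapping (get-page paginator page) like this would turn a runaway page-1000 request into an error response instead of a worker stuck for minutes.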