Calculate savefile paths only once; avoid excessive rehashing when reading track databases

Branch: master
Author: y5nw 2021-08-25 13:32:40 +02:00
Parent: 222df8116d
Commit: 88d6d9e971
2 changed files with 17 additions and 10 deletions

File 1 of 2:

@@ -14,8 +14,11 @@
 ;; Note: do NOT change *server-port* and *server* at debug time
 (defparameter *server-port* nil)
 (defparameter *server* nil)
+;; Note: please call (ywatds::update-savefile-paths) if you change the world path at runtime
+(defparameter *ildb-path* nil)
+(defparameter *nodedb-path* nil)
-#+sbcl(declaim (sb-ext:always-bound *ildb* *trackdb*
+#+sbcl(declaim (sb-ext:always-bound *ildb* *trackdb* *ildb-path* *nodedb-path*
                                     *debugp* *gcp* *world-path* *server-port* *server*))
 (defmacro ensure-world-path (path)
@@ -27,9 +30,13 @@
-    (concatenate 'string "advtrains_" name)
+    `(concatenate 'string "advtrains_" ,name))))
+(defun update-savefile-paths ()
+  (psetf *ildb-path* (savefilepath "interlocking.ls")
+         *nodedb-path* (savefilepath "ndb4.ls")))
 (defun load-data ()
-  (let* ((ildb (atil:load-ildb (savefilepath "interlocking.ls")))
-         (tdb (tracks:load-trackdb (savefilepath "ndb4.ls"))))
+  (let* ((ildb (atil:load-ildb *ildb-path*))
+         (tdb (tracks:load-trackdb *nodedb-path*)))
     (psetf *ildb* ildb *trackdb* tdb)
     (when *gcp*
       #+sbcl(sb-ext:gc :full t))
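
With the paths cached in *ildb-path* and *nodedb-path*, the savefilepath concatenation runs once at startup instead of on every load. A minimal sketch of the runtime workflow the new comment asks for (the ywatds package name is taken from that comment; the world directory here is made up for illustration):

    ;; Hypothetical REPL session: repoint the running server at another world.
    (setf ywatds::*world-path* "/srv/minetest/worlds/other/")  ; made-up path
    (ywatds::update-savefile-paths)  ; recompute *ildb-path* and *nodedb-path*
    (ywatds::load-data)              ; reload both databases from the new paths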
@@ -158,6 +165,7 @@
 (defun start-server ()
   (tracks:init-tracks)
+  (update-savefile-paths)
   (if *debugp* (break))
   (load-data)
   (hunchentoot:start *server*)

File 2 of 2:

@@ -11,9 +11,6 @@
   (connects (make-empty-connects) :type (vector list 16))
   (special nil :type list))
-(defmacro make-track-database ()
-  `(make-hash-table :test #'equalp))
 (defmacro get-track (trackdb pos)
   `(gethash ,pos ,trackdb))
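
Design note: make-track-database merely wrapped (make-hash-table :test #'equalp) with no size hint, so it is dropped in favor of constructing the table directly in load-trackdb (last hunk below), where a meaningful :size argument can be supplied.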
@@ -70,8 +67,10 @@
 ;;; written based on https://gigamonkeys.com/book/practical-parsing-binary-files.html
 ;;; note that this implementation only reads nodes that are known to be tracks
 (defun read-tracks-from-nodedb (fn)
-  (let ((tmpdb (make-hash-table :test #'equalp))
-        (nodes (make-hash-table :test #'eql)))
+  ;; The 1000000 is chosen from a large dataset with ~800k tracks. The idea is to avoid
+  ;; excessive rehashing for large databases.
+  (let ((tmpdb (make-hash-table :test #'equalp :size 1000000))
+        (nodes (make-hash-table :test #'eql :size (hash-table-count *track->conns*))))
     (with-open-file (stream fn :element-type '(unsigned-byte 8))
       (labels ((read-u8 () (read-byte stream nil nil))
                (read-u16 () (let ((msb (read-u8)) (lsb (read-u8)))
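
Per the Common Lisp standard, :size is only a hint for the initial capacity, and implementations may round it; but a hint near the final element count lets the table skip the grow-and-rehash cycles it would otherwise go through while filling. A minimal standalone sketch of the trade-off (standard CL only; fill-table and the element counts are made up for illustration):

    ;; Made-up helper: insert n list keys, optionally pre-sizing the table.
    (defun fill-table (n &key size)
      (let ((ht (if size
                    (make-hash-table :test #'equalp :size size)
                    (make-hash-table :test #'equalp))))
        (dotimes (i n ht)
          ;; list keys make equalp do real work, like the position keys above
          (setf (gethash (list i 0 0) ht) i))))

    ;; (time (fill-table 800000))                ; grows and rehashes as it fills
    ;; (time (fill-table 800000 :size 1000000))  ; sized up front; no rehash expected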
@@ -98,8 +97,8 @@
       tmpdb))))
 (defun load-trackdb (fn)
-  (let ((tdb (make-track-database))
-        (tmpdb (read-tracks-from-nodedb fn)))
+  (let* ((tmpdb (read-tracks-from-nodedb fn))
+         (tdb (make-hash-table :test #'equalp :size (hash-table-count tmpdb))))
     (loop for pos being the hash-keys of tmpdb using (hash-value clist)
           for dirs = (conns-list-dirs clist)
           for connects = (make-empty-connects) do
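
Note the switch from let to let*: the :size hint for tdb reads (hash-table-count tmpdb), so tmpdb must already be bound when tdb's init-form runs. Plain let evaluates all init-forms before binding any variable, so tmpdb would not be visible there; let* binds sequentially. A two-line illustration (standard CL):

    (let* ((a (list 1 2 3))
           (b (length a)))  ; fine under let*: a is already bound
      b)                    ; => 3; under plain let, (length a) would see an unbound a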