Index: configf.scm
==================================================================
--- configf.scm
+++ configf.scm
@@ -465,11 +465,12 @@
 		    (ref-dat    (configf:read-file dat-path #f #t))
 		    (ref-assoc  (map (lambda (key)
 				       (list key (hash-table-ref ref-dat key)))
 				     (hash-table-keys ref-dat))))
 	       ;; (hash-table->alist ref-dat)))
-	       (set! data (append data (list (list sheet-name ref-assoc))))))
+	       ;; (set! data (append data (list (list sheet-name ref-assoc))))))
+	       (set! data (cons (list sheet-name ref-assoc) data))))
 	   sheets)
 	  (list data "NO ERRORS"))))))
 
 ;; map over all pairs in a three level hierarchial alist and apply a function to the keys/val
 ;;
@@ -489,11 +490,12 @@
 		     (let* ((valtmp (assoc varname sectiondat))
 			    (val    (if valtmp (cadr valtmp) "")))
 		       (proc sheetname sectionname varname val)))
 		   (map car sectiondat))))
 	       (map car sheetdat))))
-   (map car data)))
+   (map car data))
+  data)
 
 ;;======================================================================
 ;;  C O N F I G   T O / F R O M   A L I S T
 ;;======================================================================
 
Index: megatest.scm
==================================================================
--- megatest.scm
+++ megatest.scm
@@ -10,10 +10,14 @@
 ;; (include "common.scm")
 ;; (include "megatest-version.scm")
 
 (use sqlite3 srfi-1 posix regex regex-case srfi-69 base64 format readline apropos json http-client directory-utils rpc ;; (srfi 18) extras)
      http-client srfi-18) ;; zmq extras)
+
+;; Added for csv stuff - will be removed
+;;
+(use sparse-vectors)
 
 (import (prefix sqlite3 sqlite3:))
 (import (prefix base64 base64:))
 (import (prefix rpc rpc:))
 
@@ -145,11 +149,11 @@
   -ping run-id|host:port  : ping server, exit with 0 if found
 
 Utilities
   -env2file fname         : write the environment to fname.csh and fname.sh
   -refdb2dat refdb        : convert refdb to sexp or to format specified by -dumpmode
-                            formats: perl, ruby, sqlite3
+                            formats: perl, ruby, sqlite3, csv
   -o                      : output file for refdb2dat (defaults to stdout)
   -archive targdir        : archive runs specified by selectors to targdir using bup
 
 Spreadsheet generation
   -extract-ods fname.ods  : extract an open document spreadsheet from the database
@@ -319,14 +323,14 @@
 		     ;; (server:kind-run run-id)))))
 		     (hash-table-delete! *db-local-sync* run-id)))
 		 (mutex-unlock! *db-multi-sync-mutex*))
 	       (hash-table-keys *db-local-sync*))
 	      (if (and debug-mode
-		       (> (- start-time last-time) 14))
+		       (> (- start-time last-time) 60))
 		  (begin
 		    (set! last-time start-time)
-		    (debug:print-info 0 "timestamp -> " (seconds->time-string (current-seconds)) ", time since start -> " (seconds->hr-min-sec (- (current-seconds) *time-zero*))))))
+		    (debug:print-info 1 "timestamp -> " (seconds->time-string (current-seconds)) ", time since start -> " (seconds->hr-min-sec (- (current-seconds) *time-zero*))))))
 
 	;; keep going unless time to exit
 	;;
 	(if (not *time-to-exit*)
 	    (let delay-loop ((count 0))
@@ -428,24 +432,72 @@
 	       x
 	       " => "))
 	     (common:get-disks *configdat*))
 	    "\n"))
       (set! *didsomething* #t)))
+
+(define (make-sparse-array)
+  (let ((a (make-sparse-vector)))
+    (sparse-vector-set! a 0 (make-sparse-vector))
+    a))
+
+(define (sparse-array? a)
+  (and (sparse-vector? a)
+       (sparse-vector? (sparse-vector-ref a 0))))
+
+(define (sparse-array-ref a x y)
+  (let ((row (sparse-vector-ref a x)))
+    (if row
+	(sparse-vector-ref row y)
+	#f)))
+
+(define (sparse-array-set! a x y val)
+  (let ((row (sparse-vector-ref a x)))
+    (if row
+	(sparse-vector-set! row y val)
+	(let ((new-row (make-sparse-vector)))
+	  (sparse-vector-set! a x new-row)
+	  (sparse-vector-set! new-row y val)))))
+
+;; csv processing record
+(define (make-refdb:csv)
+  (vector
+   (make-sparse-array)
+   (make-hash-table)
+   (make-hash-table)
+   0
+   0))
+(define-inline (refdb:csv-get-svec     vec)    (vector-ref  vec 0))
+(define-inline (refdb:csv-get-rows     vec)    (vector-ref  vec 1))
+(define-inline (refdb:csv-get-cols     vec)    (vector-ref  vec 2))
+(define-inline (refdb:csv-get-maxrow   vec)    (vector-ref  vec 3))
+(define-inline (refdb:csv-get-maxcol   vec)    (vector-ref  vec 4))
+(define-inline (refdb:csv-set-svec!    vec val)(vector-set! vec 0 val))
+(define-inline (refdb:csv-set-rows!    vec val)(vector-set! vec 1 val))
+(define-inline (refdb:csv-set-cols!    vec val)(vector-set! vec 2 val))
+(define-inline (refdb:csv-set-maxrow!  vec val)(vector-set! vec 3 val))
+(define-inline (refdb:csv-set-maxcol!  vec val)(vector-set! vec 4 val))
+
+(define (get-dat results sheetname)
+  (or (hash-table-ref/default results sheetname #f)
+      (let ((tmp-vec (make-refdb:csv)))
+	(hash-table-set! results sheetname tmp-vec)
+	tmp-vec)))
 
 (if (args:get-arg "-refdb2dat")
     (let* ((input-db (args:get-arg "-refdb2dat"))
 	   (out-file (args:get-arg "-o"))
 	   (out-fmt  (or (args:get-arg "-dumpmode") "scheme"))
 	   (out-port (if (and out-file
-			      (not (equal? out-fmt "sqlite3")))
+			      (not (member out-fmt '("sqlite3" "csv"))))
 			  (open-output-file out-file)
 			  (current-output-port)))
 	   (res-data (configf:read-refdb input-db))
 	   (data     (car res-data))
 	   (msg      (cadr res-data)))
       (if (not data)
-	  (debug:print 0 data) ;; some error occurred
+	  (debug:print 0 "Bad input? data=" data) ;; some error occurred
 	  (with-output-to-port out-port
 	    (lambda ()
 	      (case (string->symbol out-fmt)
 		((scheme)(pp data))
 		((perl)
@@ -468,10 +520,81 @@
 		 (lambda (sheetname)
 		   (print "data[\"" sheetname "\"] = {}"))
 		 initproc2:
 		 (lambda (sheetname sectionname)
 		   (print "data[\"" sheetname "\"][\"" sectionname "\"] = {}"))))
+		((csv)
+		 (let* ((results  (make-hash-table)) ;; (make-sparse-array)))
+			(row-cols (make-hash-table))) ;; hash of hashes where section => ht { row- => num or col- => num
+		   ;; (print "data=")
+		   ;; (pp data)
+		   (configf:map-all-hier-alist
+		    data
+		    (lambda (sheetname sectionname varname val)
+		      ;; (print "sheetname: " sheetname ", sectionname: " sectionname ", varname: " varname ", val: " val)
+		      (let* ((dat      (get-dat results sheetname))
+			     (vec      (refdb:csv-get-svec dat))
+			     (rownames (refdb:csv-get-rows dat))
+			     (colnames (refdb:csv-get-cols dat))
+			     (currrown (hash-table-ref/default rownames varname #f))
+			     (currcoln (hash-table-ref/default colnames sectionname #f))
+			     (rown     (or currrown
+					   (let* ((lastn   (refdb:csv-get-maxrow dat))
+						  (newrown (+ lastn 1)))
+					     (refdb:csv-set-maxrow! dat newrown)
+					     newrown)))
+			     (coln     (or currcoln
+					   (let* ((lastn   (refdb:csv-get-maxcol dat))
+						  (newcoln (+ lastn 1)))
+					     (refdb:csv-set-maxcol! dat newcoln)
+					     newcoln))))
+			(if (not (sparse-array-ref vec 0 coln)) ;; (eq? rown 0)
+			    (begin
+			      (sparse-array-set! vec 0 coln sectionname)
+			      ;; (print "sparse-array-ref " 0 "," coln "=" (sparse-array-ref vec 0 coln))
+			      ))
+			(if (not (sparse-array-ref vec rown 0)) ;; (eq? coln 0)
+			    (begin
+			      (sparse-array-set! vec rown 0 varname)
+			      ;; (print "sparse-array-ref " rown "," 0 "=" (sparse-array-ref vec rown 0))
+			      ))
+			(if (not currrown)(hash-table-set! rownames varname rown))
+			(if (not currcoln)(hash-table-set! colnames sectionname coln))
+			;; (print "dat=" dat ", rown=" rown ", coln=" coln)
+			(sparse-array-set! vec rown coln val)
+			;; (print "sparse-array-ref " rown "," coln "=" (sparse-array-ref vec rown coln))
+			)))
+		   (for-each
+		    (lambda (sheetname)
+		      (let* ((sheetdat (get-dat results sheetname))
+			     (svec     (refdb:csv-get-svec sheetdat))
+			     (maxrow   (refdb:csv-get-maxrow sheetdat))
+			     (maxcol   (refdb:csv-get-maxcol sheetdat))
+			     (fname    (if out-file
+					   (string-substitute "%s" sheetname out-file) ;; "/foo/bar/%s.csv")
+					   (conc sheetname ".csv"))))
+			(with-output-to-file fname
+			  (lambda ()
+			    ;; (print "Sheetname: " sheetname)
+			    (let loop ((row      0)
+				       (col      0)
+				       (curr-row '())
+				       (result   '()))
+			      (let* ((val      (sparse-array-ref svec row col))
+				     (disp-val (if val
+						   (conc "\"" val "\"")
+						   "")))
+				(if (> col 0)(display ","))
+				(display disp-val)
+				(cond
+				 ((> row maxrow)(display "\n") result)
+				 ((>= col maxcol)
+				  (display "\n")
+				  (loop (+ row 1) 0 '() (append result (list curr-row))))
+				 (else
+				  (loop row (+ col 1) (append curr-row (list val)) result)))))))))
+		    (hash-table-keys results))))
 		((sqlite3)
 		 (let* ((db-file   (or out-file (pathname-file input-db)))
 			(db-exists (file-exists? db-file))
 			(db        (sqlite3:open-database db-file)))
 		   (if (not db-exists)(sqlite3:execute db "CREATE TABLE data (sheet,section,var,val);"))
Index: txtdb/nada3/RunsToLock.dat
==================================================================
--- txtdb/nada3/RunsToLock.dat
+++ txtdb/nada3/RunsToLock.dat
@@ -1,6 +1,7 @@
 [def]
+def
 def
 ghi
 jkl
 qrst
 uvwx
 yz12
Index: utils/Makefile.installall
==================================================================
--- utils/Makefile.installall
+++ utils/Makefile.installall
@@ -43,11 +43,11 @@
 # Eggs to install (straightforward ones)
 EGGS=matchable readline apropos base64 regex-literals format regex-case test coops trace csv \
      dot-locking posix-utils posix-extras directory-utils hostinfo tcp-server rpc csv-xml fmt \
      json md5 awful http-client spiffy uri-common intarweb spiffy-request-vars \
      spiffy-directory-listing ssax sxml-serializer sxml-modifications sql-de-lite \
-     srfi-19 refdb ini-file
+     srfi-19 refdb ini-file sparse-vectors
 
 #
 # Derived variables
 #
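
Not part of the patch above: a minimal sketch of the row/column convention the new csv dump mode relies on. It uses only the sparse-array helpers added to megatest.scm (so those definitions must be loaded), which in turn assume the sparse-vectors egg with #f as the unset-slot default, exactly as the patch does; the names demo, section1, var1 and value1 are made up for illustration. Row 0 of the 2-D sparse array holds section names (the column headers), column 0 holds variable names (the row labels), and cell (rown, coln) holds the value.

(use sparse-vectors)

;; Build the same layout the ((csv) ...) branch walks when writing a sheet.
(define demo (make-sparse-array))
(sparse-array-set! demo 0 1 "section1")  ;; column header goes in row 0
(sparse-array-set! demo 1 0 "var1")      ;; row label goes in column 0
(sparse-array-set! demo 1 1 "value1")    ;; the cell value itself

(print (sparse-array-ref demo 1 1))      ;; => value1
(print (sparse-array-ref demo 2 2))      ;; => #f (a missing row reads back as #f)

Going by the help text and the csv branch above, invocation would be along the lines of "megatest -refdb2dat path/to/refdb -dumpmode csv -o '%s.csv'": one CSV file is written per sheet, with any %s in the -o argument replaced by the sheet name (via string-substitute), or <sheetname>.csv when no -o is given.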