Changes In Branch Add_csv_export Excluding Merge-Ins
This is equivalent to a diff from a771721f7f to 290b4a4fc8
2014-12-17
  12:19  Merged csv conversion code into v1.60 branch (check-in: 2c4d4ed884, user: mrwellan, tags: v1.60)

2014-12-16
  23:36  Added csv output (Closed-Leaf check-in: 290b4a4fc8, user: matt, tags: Add_csv_export)
  16:55  Adding csv export from refdb - partially coded up (check-in: 7779baf96f, user: mrwellan, tags: Add_csv_export)

2014-12-15
  23:04  Switch to --strip-path from --strip (check-in: a771721f7f, user: matt, tags: v1.60)
  22:51  Flattened unnecessary hierarchy from paths in bup repositories (check-in: 4aa76fc692, user: matt, tags: v1.60)
Modified configf.scm from [a53f7d26e5] to [8f6bce516a].
︙

     (lambda (sheet-name)
       (let* ((dat-path  (conc refdb-path "/" sheet-name ".dat"))
              (ref-dat   (configf:read-file dat-path #f #t))
              (ref-assoc (map (lambda (key)
                                (list key (hash-table-ref ref-dat key)))
                              (hash-table-keys ref-dat))))
         ;; (hash-table->alist ref-dat)))
         ;; (set! data (append data (list (list sheet-name ref-assoc))))))
         (set! data (cons (list sheet-name ref-assoc) data))))
     sheets)
    (list data "NO ERRORS"))))))

;; map over all pairs in a three level hierarchial alist and apply a function to the keys/val
;;
(define (configf:map-all-hier-alist data proc #!key (initproc1 #f)(initproc2 #f)(initproc3 #f))
  (for-each

︙
       (for-each
        (lambda (varname)
          (let* ((valtmp (assoc varname sectiondat))
                 (val    (if valtmp (cadr valtmp) "")))
            (proc sheetname sectionname varname val)))
        (map car sectiondat))))
     (map car sheetdat))))
   (map car data))
  data)

;;======================================================================
;; C O N F I G   T O / F R O M   A L I S T
;;======================================================================

(define (configf:config->alist cfgdat)
  (hash-table->alist cfgdat))
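Not part of the check-in: a minimal sketch of how configf:read-refdb and configf:map-all-hier-alist above might be driven together, mirroring the exporters in megatest.scm (the refdb path and the printing callback are hypothetical):

    ;; Hypothetical usage sketch - walk every sheet/section/var of a refdb and print it.
    ;; "path/to/refdb" is a placeholder; configf:read-refdb returns (list data msg).
    (let* ((res  (configf:read-refdb "path/to/refdb"))
           (data (car res)))
      (configf:map-all-hier-alist
       data
       (lambda (sheetname sectionname varname val)
         (print sheetname "/" sectionname "/" varname " = " val))))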
︙
Modified megatest.scm from [3ca80bfb3d] to [c325ba0452].
︙

;; PURPOSE.
;;

(include "common.scm")
;; (include "megatest-version.scm")

(use sqlite3 srfi-1 posix regex regex-case srfi-69 base64 format readline apropos json http-client directory-utils rpc ;; (srfi 18) extras)
     http-client srfi-18) ;; zmq extras)

;; Added for csv stuff - will be removed
;;
(use sparse-vectors)

(import (prefix sqlite3 sqlite3:))
(import (prefix base64 base64:))
(import (prefix rpc rpc:))
;; (use zmq)

︙
 -load file.scm          : load and run file.scm
 -mark-incompletes       : find and mark incomplete tests
 -ping run-id|host:port  : ping server, exit with 0 if found

Utilities
 -env2file fname         : write the environment to fname.csh and fname.sh
 -refdb2dat refdb        : convert refdb to sexp or to format specified by -dumpmode
                           formats: perl, ruby, sqlite3, csv
 -o                      : output file for refdb2dat (defaults to stdout)
 -archive targdir        : archive runs specified by selectors to targdir using bup

Spreadsheet generation
 -extract-ods fname.ods  : extract an open document spreadsheet from the database
 -pathmod path           : insert path, i.e. path/runame/itempath/logfile.html
                           will clear the field if no rundir/testname/itempath/logfile

︙

           ;; (begin
           ;;   (debug:print-info 0 "Sync is taking a long time, start up a server to assist for run " run-id)
           ;;   (server:kind-run run-id)))))
           (hash-table-delete! *db-local-sync* run-id)))
       (mutex-unlock! *db-multi-sync-mutex*))
     (hash-table-keys *db-local-sync*))
    (if (and debug-mode
             (> (- start-time last-time) 60))
        (begin
          (set! last-time start-time)
          (debug:print-info 1 "timestamp -> " (seconds->time-string (current-seconds)) ", time since start -> " (seconds->hr-min-sec (- (current-seconds) *time-zero*))))))

    ;; keep going unless time to exit
    ;;
    (if (not *time-to-exit*)
        (let delay-loop ((count 0))
          (if (and (not *time-to-exit*)
                   (< count 11)) ;; aprox 5-6 seconds

︙
          (map (lambda (x)
                 (string-intersperse x " => "))
               (common:get-disks *configdat*))
          "\n"))
      (set! *didsomething* #t)))

(define (make-sparse-array)
  (let ((a (make-sparse-vector)))
    (sparse-vector-set! a 0 (make-sparse-vector))
    a))

(define (sparse-array? a)
  (and (sparse-vector? a)
       (sparse-vector? (sparse-vector-ref a 0))))

(define (sparse-array-ref a x y)
  (let ((row (sparse-vector-ref a x)))
    (if row
        (sparse-vector-ref row y)
        #f)))

(define (sparse-array-set! a x y val)
  (let ((row (sparse-vector-ref a x)))
    (if row
        (sparse-vector-set! row y val)
        (let ((new-row (make-sparse-vector)))
          (sparse-vector-set! a x new-row)
          (sparse-vector-set! new-row y val)))))

;; csv processing record
(define (make-refdb:csv)
  (vector
   (make-sparse-array)
   (make-hash-table)
   (make-hash-table)
   0
   0))
(define-inline (refdb:csv-get-svec    vec)    (vector-ref vec 0))
(define-inline (refdb:csv-get-rows    vec)    (vector-ref vec 1))
(define-inline (refdb:csv-get-cols    vec)    (vector-ref vec 2))
(define-inline (refdb:csv-get-maxrow  vec)    (vector-ref vec 3))
(define-inline (refdb:csv-get-maxcol  vec)    (vector-ref vec 4))
(define-inline (refdb:csv-set-svec!   vec val)(vector-set! vec 0 val))
(define-inline (refdb:csv-set-rows!   vec val)(vector-set! vec 1 val))
(define-inline (refdb:csv-set-cols!   vec val)(vector-set! vec 2 val))
(define-inline (refdb:csv-set-maxrow! vec val)(vector-set! vec 3 val))
(define-inline (refdb:csv-set-maxcol! vec val)(vector-set! vec 4 val))

(define (get-dat results sheetname)
  (or (hash-table-ref/default results sheetname #f)
      (let ((tmp-vec (make-refdb:csv)))
        (hash-table-set! results sheetname tmp-vec)
        tmp-vec)))

(if (args:get-arg "-refdb2dat")
    (let* ((input-db (args:get-arg "-refdb2dat"))
           (out-file (args:get-arg "-o"))
           (out-fmt  (or (args:get-arg "-dumpmode") "scheme"))
           (out-port (if (and out-file
                              (not (member out-fmt '("sqlite3" "csv"))))
                         (open-output-file out-file)
                         (current-output-port)))
           (res-data (configf:read-refdb input-db))
           (data     (car res-data))
           (msg      (cadr res-data)))
      (if (not data)
          (debug:print 0 "Bad input? data=" data) ;; some error occurred
          (with-output-to-port out-port
            (lambda ()
              (case (string->symbol out-fmt)
                ((scheme)(pp data))
                ((perl)
                 ;; (print "%hash = (")
                 ;;        key1 => 'value1',
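Not part of the check-in: with the sparse-array helper definitions above loaded, a minimal sanity-check sketch might look like the following (assumes the sparse-vectors egg is installed; the coordinates and values are made up, and it relies on unset sparse-vector elements reading back as #f, just as the helpers themselves do):

    (use sparse-vectors)

    (define a (make-sparse-array))
    (sparse-array-set! a 0 1 "section1") ;; column header cell
    (sparse-array-set! a 2 1 "42")       ;; row 2 did not exist yet, so a fresh row is created
    (print (sparse-array-ref a 2 1))     ;; prints 42
    (print (sparse-array-ref a 5 5))     ;; prints #f for a never-set cell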
︙
                      (print "data[\"" sheetname "\"][\"" sectionname "\"][\"" varname "\"] = \"" val "\""))
                    initproc1:
                    (lambda (sheetname)
                      (print "data[\"" sheetname "\"] = {}"))
                    initproc2:
                    (lambda (sheetname sectionname)
                      (print "data[\"" sheetname "\"][\"" sectionname "\"] = {}"))))
                ((csv)
                 (let* ((results  (make-hash-table)) ;; (make-sparse-array)))
                        (row-cols (make-hash-table))) ;; hash of hashes where section => ht { row-<name> => num or col-<name> => num
                   ;; (print "data=")
                   ;; (pp data)
                   (configf:map-all-hier-alist
                    data
                    (lambda (sheetname sectionname varname val)
                      ;; (print "sheetname: " sheetname ", sectionname: " sectionname ", varname: " varname ", val: " val)
                      (let* ((dat      (get-dat results sheetname))
                             (vec      (refdb:csv-get-svec dat))
                             (rownames (refdb:csv-get-rows dat))
                             (colnames (refdb:csv-get-cols dat))
                             (currrown (hash-table-ref/default rownames varname #f))
                             (currcoln (hash-table-ref/default colnames sectionname #f))
                             (rown     (or currrown
                                           (let* ((lastn   (refdb:csv-get-maxrow dat))
                                                  (newrown (+ lastn 1)))
                                             (refdb:csv-set-maxrow! dat newrown)
                                             newrown)))
                             (coln     (or currcoln
                                           (let* ((lastn   (refdb:csv-get-maxcol dat))
                                                  (newcoln (+ lastn 1)))
                                             (refdb:csv-set-maxcol! dat newcoln)
                                             newcoln))))
                        (if (not (sparse-array-ref vec 0 coln)) ;; (eq? rown 0)
                            (begin
                              (sparse-array-set! vec 0 coln sectionname)
                              ;; (print "sparse-array-ref " 0 "," coln "=" (sparse-array-ref vec 0 coln))
                              ))
                        (if (not (sparse-array-ref vec rown 0)) ;; (eq? coln 0)
                            (begin
                              (sparse-array-set! vec rown 0 varname)
                              ;; (print "sparse-array-ref " rown "," 0 "=" (sparse-array-ref vec rown 0))
                              ))
                        (if (not currrown)(hash-table-set! rownames varname rown))
                        (if (not currcoln)(hash-table-set! colnames sectionname coln))
                        ;; (print "dat=" dat ", rown=" rown ", coln=" coln)
                        (sparse-array-set! vec rown coln val)
                        ;; (print "sparse-array-ref " rown "," coln "=" (sparse-array-ref vec rown coln))
                        )))
                   (for-each
                    (lambda (sheetname)
                      (let* ((sheetdat (get-dat results sheetname))
                             (svec     (refdb:csv-get-svec sheetdat))
                             (maxrow   (refdb:csv-get-maxrow sheetdat))
                             (maxcol   (refdb:csv-get-maxcol sheetdat))
                             (fname    (if out-file
                                           (string-substitute "%s" sheetname out-file) ;; "/foo/bar/%s.csv")
                                           (conc sheetname ".csv"))))
                        (with-output-to-file fname
                          (lambda ()
                            ;; (print "Sheetname: " sheetname)
                            (let loop ((row      0)
                                       (col      0)
                                       (curr-row '())
                                       (result   '()))
                              (let* ((val      (sparse-array-ref svec row col))
                                     (disp-val (if val (conc "\"" val "\"") "")))
                                (if (> col 0)(display ","))
                                (display disp-val)
                                (cond
                                 ((> row maxrow)(display "\n") result)
                                 ((>= col maxcol)
                                  (display "\n")
                                  (loop (+ row 1) 0 '() (append result (list curr-row))))
                                 (else
                                  (loop row (+ col 1) (append curr-row (list val)) result)))))))))
                    (hash-table-keys results))))
                ((sqlite3)
                 (let* ((db-file   (or out-file (pathname-file input-db)))
                        (db-exists (file-exists? db-file))
                        (db        (sqlite3:open-database db-file)))
                   (if (not db-exists)(sqlite3:execute db "CREATE TABLE data (sheet,section,var,val);"))
                   (configf:map-all-hier-alist
                    data
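Not part of the check-in: given the csv branch above, a hedged example of how the new mode would be invoked (the refdb path and the output pattern are placeholders):

    megatest -refdb2dat path/to/refdb -dumpmode csv -o out-%s.csv

Each sheet is written to its own file; when -o is given, any %s in it is replaced with the sheet name via string-substitute, and without -o each sheet lands in <sheetname>.csv in the current directory.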
︙
Modified txtdb/nada3/RunsToLock.dat from [9e39b39de9] to [5d15c8e35d].
[def]
def def
ghi
jkl
qrst
uvwx
yz12
[mno]
abc

︙
Modified utils/Makefile.installall from [507fd637d5] to [63718d9d5f].
︙

IUPBRANCH=iup-3.10.1

# Eggs to install (straightforward ones)
EGGS=matchable readline apropos base64 regex-literals format regex-case test coops trace csv \
     dot-locking posix-utils posix-extras directory-utils hostinfo tcp-server rpc csv-xml fmt \
     json md5 awful http-client spiffy uri-common intarweb spiffy-request-vars \
     spiffy-directory-listing ssax sxml-serializer sxml-modifications sql-de-lite \
     srfi-19 refdb ini-file sparse-vectors

#
# Derived variables
#

ifeq ($(PROXY),)
PROX:=
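Not part of the check-in: sparse-vectors is the egg behind the new (use sparse-vectors) in megatest.scm; outside of this Makefile it can presumably be installed on its own with the standard Chicken tool:

    chicken-install sparse-vectors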
︙