Commit addd7b27 authored by Geoff Simmons

Test concurrent reads.

And refactor the testing code to re-use results of a scraped log.
parent ab69cc6e
...@@ -31,6 +31,7 @@ package log ...@@ -31,6 +31,7 @@ package log
import ( import (
"bufio" "bufio"
"errors" "errors"
"fmt"
"os" "os"
"regexp" "regexp"
"strconv" "strconv"
...@@ -43,14 +44,94 @@ const ( ...@@ -43,14 +44,94 @@ const (
failPath = "ifAFileWithThisNameReallyExistsThenTestsFail" failPath = "ifAFileWithThisNameReallyExistsThenTestsFail"
) )
// testRec is the expected form of one log record scraped from a
// varnishlog text dump (one "-  vxid tag type payload" line).
type testRec struct {
	vxid    int    // vxid parsed from the record line
	tag     string // record tag, e.g. "Begin"
	rectype rune   // 'b', 'c' or '-', per the recline pattern
	payload string // remainder of the record line
}
// testTx is the expected form of one log transaction scraped from a
// varnishlog text dump: a tx header line followed by its records.
type testTx struct {
	// nesting level, taken from the number of leading '*'s
	level uint
	// vxid parsed from the tx header line
	vxid uint32
	// transaction type string from the tx header, e.g. "Request"
	txtype string
	// the records belonging to this transaction, in file order
	recs []testRec
}
// Patterns for scraping a varnishlog text dump:
//   - txline matches a transaction header: "*  << type >> vxid"
//   - recline matches a record line: "-  vxid tag {b|c|-} payload"
//   - begin matches the payload of a Begin record: "type vxid reason"
var (
	txline = regexp.MustCompile(`^(\*+)\s+<<\s+(\w+)\s+>>\s+(\d+)`)
	// BUG fix: the class was [b|c|-], which also matched a literal
	// '|'; the record type can only be 'b', 'c' or '-'.
	recline = regexp.MustCompile(`^-+\s+(\d+)\s+(\w+)\s+([bc-])\s+(.*)$`)
	begin   = regexp.MustCompile(`^\w+\s+(\d+)\s+(\S+)$`)
)
// scrapeLog parses a varnishlog text dump into testTx values, one per
// transaction. A transaction is a tx header line ("* << type >> vxid")
// followed by record lines, terminated by a blank line or EOF.
// XXX: currently does not work for grouped txn.
func scrapeLog(file string) ([]testTx, error) {
	var txn []testTx
	f, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	// BUG fix: the file handle was never closed.
	defer f.Close()

	lines := bufio.NewScanner(f)
TX:
	for lines.Scan() {
		flds := txline.FindStringSubmatch(lines.Text())
		if flds == nil {
			return nil, errors.New("Cannot parse tx line: " +
				lines.Text())
		}
		txVxid, err := strconv.Atoi(flds[3])
		if err != nil {
			// BUG fix: the message used flds[4], which does not
			// exist (txline has only three capture groups).
			return nil, errors.New("Cannot parse vxid " +
				flds[3] + " in tx line: " + lines.Text())
		}
		tx := testTx{}
		tx.vxid = uint32(txVxid)
		// NB: this works only for levels up to 3
		tx.level = uint(len(flds[1]))
		tx.txtype = flds[2]
		for lines.Scan() {
			// A blank line terminates the transaction.
			if lines.Text() == "" {
				txn = append(txn, tx)
				continue TX
			}
			flds = recline.FindStringSubmatch(lines.Text())
			if flds == nil {
				return nil, errors.New("Cannot parse record: " +
					lines.Text())
			}
			rec := testRec{}
			rec.vxid, err = strconv.Atoi(flds[1])
			if err != nil {
				return nil, errors.New("Cannot parse vxid " +
					flds[1] + " in rec line: " +
					lines.Text())
			}
			rec.tag = flds[2]
			rec.rectype = rune(flds[3][0])
			rec.payload = flds[4]
			tx.recs = append(tx.recs, rec)
		}
		// BUG fix: reaching EOF without a trailing blank line used
		// to drop the final transaction.
		txn = append(txn, tx)
	}
	// BUG fix: scanner errors (e.g. read failures, too-long lines)
	// were silently ignored.
	if err := lines.Err(); err != nil {
		return nil, err
	}
	return txn, nil
}
// expVxidLog holds the transactions scraped from vxidLog; it is
// initialized once in TestMain and shared by the read tests.
var expVxidLog []testTx
func TestMain(m *testing.M) {
files := []string{testFile, vxidLog} files := []string{testFile, vxidLog}
for _, file := range files { for _, file := range files {
_, err := os.Stat(file) if _, err := os.Stat(file); err != nil {
if err != nil { fmt.Fprintln(os.Stderr, "Cannot stat "+file+":", err)
t.Fatal("Cannot stat " + file + ": " + err.Error()) os.Exit(1)
} }
} }
var err error
if expVxidLog, err = scrapeLog(vxidLog); err != nil {
fmt.Fprintln(os.Stderr, "Cannot parse "+vxidLog+":", err)
os.Exit(1)
}
os.Exit(m.Run())
} }
var expTxTypeStr = map[TxType]string{ var expTxTypeStr = map[TxType]string{
...@@ -142,77 +223,6 @@ func TestAttachFile(t *testing.T) { ...@@ -142,77 +223,6 @@ func TestAttachFile(t *testing.T) {
} }
} }
// testRec is the expected form of one log record scraped from a
// varnishlog text dump.
type testRec struct {
	// vxid parsed from the record line
	vxid int
	// record tag, e.g. "Begin"
	tag string
	// 'b', 'c' or '-', per the recline pattern
	rectype rune
	// remainder of the record line
	payload string
}
// testTx is the expected form of one log transaction scraped from a
// varnishlog text dump.
type testTx struct {
	// nesting level, taken from the number of leading '*'s
	level uint
	// vxid parsed from the tx header line
	vxid uint32
	// transaction type string from the tx header, e.g. "Request"
	txtype string
	// the records belonging to this transaction, in file order
	recs []*testRec
}
// Patterns for scraping a varnishlog text dump (tx header line, record
// line, and the payload of a Begin record, respectively).
var (
	txline = regexp.MustCompile(`^(\*+)\s+<<\s+(\w+)\s+>>\s+(\d+)`)
	// BUG fix: the class was [b|c|-], which also matched a literal
	// '|'; the record type can only be 'b', 'c' or '-'.
	recline = regexp.MustCompile(`^-+\s+(\d+)\s+(\w+)\s+([bc-])\s+(.*)$`)
	begin   = regexp.MustCompile(`^\w+\s+(\d+)\s+(\S+)$`)
)
// scrapeLog parses a varnishlog text dump into testTx values, one per
// transaction. A transaction is a tx header line ("* << type >> vxid")
// followed by record lines, terminated by a blank line or EOF.
// XXX: currently does not work for grouped txn.
func scrapeLog(file string) ([]*testTx, error) {
	var txn []*testTx
	f, err := os.Open(file)
	if err != nil {
		return nil, err
	}
	// BUG fix: the file handle was never closed.
	defer f.Close()

	lines := bufio.NewScanner(f)
TX:
	for lines.Scan() {
		flds := txline.FindStringSubmatch(lines.Text())
		if flds == nil {
			return nil, errors.New("Cannot parse tx line: " +
				lines.Text())
		}
		txVxid, err := strconv.Atoi(flds[3])
		if err != nil {
			// BUG fix: the message used flds[4], which does not
			// exist (txline has only three capture groups).
			return nil, errors.New("Cannot parse vxid " +
				flds[3] + " in tx line: " + lines.Text())
		}
		tx := new(testTx)
		tx.vxid = uint32(txVxid)
		// NB: this works only for levels up to 3
		tx.level = uint(len(flds[1]))
		tx.txtype = flds[2]
		for lines.Scan() {
			// A blank line terminates the transaction.
			if lines.Text() == "" {
				txn = append(txn, tx)
				continue TX
			}
			flds = recline.FindStringSubmatch(lines.Text())
			if flds == nil {
				return nil, errors.New("Cannot parse record: " +
					lines.Text())
			}
			rec := new(testRec)
			rec.vxid, err = strconv.Atoi(flds[1])
			if err != nil {
				return nil, errors.New("Cannot parse vxid " +
					flds[1] + " in rec line: " +
					lines.Text())
			}
			rec.tag = flds[2]
			rec.rectype = rune(flds[3][0])
			rec.payload = flds[4]
			tx.recs = append(tx.recs, rec)
		}
		// BUG fix: reaching EOF without a trailing blank line used
		// to drop the final transaction.
		txn = append(txn, tx)
	}
	// BUG fix: scanner errors were silently ignored.
	if err := lines.Err(); err != nil {
		return nil, err
	}
	return txn, nil
}
func TestRead(t *testing.T) { func TestRead(t *testing.T) {
rdHndlr := func(txGrp []Tx, rdstatus Status) bool { rdHndlr := func(txGrp []Tx, rdstatus Status) bool {
return false return false
...@@ -246,43 +256,11 @@ func TestError(t *testing.T) { ...@@ -246,43 +256,11 @@ func TestError(t *testing.T) {
_ = l.Error() _ = l.Error()
} }
func TestDefaultRead(t *testing.T) { func checkTxGroups(t *testing.T, txGrps [][]Tx, expTxn []testTx) {
var txGrps [][]Tx
expTxn, err := scrapeLog(vxidLog)
if err != nil {
t.Fatal(vxidLog + ": " + err.Error())
}
l := New()
defer l.Release()
err = l.AttachFile(testFile)
if err != nil {
t.Fatal("Cannot attach to " + testFile + ": " + err.Error())
}
statusChan := make(chan Status)
rdHndlr := func(txGrp []Tx, rdstatus Status) bool {
if rdstatus != More {
statusChan <- rdstatus
return false
}
txGrps = append(txGrps, txGrp)
return true
}
err = l.Read(rdHndlr, nil)
if err != nil {
t.Fatal("Read(): " + err.Error())
}
status := <-statusChan
if status != Status(EOF) {
t.Errorf("expected EOF status got: %s", status.Error())
}
if len(txGrps) != len(expTxn) { if len(txGrps) != len(expTxn) {
t.Fatalf("number of transaction groups expected=%v got=%v", t.Fatalf("number of transaction groups expected=%v got=%v",
len(expTxn), len(txGrps)) len(expTxn), len(txGrps))
return
} }
for i, txGrp := range txGrps { for i, txGrp := range txGrps {
if len(txGrp) != 1 { if len(txGrp) != 1 {
...@@ -325,7 +303,7 @@ func TestDefaultRead(t *testing.T) { ...@@ -325,7 +303,7 @@ func TestDefaultRead(t *testing.T) {
} }
if len(tx.Records) != len(expTx.recs) { if len(tx.Records) != len(expTx.recs) {
t.Errorf("tx number of records expected=%v got %v", t.Errorf("tx number of records expected=%v got=%v",
len(expTx.recs), len(tx.Records)) len(expTx.recs), len(tx.Records))
continue continue
} }
...@@ -356,3 +334,91 @@ func TestDefaultRead(t *testing.T) { ...@@ -356,3 +334,91 @@ func TestDefaultRead(t *testing.T) {
} }
} }
} }
// TestDefaultRead reads the test log file with default options and
// checks the resulting transaction groups against the scraped log.
func TestDefaultRead(t *testing.T) {
	var groups [][]Tx

	logd := New()
	defer logd.Release()
	if err := logd.AttachFile(testFile); err != nil {
		t.Fatal("Cannot attach to " + testFile + ": " + err.Error())
	}

	done := make(chan Status)
	handler := func(grp []Tx, st Status) bool {
		if st == More {
			groups = append(groups, grp)
			return true
		}
		// Terminal status: report it and stop the read.
		done <- st
		return false
	}
	if err := logd.Read(handler, nil); err != nil {
		t.Fatal("Read(): " + err.Error())
	}

	if st := <-done; st != Status(EOF) {
		t.Errorf("expected EOF status got: %s", st.Error())
	}
	checkTxGroups(t, groups, expVxidLog)
}
// readHndl is a shared read callback: while the status is More it
// collects the transaction group into *txGrps and keeps reading;
// otherwise it reports the terminal status on ch and stops.
func readHndl(txGrp []Tx, status Status, txGrps *[][]Tx, ch chan Status) bool {
	if status == More {
		*txGrps = append(*txGrps, txGrp)
		return true
	}
	ch <- status
	return false
}
// TestConcurrentRead runs two independent readers over the same log
// file concurrently and checks that both see the full expected set of
// transaction groups.
func TestConcurrentRead(t *testing.T) {
	var txGrps1, txGrps2 [][]Tx

	l1 := New()
	defer l1.Release()
	err := l1.AttachFile(testFile)
	if err != nil {
		t.Fatal("l1 attach to " + testFile + ": " + err.Error())
	}
	l2 := New()
	defer l2.Release()
	err = l2.AttachFile(testFile)
	if err != nil {
		t.Fatal("l2 attach to " + testFile + ": " + err.Error())
	}

	chan1 := make(chan Status)
	chan2 := make(chan Status)
	hndlr1 := func(txGrp []Tx, rdstatus Status) bool {
		return readHndl(txGrp, rdstatus, &txGrps1, chan1)
	}
	hndlr2 := func(txGrp []Tx, rdstatus Status) bool {
		return readHndl(txGrp, rdstatus, &txGrps2, chan2)
	}

	err1 := l1.Read(hndlr1, nil)
	err2 := l2.Read(hndlr2, nil)
	if err1 != nil {
		// BUG fix: this formatted err.Error(), but err is the (nil)
		// result of l2.AttachFile, which would nil-panic here.
		t.Fatal("l1.Read(): " + err1.Error())
	}
	if err2 != nil {
		// BUG fix: same as above — was err.Error() instead of err2.
		t.Fatal("l2.Read(): " + err2.Error())
	}

	status := <-chan1
	if status != Status(EOF) {
		t.Errorf("expected EOF status got: %s", status.Error())
	}
	status = <-chan2
	if status != Status(EOF) {
		t.Errorf("expected EOF status got: %s", status.Error())
	}
	checkTxGroups(t, txGrps1, expVxidLog)
	checkTxGroups(t, txGrps2, expVxidLog)
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment