Added tests to the spool package, and reworked the code around that.

parent bcfaffac24 · commit 2661ce9a2b

8 changed files with 143 additions and 102 deletions
main.go (30 changes)

@@ -8,13 +8,17 @@ import (
 	"fmt"
 	"log"
 	"os"
+	"path/filepath"
 	"time"
 )
 
+const (
+	spoolFileName = "bareos-zabbix-check.spool"
+)
+
 func main() {
 	var (
 		config        config.Config
-		spool         spool.Spool
 		errorString   string
 		missingString string
 	)
@@ -58,14 +62,24 @@ func main() {
 	// We will check for errors in loading the spool file only at the end. If all jobs ran successfully without errors
 	// in the state file and we manage to write a new spool file without errors, then we will ignore any error here to
 	// avoid false positives during backup bootstrap
-	err = spool.Load(&config)
+	// Open the spool file
+	spoolFile, spoolErr := os.Open(filepath.Join(config.WorkDir(), spoolFileName))
+	var spoolJobs []job.Job
+	if err == nil {
+		defer spoolFile.Close()
+		spoolJobs, spoolErr = spool.Parse(spoolFile)
+	}
 
-	jobs = job.KeepOldestOnly(append(jobs, spool.Jobs()...))
-	spool.SetJobs(job.KeepSuccessOnly(jobs))
+	jobs = job.KeepOldestOnly(append(jobs, spoolJobs...))
 
 	// we write this new spool
-	if err2 := spool.Save(); err2 != nil {
-		fmt.Printf("AVERAGE: Error saving the spool file : %s\n", err2)
+	spoolFile, err = os.Create(filepath.Join(config.WorkDir(), spoolFileName))
+	if err == nil {
+		defer spoolFile.Close()
+		err = spool.Serialize(spoolFile, jobs)
+	}
+	if err != nil {
+		fmt.Printf("AVERAGE: Error saving the spool file : %s\n", err)
 		os.Exit(0)
 	}
 
@@ -91,8 +105,8 @@ func main() {
 	// Finally we output
 	if errorString != "" || missingString != "" {
 		fmt.Printf("AVERAGE: %s %s", errorString, missingString)
-		if err != nil {
-			fmt.Printf(" additionnal errors: %s", err)
+		if spoolErr != nil {
+			fmt.Printf(" additionnal errors: %s", spoolErr)
 		}
 	} else {
 		fmt.Printf("OK")
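Review note: the rework replaces the stateful Spool type, whose Load and Save methods reached into the filesystem through the stored config, with pure functions over io.Reader and io.Writer; main.go now owns the file handles. (The guard after os.Open appears to test err, the pre-existing error variable, rather than spoolErr; the open error itself is only reported at the very end, as the comment above it announces.) A minimal sketch of the resulting round trip against an in-memory buffer; the "bareos-zabbix-check/spool" import path and the job name are assumptions, not taken from the diff:

package main

import (
	"bytes"
	"fmt"

	"bareos-zabbix-check/job"
	"bareos-zabbix-check/spool" // assumed import path for the spool package
)

func main() {
	// Serialize and Parse only see io.Writer / io.Reader, so a
	// bytes.Buffer can stand in for the spool file on disk.
	var buf bytes.Buffer
	if err := spool.Serialize(&buf, []job.Job{{Name: "nightly", Timestamp: 1}}); err != nil {
		fmt.Println("serialize:", err)
		return
	}
	parsed, err := spool.Parse(&buf)
	if err != nil {
		fmt.Println("parse:", err)
		return
	}
	fmt.Println(parsed) // [{nightly 1 true}] -- Parse marks loaded jobs successful
}

Because both helpers accept interfaces, the new tests below never have to touch the disk.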
(deleted file, 40 deletions)

@@ -1,40 +0,0 @@
-package spool
-
-import (
-	"bareos-zabbix-check/config"
-	"bareos-zabbix-check/job"
-	"encoding/csv"
-	"fmt"
-	"log"
-	"os"
-	"path/filepath"
-	"strconv"
-)
-
-// Load loads a spool file in path
-func (s *Spool) Load(c *config.Config) (err error) {
-	s.config = c
-	// We read the spool
-	file, err := os.Open(filepath.Join(c.WorkDir(), spoolFile))
-	if err != nil {
-		return fmt.Errorf("Couldn't open spool file, starting from scratch: %s", err)
-	}
-	defer file.Close()
-	lines, err := csv.NewReader(file).ReadAll()
-	if err != nil {
-		return fmt.Errorf("Corrupted spool file, starting from scratch : %s", err)
-	}
-	if c.Verbose() {
-		log.Printf("Spool file content : %v\n", lines)
-	}
-
-	for _, line := range lines {
-		var i int
-		i, err = strconv.Atoi(line[1])
-		if err != nil {
-			return fmt.Errorf("Corrupted spool file : couldn't parse timestamp entry")
-		}
-		s.jobs = append(s.jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
-	}
-	return
-}
spool/parse.go (new file, 27 additions)

@@ -0,0 +1,27 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"encoding/csv"
+	"io"
+	"strconv"
+
+	"github.com/pkg/errors"
+)
+
+// Parse parses a spool file
+func Parse(handle io.Reader) (jobs []job.Job, err error) {
+	lines, err := csv.NewReader(handle).ReadAll()
+	if err != nil {
+		return nil, errors.Wrap(err, "Corrupted spool file")
+	}
+	for n := 0; n < len(lines); n++ {
+		line := lines[n]
+		i, err := strconv.Atoi(line[1])
+		if err != nil {
+			return nil, errors.Wrapf(err, "Corrupted spool file : couldn't parse timestamp entry : %s", line[1])
+		}
+		jobs = append(jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
+	}
+	return
+}
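The format Parse expects is plain CSV, one name,timestamp record per job. A short usage sketch with inline data (the job names and timestamps are made up):

package main

import (
	"fmt"
	"strings"

	"bareos-zabbix-check/spool" // assumed import path
)

func main() {
	// Two well-formed records; Parse turns each into a job.Job.
	jobs, err := spool.Parse(strings.NewReader("app,1580000000\ndb,1580003600\n"))
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, j := range jobs {
		fmt.Println(j.Name, j.Timestamp)
	}
}

One caveat worth noting: if the first record has only a single field, line[1] panics before the strconv check ever runs, since encoding/csv only enforces that later records match the first record's field count.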
spool/parse_test.go (new file, 41 additions)

@@ -0,0 +1,41 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"bytes"
+	"io"
+	"reflect"
+	"testing"
+	"testing/iotest"
+)
+
+func TestParse(t *testing.T) {
+	readerError := iotest.TimeoutReader(bytes.NewReader([]byte("\n")))
+	readerCorruptedTimestamp := bytes.NewReader([]byte("test,x"))
+	readerOneJob := bytes.NewReader([]byte("test,1"))
+	type args struct {
+		handle io.Reader
+	}
+	tests := []struct {
+		name     string
+		args     args
+		wantJobs []job.Job
+		wantErr  bool
+	}{
+		{"empty", args{readerError}, nil, true},
+		{"corrupted timestamp", args{readerCorruptedTimestamp}, nil, true},
+		{"one job", args{readerOneJob}, []job.Job{{Name: "test", Timestamp: 1, Success: true}}, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			gotJobs, err := Parse(tt.args.handle)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(gotJobs, tt.wantJobs) {
+				t.Errorf("Parse() = %v, want %v", gotJobs, tt.wantJobs)
+			}
+		})
+	}
+}
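The "empty" case exercises a failing reader rather than genuinely empty input: iotest.TimeoutReader lets the first Read through and fails the second with iotest.ErrTimeout, which csv.ReadAll surfaces because it is not io.EOF. A standalone sketch of that mechanism:

package main

import (
	"bytes"
	"encoding/csv"
	"fmt"
	"testing/iotest"
)

func main() {
	// First Read returns "\n"; the second fails with iotest.ErrTimeout,
	// so ReadAll returns an error instead of an empty record set.
	r := iotest.TimeoutReader(bytes.NewReader([]byte("\n")))
	_, err := csv.NewReader(r).ReadAll()
	fmt.Println(err) // timeout
}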
(deleted file, 28 deletions)

@@ -1,28 +0,0 @@
-package spool
-
-import (
-	"encoding/csv"
-	"fmt"
-	"os"
-	"path/filepath"
-)
-
-// Save writes a spool on the disk
-func (s *Spool) Save() (err error) {
-	file, err := os.Create(filepath.Join(s.config.WorkDir(), spoolFile))
-	if err != nil {
-		return
-	}
-	defer file.Close()
-
-	lines := make([][]string, len(s.jobs))
-	var i int = 0
-	for _, job := range s.jobs {
-		lines[i] = make([]string, 2)
-		lines[i][0] = job.Name
-		lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
-		i++
-	}
-	err = csv.NewWriter(file).WriteAll(lines)
-	return
-}
spool/serialize.go (new file, 20 additions)

@@ -0,0 +1,20 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"encoding/csv"
+	"fmt"
+	"io"
+)
+
+// Serialize writes a spool on the disk
+func Serialize(handle io.Writer, jobs []job.Job) error {
+	lines := make([][]string, len(jobs))
+	for i := 0; i < len(jobs); i++ {
+		job := jobs[i]
+		lines[i] = make([]string, 2)
+		lines[i][0] = job.Name
+		lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
+	}
+	return csv.NewWriter(handle).WriteAll(lines)
+}
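csv.Writer.WriteAll writes the records and then flushes, returning any error from the flush, so Serialize needs no explicit Flush call. A sketch of the emitted format, again with a buffer standing in for the file (import path assumed as before):

package main

import (
	"bytes"
	"fmt"

	"bareos-zabbix-check/job"
	"bareos-zabbix-check/spool" // assumed import path
)

func main() {
	var buf bytes.Buffer
	err := spool.Serialize(&buf, []job.Job{
		{Name: "a", Timestamp: 1},
		{Name: "b", Timestamp: 2},
	})
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Print(buf.String())
	// a,1
	// b,2
}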
spool/serialize_test.go (new file, 33 additions)

@@ -0,0 +1,33 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"bytes"
+	"testing"
+)
+
+func TestSerialize(t *testing.T) {
+	type args struct {
+		jobs []job.Job
+	}
+	tests := []struct {
+		name       string
+		args       args
+		wantHandle string
+		wantErr    bool
+	}{
+		{"One job", args{[]job.Job{{Name: "a", Timestamp: 1}}}, "a,1\n", false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			handle := &bytes.Buffer{}
+			if err := Serialize(handle, tt.args.jobs); (err != nil) != tt.wantErr {
+				t.Errorf("Serialize() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if gotHandle := handle.String(); gotHandle != tt.wantHandle {
+				t.Errorf("Serialize() = %v, want %v", gotHandle, tt.wantHandle)
+			}
+		})
+	}
+}
(deleted file, 26 deletions)

@@ -1,26 +0,0 @@
-package spool
-
-import (
-	"bareos-zabbix-check/config"
-	"bareos-zabbix-check/job"
-)
-
-const (
-	spoolFile = "bareos-zabbix-check.spool"
-)
-
-// Spool is an object for manipulating a bareos spool file
-type Spool struct {
-	config *config.Config
-	jobs   []job.Job
-}
-
-// Jobs exports a spool to a jobs list
-func (s *Spool) Jobs() []job.Job {
-	return s.jobs
-}
-
-// SetJobs sets a jobs list
-func (s *Spool) SetJobs(jobs []job.Job) {
-	s.jobs = jobs
-}