-rw-r--r--  main.go                  | 30
-rw-r--r--  spool/load.go            | 40
-rw-r--r--  spool/parse.go           | 29
-rw-r--r--  spool/parse_test.go      | 41
-rw-r--r--  spool/save.go            | 28
-rw-r--r--  spool/serialize.go       | 17
-rw-r--r--  spool/serialize_test.go  | 33
-rw-r--r--  spool/spool.go           | 26
8 files changed, 142 insertions(+), 102 deletions(-)
diff --git a/main.go b/main.go
index bc5945c..62347ab 100644
--- a/main.go
+++ b/main.go
@@ -8,13 +8,17 @@ import (
"fmt"
"log"
"os"
+ "path/filepath"
"time"
)
+const (
+ spoolFileName = "bareos-zabbix-check.spool"
+)
+
func main() {
var (
config config.Config
- spool spool.Spool
errorString string
missingString string
)
@@ -58,14 +62,24 @@ func main() {
// We will check for errors in loading the spool file only at the end. If all jobs ran successfully without errors
// in the state file and we manage to write a new spool file without errors, then we will ignore any error here to
// avoid false positives during backup bootstrap
- err = spool.Load(&config)
+ // Open the spool file
+ spoolFile, spoolErr := os.Open(filepath.Join(config.WorkDir(), spoolFileName))
+ var spoolJobs []job.Job
+ if spoolErr == nil {
+ defer spoolFile.Close()
+ spoolJobs, spoolErr = spool.Parse(spoolFile)
+ }
- jobs = job.KeepOldestOnly(append(jobs, spool.Jobs()...))
- spool.SetJobs(job.KeepSuccessOnly(jobs))
+ jobs = job.KeepOldestOnly(append(jobs, spoolJobs...))
// we write this new spool
- if err2 := spool.Save(); err2 != nil {
- fmt.Printf("AVERAGE: Error saving the spool file : %s\n", err2)
+ spoolFile, err = os.Create(filepath.Join(config.WorkDir(), spoolFileName))
+ if err == nil {
+ defer spoolFile.Close()
+ err = spool.Serialize(spoolFile, job.KeepSuccessOnly(jobs))
+ }
+ if err != nil {
+ fmt.Printf("AVERAGE: Error saving the spool file : %s\n", err)
os.Exit(0)
}
@@ -91,8 +105,8 @@ func main() {
// Finally we output
if errorString != "" || missingString != "" {
fmt.Printf("AVERAGE: %s %s", errorString, missingString)
- if err != nil {
- fmt.Printf(" additionnal errors: %s", err)
+ if spoolErr != nil {
+ fmt.Printf(" additionnal errors: %s", spoolErr)
}
} else {
fmt.Printf("OK")
diff --git a/spool/load.go b/spool/load.go
deleted file mode 100644
index 5b08bda..0000000
--- a/spool/load.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package spool
-
-import (
- "bareos-zabbix-check/config"
- "bareos-zabbix-check/job"
- "encoding/csv"
- "fmt"
- "log"
- "os"
- "path/filepath"
- "strconv"
-)
-
-// Load loads a spool file in path
-func (s *Spool) Load(c *config.Config) (err error) {
- s.config = c
- // We read the spool
- file, err := os.Open(filepath.Join(c.WorkDir(), spoolFile))
- if err != nil {
- return fmt.Errorf("Couldn't open spool file, starting from scratch: %s", err)
- }
- defer file.Close()
- lines, err := csv.NewReader(file).ReadAll()
- if err != nil {
- return fmt.Errorf("Corrupted spool file, starting from scratch : %s", err)
- }
- if c.Verbose() {
- log.Printf("Spool file content : %v\n", lines)
- }
-
- for _, line := range lines {
- var i int
- i, err = strconv.Atoi(line[1])
- if err != nil {
- return fmt.Errorf("Corrupted spool file : couldn't parse timestamp entry")
- }
- s.jobs = append(s.jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
- }
- return
-}
diff --git a/spool/parse.go b/spool/parse.go
new file mode 100644
index 0000000..5695890
--- /dev/null
+++ b/spool/parse.go
@@ -0,0 +1,29 @@
+package spool
+
+import (
+ "bareos-zabbix-check/job"
+ "encoding/csv"
+ "io"
+ "strconv"
+
+ "github.com/pkg/errors"
+)
+
+// Parse parses a spool file
+func Parse(handle io.Reader) (jobs []job.Job, err error) {
+ lines, err := csv.NewReader(handle).ReadAll()
+ if err != nil {
+ return nil, errors.Wrap(err, "Corrupted spool file")
+ }
+ for _, line := range lines {
+ if len(line) != 2 {
+ return nil, errors.New("Corrupted spool file : wrong number of fields on line")
+ }
+ timestamp, err := strconv.ParseUint(line[1], 10, 64)
+ if err != nil {
+ return nil, errors.Wrapf(err, "Corrupted spool file : couldn't parse timestamp entry : %s", line[1])
+ }
+ jobs = append(jobs, job.Job{Name: line[0], Timestamp: timestamp, Success: true})
+ }
+ return
+}
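
Because Parse takes any io.Reader, it can be exercised without touching the filesystem. A minimal usage sketch (the job names and timestamps are made up for illustration):

package main

import (
	"fmt"
	"strings"

	"bareos-zabbix-check/spool"
)

func main() {
	// Each CSV record is "name,timestamp"; Parse marks every job it loads as successful.
	jobs, err := spool.Parse(strings.NewReader("backup-host1,1579000000\nbackup-host2,1579086400\n"))
	if err != nil {
		fmt.Println("parse failed:", err)
		return
	}
	for _, j := range jobs {
		fmt.Printf("%s last ran at %d\n", j.Name, j.Timestamp)
	}
}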
diff --git a/spool/parse_test.go b/spool/parse_test.go
new file mode 100644
index 0000000..80b961f
--- /dev/null
+++ b/spool/parse_test.go
@@ -0,0 +1,41 @@
+package spool
+
+import (
+ "bareos-zabbix-check/job"
+ "bytes"
+ "io"
+ "reflect"
+ "testing"
+ "testing/iotest"
+)
+
+func TestParse(t *testing.T) {
+ readerError := iotest.TimeoutReader(bytes.NewReader([]byte("\n")))
+ readerCorruptedTimestamp := bytes.NewReader([]byte("test,x"))
+ readerOneJob := bytes.NewReader([]byte("test,1"))
+ type args struct {
+ handle io.Reader
+ }
+ tests := []struct {
+ name string
+ args args
+ wantJobs []job.Job
+ wantErr bool
+ }{
+ {"empty", args{readerError}, nil, true},
+ {"corrupted timestamp", args{readerCorruptedTimestamp}, nil, true},
+ {"one job", args{readerOneJob}, []job.Job{{Name: "test", Timestamp: 1, Success: true}}, false},
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ gotJobs, err := Parse(tt.args.handle)
+ if (err != nil) != tt.wantErr {
+ t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if !reflect.DeepEqual(gotJobs, tt.wantJobs) {
+ t.Errorf("Parse() = %v, want %v", gotJobs, tt.wantJobs)
+ }
+ })
+ }
+}
diff --git a/spool/save.go b/spool/save.go
deleted file mode 100644
index f25b86a..0000000
--- a/spool/save.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package spool
-
-import (
- "encoding/csv"
- "fmt"
- "os"
- "path/filepath"
-)
-
-// Save writes a spool on the disk
-func (s *Spool) Save() (err error) {
- file, err := os.Create(filepath.Join(s.config.WorkDir(), spoolFile))
- if err != nil {
- return
- }
- defer file.Close()
-
- lines := make([][]string, len(s.jobs))
- var i int = 0
- for _, job := range s.jobs {
- lines[i] = make([]string, 2)
- lines[i][0] = job.Name
- lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
- i++
- }
- err = csv.NewWriter(file).WriteAll(lines)
- return
-}
diff --git a/spool/serialize.go b/spool/serialize.go
new file mode 100644
index 0000000..04af8da
--- /dev/null
+++ b/spool/serialize.go
@@ -0,0 +1,17 @@
+package spool
+
+import (
+ "bareos-zabbix-check/job"
+ "encoding/csv"
+ "fmt"
+ "io"
+)
+
+// Serialize writes a spool to handle, one "name,timestamp" CSV record per job
+func Serialize(handle io.Writer, jobs []job.Job) error {
+ lines := make([][]string, len(jobs))
+ for i, job := range jobs {
+ lines[i] = []string{job.Name, fmt.Sprintf("%d", job.Timestamp)}
+ }
+ return csv.NewWriter(handle).WriteAll(lines)
+}
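
Symmetrically, Serialize accepts any io.Writer, so a bytes.Buffer gives a cheap in-memory round trip against Parse. A sketch under the same assumptions as above:

package main

import (
	"bytes"
	"fmt"

	"bareos-zabbix-check/job"
	"bareos-zabbix-check/spool"
)

func main() {
	var buf bytes.Buffer
	in := []job.Job{{Name: "backup-host1", Timestamp: 1579000000, Success: true}}
	if err := spool.Serialize(&buf, in); err != nil {
		fmt.Println("serialize failed:", err)
		return
	}
	fmt.Print(buf.String()) // backup-host1,1579000000
	out, err := spool.Parse(&buf) // reading back restores the job list
	fmt.Println(out, err)
}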
diff --git a/spool/serialize_test.go b/spool/serialize_test.go
new file mode 100644
index 0000000..896125c
--- /dev/null
+++ b/spool/serialize_test.go
@@ -0,0 +1,33 @@
+package spool
+
+import (
+ "bareos-zabbix-check/job"
+ "bytes"
+ "testing"
+)
+
+func TestSerialize(t *testing.T) {
+ type args struct {
+ jobs []job.Job
+ }
+ tests := []struct {
+ name string
+ args args
+ wantHandle string
+ wantErr bool
+ }{
+ {"One job", args{[]job.Job{{Name: "a", Timestamp: 1}}}, "a,1\n", false},
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ handle := &bytes.Buffer{}
+ if err := Serialize(handle, tt.args.jobs); (err != nil) != tt.wantErr {
+ t.Errorf("Serialize() error = %v, wantErr %v", err, tt.wantErr)
+ return
+ }
+ if gotHandle := handle.String(); gotHandle != tt.wantHandle {
+ t.Errorf("Serialize() = %v, want %v", gotHandle, tt.wantHandle)
+ }
+ })
+ }
+}
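
Both table-driven tests follow the standard Go testing layout and run with the stock toolchain:

go test ./spool/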
diff --git a/spool/spool.go b/spool/spool.go
deleted file mode 100644
index e095979..0000000
--- a/spool/spool.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package spool
-
-import (
- "bareos-zabbix-check/config"
- "bareos-zabbix-check/job"
-)
-
-const (
- spoolFile = "bareos-zabbix-check.spool"
-)
-
-// Spool is an object for manipulating a bareos spool file
-type Spool struct {
- config *config.Config
- jobs []job.Job
-}
-
-// Jobs exports a spool to a jobs list
-func (s *Spool) Jobs() []job.Job {
- return s.jobs
-}
-
-// SetJobs sets a jobs list
-func (s *Spool) SetJobs(jobs []job.Job) {
- s.jobs = jobs
-}