Diffstat

-rw-r--r--  spool/load.go            40
-rw-r--r--  spool/parse.go           27
-rw-r--r--  spool/parse_test.go      41
-rw-r--r--  spool/save.go            28
-rw-r--r--  spool/serialize.go       20
-rw-r--r--  spool/serialize_test.go  33
-rw-r--r--  spool/spool.go           26

7 files changed, 121 insertions(+), 94 deletions(-)
diff --git a/spool/load.go b/spool/load.go
deleted file mode 100644
index 5b08bda..0000000
--- a/spool/load.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package spool
-
-import (
-	"bareos-zabbix-check/config"
-	"bareos-zabbix-check/job"
-	"encoding/csv"
-	"fmt"
-	"log"
-	"os"
-	"path/filepath"
-	"strconv"
-)
-
-// Load loads a spool file in path
-func (s *Spool) Load(c *config.Config) (err error) {
-	s.config = c
-	// We read the spool
-	file, err := os.Open(filepath.Join(c.WorkDir(), spoolFile))
-	if err != nil {
-		return fmt.Errorf("Couldn't open spool file, starting from scratch: %s", err)
-	}
-	defer file.Close()
-	lines, err := csv.NewReader(file).ReadAll()
-	if err != nil {
-		return fmt.Errorf("Corrupted spool file, starting from scratch : %s", err)
-	}
-	if c.Verbose() {
-		log.Printf("Spool file content : %v\n", lines)
-	}
-
-	for _, line := range lines {
-		var i int
-		i, err = strconv.Atoi(line[1])
-		if err != nil {
-			return fmt.Errorf("Corrupted spool file : couldn't parse timestamp entry")
-		}
-		s.jobs = append(s.jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
-	}
-	return
-}
diff --git a/spool/parse.go b/spool/parse.go
new file mode 100644
index 0000000..5695890
--- /dev/null
+++ b/spool/parse.go
@@ -0,0 +1,27 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"encoding/csv"
+	"io"
+	"strconv"
+
+	"github.com/pkg/errors"
+)
+
+// Parse parses a spool file
+func Parse(handle io.Reader) (jobs []job.Job, err error) {
+	lines, err := csv.NewReader(handle).ReadAll()
+	if err != nil {
+		return nil, errors.Wrap(err, "Corrupted spool file")
+	}
+	for n := 0; n < len(lines); n++ {
+		line := lines[n]
+		i, err := strconv.Atoi(line[1])
+		if err != nil {
+			return nil, errors.Wrapf(err, "Corrupted spool file : couldn't parse timestamp entry : %s", line[1])
+		}
+		jobs = append(jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
+	}
+	return
+}
diff --git a/spool/parse_test.go b/spool/parse_test.go
new file mode 100644
index 0000000..80b961f
--- /dev/null
+++ b/spool/parse_test.go
@@ -0,0 +1,41 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"bytes"
+	"io"
+	"reflect"
+	"testing"
+	"testing/iotest"
+)
+
+func TestParse(t *testing.T) {
+	readerError := iotest.TimeoutReader(bytes.NewReader([]byte("\n")))
+	readerCorruptedTimestamp := bytes.NewReader([]byte("test,x"))
+	readerOneJob := bytes.NewReader([]byte("test,1"))
+	type args struct {
+		handle io.Reader
+	}
+	tests := []struct {
+		name     string
+		args     args
+		wantJobs []job.Job
+		wantErr  bool
+	}{
+		{"empty", args{readerError}, nil, true},
+		{"corrupted timestamp", args{readerCorruptedTimestamp}, nil, true},
+		{"one job", args{readerOneJob}, []job.Job{{Name: "test", Timestamp: 1, Success: true}}, false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			gotJobs, err := Parse(tt.args.handle)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if !reflect.DeepEqual(gotJobs, tt.wantJobs) {
+				t.Errorf("Parse() = %v, want %v", gotJobs, tt.wantJobs)
+			}
+		})
+	}
+}
diff --git a/spool/save.go b/spool/save.go
deleted file mode 100644
index f25b86a..0000000
--- a/spool/save.go
+++ /dev/null
@@ -1,28 +0,0 @@
-package spool
-
-import (
-	"encoding/csv"
-	"fmt"
-	"os"
-	"path/filepath"
-)
-
-// Save writes a spool on the disk
-func (s *Spool) Save() (err error) {
-	file, err := os.Create(filepath.Join(s.config.WorkDir(), spoolFile))
-	if err != nil {
-		return
-	}
-	defer file.Close()
-
-	lines := make([][]string, len(s.jobs))
-	var i int = 0
-	for _, job := range s.jobs {
-		lines[i] = make([]string, 2)
-		lines[i][0] = job.Name
-		lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
-		i++
-	}
-	err = csv.NewWriter(file).WriteAll(lines)
-	return
-}
diff --git a/spool/serialize.go b/spool/serialize.go
new file mode 100644
index 0000000..04af8da
--- /dev/null
+++ b/spool/serialize.go
@@ -0,0 +1,20 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"encoding/csv"
+	"fmt"
+	"io"
+)
+
+// Serialize writes a spool on the disk
+func Serialize(handle io.Writer, jobs []job.Job) error {
+	lines := make([][]string, len(jobs))
+	for i := 0; i < len(jobs); i++ {
+		job := jobs[i]
+		lines[i] = make([]string, 2)
+		lines[i][0] = job.Name
+		lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
+	}
+	return csv.NewWriter(handle).WriteAll(lines)
+}
diff --git a/spool/serialize_test.go b/spool/serialize_test.go
new file mode 100644
index 0000000..896125c
--- /dev/null
+++ b/spool/serialize_test.go
@@ -0,0 +1,33 @@
+package spool
+
+import (
+	"bareos-zabbix-check/job"
+	"bytes"
+	"testing"
+)
+
+func TestSerialize(t *testing.T) {
+	type args struct {
+		jobs []job.Job
+	}
+	tests := []struct {
+		name       string
+		args       args
+		wantHandle string
+		wantErr    bool
+	}{
+		{"One job", args{[]job.Job{{Name: "a", Timestamp: 1}}}, "a,1\n", false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			handle := &bytes.Buffer{}
+			if err := Serialize(handle, tt.args.jobs); (err != nil) != tt.wantErr {
+				t.Errorf("Serialize() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if gotHandle := handle.String(); gotHandle != tt.wantHandle {
+				t.Errorf("Serialize() = %v, want %v", gotHandle, tt.wantHandle)
+			}
+		})
+	}
+}
diff --git a/spool/spool.go b/spool/spool.go
deleted file mode 100644
index e095979..0000000
--- a/spool/spool.go
+++ /dev/null
@@ -1,26 +0,0 @@
-package spool
-
-import (
-	"bareos-zabbix-check/config"
-	"bareos-zabbix-check/job"
-)
-
-const (
-	spoolFile = "bareos-zabbix-check.spool"
-)
-
-// Spool is an object for manipulating a bareos spool file
-type Spool struct {
-	config *config.Config
-	jobs   []job.Job
-}
-
-// Jobs exports a spool to a jobs list
-func (s *Spool) Jobs() []job.Job {
-	return s.jobs
-}
-
-// SetJobs sets a jobs list
-func (s *Spool) SetJobs(jobs []job.Job) {
-	s.jobs = jobs
-}
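
Note (not part of the commit): the change replaces the stateful Spool type and its Load/Save methods with the pure functions Parse(io.Reader) and Serialize(io.Writer, []job.Job), moving file handling and the spool path to the caller. The sketch below shows one way a caller might wire these functions around a file on disk. The spool path, directory, and error handling are assumptions for illustration only; they are not taken from this repository.

package main

import (
	"log"
	"os"
	"path/filepath"

	"bareos-zabbix-check/job"
	"bareos-zabbix-check/spool"
)

func main() {
	// Hypothetical spool location; with this commit the spool package no
	// longer decides where the file lives, so the caller picks the path.
	path := filepath.Join("/var/spool/bareos-zabbix-check", "bareos-zabbix-check.spool")

	// Read and parse the existing spool, if any; on any error we simply
	// start from an empty job list, as the old Load did.
	var jobs []job.Job
	if f, err := os.Open(path); err == nil {
		jobs, err = spool.Parse(f)
		f.Close()
		if err != nil {
			log.Printf("starting from scratch: %v", err)
		}
	}

	// ... update the jobs slice with fresh job results here ...

	// Serialize the jobs back to disk.
	out, err := os.Create(path)
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()
	if err := spool.Serialize(out, jobs); err != nil {
		log.Fatal(err)
	}
}

Taking io.Reader/io.Writer instead of opening files inside the package is also what makes the new parse_test.go and serialize_test.go possible: the tests exercise Parse and Serialize against bytes.Reader, iotest.TimeoutReader, and bytes.Buffer without touching the filesystem.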