Updated for go 1.16 modules
This commit is contained in:
parent 8278d7b471
commit 38d9c881b3
25 changed files with 25 additions and 19 deletions
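For reference, a minimal go.mod sketch consistent with this change; the module path is inferred from the import paths in the files below and the go directive from the commit title, while the github.com/pkg/errors version is an assumption and not taken from this commit:

	module git.adyxax.org/adyxax/bareos-zabbix-check

	go 1.16

	require github.com/pkg/errors v0.9.1 // version assumed for illustration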
27 pkg/spool/parse.go Normal file
@@ -0,0 +1,27 @@
package spool

import (
	"encoding/csv"
	"io"
	"strconv"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
	"github.com/pkg/errors"
)

// Parse parses a spool file
func Parse(handle io.Reader) (jobs []job.Job, err error) {
	lines, err := csv.NewReader(handle).ReadAll()
	if err != nil {
		return nil, errors.Wrap(err, "Corrupted spool file")
	}
	for n := 0; n < len(lines); n++ {
		line := lines[n]
		i, err := strconv.Atoi(line[1])
		if err != nil {
			return nil, errors.Wrapf(err, "Corrupted spool file : couldn't parse timestamp entry : %s", line[1])
		}
		jobs = append(jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
	}
	return
}
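A hedged usage sketch for Parse: the spool file name below is hypothetical and the package import path is inferred from the pkg/spool directory; it only illustrates that any io.Reader can be handed to Parse.

	package main

	import (
		"fmt"
		"log"
		"os"

		"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/spool"
	)

	func main() {
		// Hypothetical spool file path, for illustration only.
		handle, err := os.Open("bareos-zabbix-check.spool")
		if err != nil {
			log.Fatal(err)
		}
		defer handle.Close()
		jobs, err := spool.Parse(handle)
		if err != nil {
			log.Fatal(err)
		}
		for _, j := range jobs {
			fmt.Printf("%s: last timestamp %d\n", j.Name, j.Timestamp)
		}
	}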
42 pkg/spool/parse_test.go Normal file
@@ -0,0 +1,42 @@
package spool

import (
	"bytes"
	"io"
	"reflect"
	"testing"
	"testing/iotest"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
)

func TestParse(t *testing.T) {
	readerError := iotest.TimeoutReader(bytes.NewReader([]byte("\n")))
	readerCorruptedTimestamp := bytes.NewReader([]byte("test,x"))
	readerOneJob := bytes.NewReader([]byte("test,1"))
	type args struct {
		handle io.Reader
	}
	tests := []struct {
		name     string
		args     args
		wantJobs []job.Job
		wantErr  bool
	}{
		{"empty", args{readerError}, nil, true},
		{"corrupted timestamp", args{readerCorruptedTimestamp}, nil, true},
		{"one job", args{readerOneJob}, []job.Job{{Name: "test", Timestamp: 1, Success: true}}, false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotJobs, err := Parse(tt.args.handle)
			if (err != nil) != tt.wantErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(gotJobs, tt.wantJobs) {
				t.Errorf("Parse() = %v, want %v", gotJobs, tt.wantJobs)
			}
		})
	}
}
21 pkg/spool/serialize.go Normal file
@@ -0,0 +1,21 @@
package spool

import (
	"encoding/csv"
	"fmt"
	"io"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
)

// Serialize writes a spool on the disk
func Serialize(handle io.Writer, jobs []job.Job) error {
	lines := make([][]string, len(jobs))
	for i := 0; i < len(jobs); i++ {
		job := jobs[i]
		lines[i] = make([]string, 2)
		lines[i][0] = job.Name
		lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
	}
	return csv.NewWriter(handle).WriteAll(lines)
}
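A minimal round-trip sketch using only the functions added here (job name and timestamp are illustrative): Serialize writes Name and Timestamp as CSV, and Parse reads them back, setting Success to true since that flag is not persisted.

	package main

	import (
		"bytes"
		"fmt"
		"log"

		"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
		"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/spool"
	)

	func main() {
		// Illustrative job entry; any name/timestamp would do.
		jobs := []job.Job{{Name: "backup-srv1", Timestamp: 1615000000, Success: true}}
		var buf bytes.Buffer
		if err := spool.Serialize(&buf, jobs); err != nil {
			log.Fatal(err)
		}
		fmt.Print(buf.String()) // backup-srv1,1615000000
		parsed, err := spool.Parse(&buf)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(parsed[0] == jobs[0]) // true: the entry survives the round trip
	}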
34 pkg/spool/serialize_test.go Normal file
@@ -0,0 +1,34 @@
package spool

import (
	"bytes"
	"testing"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
)

func TestSerialize(t *testing.T) {
	type args struct {
		jobs []job.Job
	}
	tests := []struct {
		name       string
		args       args
		wantHandle string
		wantErr    bool
	}{
		{"One job", args{[]job.Job{{Name: "a", Timestamp: 1}}}, "a,1\n", false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			handle := &bytes.Buffer{}
			if err := Serialize(handle, tt.args.jobs); (err != nil) != tt.wantErr {
				t.Errorf("Serialize() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if gotHandle := handle.String(); gotHandle != tt.wantHandle {
				t.Errorf("Serialize() = %v, want %v", gotHandle, tt.wantHandle)
			}
		})
	}
}