
Updated for go 1.16 modules

Julien Dessaux 2021-04-23 16:51:22 +02:00
parent 8278d7b471
commit 38d9c881b3
25 changed files with 25 additions and 19 deletions

57
pkg/state/header.go Normal file

@@ -0,0 +1,57 @@
package state
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/utils"
"github.com/pkg/errors"
)
// headerLength : the length of the state file header struct
const headerLength = 14 + 2 + 4 + 4 + 8 + 8 // + 19*8
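// With the reserved area left commented out, this is 14 + 2 + 4 + 4 + 8 + 8 = 40 bytes.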
// Header is a structure to hold the header of the state file. It is statically aligned for amd64 architecture
// This comes from bareos repository file core/src/lib/bsys.cc:525 and core/src/lib/bsys.cc:652
type Header struct {
ID [14]byte
_ int16
Version int32
_ int32
LastJobsAddr uint64
EndOfRecentJobResultsList uint64
//Reserved [19]uint64
}
func (sfh *Header) String() string {
return fmt.Sprintf("ID: \"%s\", Version: %d, LastJobsAddr: %d, EndOfRecentJobResultsList: %d",
string(sfh.ID[:utils.Clen(sfh.ID[:])]), sfh.Version, sfh.LastJobsAddr, sfh.EndOfRecentJobResultsList)
}
// ParseHeader parses a Header struct
func ParseHeader(handle io.Reader) (h *Header, err error) {
// Parsing the state file header
data := make([]byte, headerLength)
n, err := handle.Read(data)
if err != nil {
return nil, errors.Wrap(err, "Corrupted state file")
}
if n != headerLength {
return nil, fmt.Errorf("Corrupted state file : invalid header length")
}
buffer := bytes.NewBuffer(data)
h = &Header{}
_ = binary.Read(buffer, binary.LittleEndian, h) // this call cannot fail since we checked the header length
if id := string(h.ID[:utils.Clen(h.ID[:])]); id != "Bareos State\n" && id != "Bacula State\n" {
return nil, fmt.Errorf("Corrupted state file : Not a bareos or bacula state file : %s", id)
}
if h.Version != 4 {
return nil, fmt.Errorf("Invalid state file : This script only supports bareos state file version 4, got %d", h.Version)
}
if h.LastJobsAddr == 0 {
return nil, fmt.Errorf("No jobs exist in the state file")
}
return
}
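A minimal usage sketch for ParseHeader (the caller opens the state file itself; the path below is hypothetical and depends on the bareos-fd configuration):

package main

import (
	"fmt"
	"log"
	"os"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/state"
)

func main() {
	// Hypothetical state file path; adjust to your bareos file daemon setup.
	f, err := os.Open("/var/lib/bareos/bareos-fd.9102.state")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	header, err := state.ParseHeader(f)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(header)
}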

109
pkg/state/header_test.go Normal file

@@ -0,0 +1,109 @@
package state
import (
"bytes"
"io"
"reflect"
"testing"
)
func Test_header_String(t *testing.T) {
var id [14]byte
copy(id[:], []byte("test"))
type fields struct {
ID [14]byte
Version int32
LastJobsAddr uint64
EndOfRecentJobResultsList uint64
}
tests := []struct {
name string
fields fields
want string
}{
{"default header", fields{ID: id, Version: 1, LastJobsAddr: 2, EndOfRecentJobResultsList: 3}, "ID: \"test\", Version: 1, LastJobsAddr: 2, EndOfRecentJobResultsList: 3"},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
sfh := Header{
ID: tt.fields.ID,
Version: tt.fields.Version,
LastJobsAddr: tt.fields.LastJobsAddr,
EndOfRecentJobResultsList: tt.fields.EndOfRecentJobResultsList,
}
if got := sfh.String(); got != tt.want {
t.Errorf("header.String() = %v, want %v", got, tt.want)
}
})
}
}
func Test_parseHeader(t *testing.T) {
readerEmpty := bytes.NewReader([]byte(""))
readerTooSmall := bytes.NewReader([]byte("abcd"))
readerNotBareosNorBacula := bytes.NewReader([]byte{
't', 'e', 's', 't', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // ID
0, 0, // padding
4, 0, 0, 0, // version
0, 0, 0, 0, //padding
0, 0, 0, 0, 0, 0, 0, 0, // last job address
0, 0, 0, 0, 0, 0, 0, 0, // EndOfRecentJobResultsList
})
readerBadVersion := bytes.NewReader([]byte{
'B', 'a', 'r', 'e', 'o', 's', ' ', 'S', 't', 'a', 't', 'e', '\n', 0, // ID
0, 0, // padding
3, 0, 0, 0, // version
0, 0, 0, 0, //padding
0, 0, 0, 0, 0, 0, 0, 0, // last job address
0, 0, 0, 0, 0, 0, 0, 0, // EndOfRecentJobResultsList
})
readerNoJobs := bytes.NewReader([]byte{
'B', 'a', 'r', 'e', 'o', 's', ' ', 'S', 't', 'a', 't', 'e', '\n', 0, // ID
0, 0, // padding
4, 0, 0, 0, // version
0, 0, 0, 0, //padding
0, 0, 0, 0, 0, 0, 0, 0, // last job address
0, 0, 0, 0, 0, 0, 0, 0, // EndOfRecentJobResultsList
})
readerValid := bytes.NewReader([]byte{
'B', 'a', 'r', 'e', 'o', 's', ' ', 'S', 't', 'a', 't', 'e', '\n', 0, // ID
0, 0, // padding
4, 0, 0, 0, // version
0, 0, 0, 0, //padding
192, 0, 0, 0, 0, 0, 0, 0, // last job address
254, 0, 0, 0, 0, 0, 0, 0, // EndOfRecentJobResultsList
})
type args struct {
handle io.Reader
}
tests := []struct {
name string
args args
wantH *Header
wantErr bool
}{
{"read error", args{readerEmpty}, nil, true},
{"invalid header length", args{readerTooSmall}, nil, true},
{"reader not bareos nor bacula", args{readerNotBareosNorBacula}, nil, true},
{"reader bad version", args{readerBadVersion}, nil, true},
{"reader no jobs", args{readerNoJobs}, nil, true},
{"reader valid", args{readerValid}, &Header{
ID: [14]byte{'B', 'a', 'r', 'e', 'o', 's', ' ', 'S', 't', 'a', 't', 'e', '\n', 0},
Version: 4,
LastJobsAddr: 192,
EndOfRecentJobResultsList: 254,
}, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotH, err := ParseHeader(tt.args.handle)
if (err != nil) != tt.wantErr {
t.Errorf("parseHeader() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(gotH, tt.wantH) {
t.Errorf("parseHeader() = %v, want %v", gotH, tt.wantH)
}
})
}
}

96
pkg/state/job.go Normal file

@@ -0,0 +1,96 @@
package state
import (
"bytes"
"encoding/binary"
"fmt"
"io"
"regexp"
"time"
"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/utils"
"github.com/pkg/errors"
)
// maxNameLength : the maximum length of a job name, hardcoded in bareos
const maxNameLength = 128
// jobLength : the length of the job result struct
const jobLength = 16 + 4 + 4 + 4 + 4 + 4 + 4 + 4 + 4 + 8 + 8 + 8 + maxNameLength
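// 16 pad bytes + eight 4-byte fields + three 8-byte fields + maxNameLength = 200 bytes.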
var jobNameRegex = regexp.MustCompilePOSIX(`^([-A-Za-z0-9_]+)\.[0-9]{4}-[0-9]{2}-[0-9]{2}.*`)
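// Example (hypothetical job name): "backup-adyxax.2021-04-22_03.05.01_04" matches jobNameRegex,
// with the first submatch capturing "backup-adyxax".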
// jobEntry : A structure to hold a job result from the state file
// This comes from bareos repository file core/src/lib/recent_job_results_list.h:29 and file core/src/lib/recent_job_results_list.cc:44
type jobEntry struct {
_ [16]byte
Errors int32
JobType int32
JobStatus int32
JobLevel int32
JobID uint32
VolSessionID uint32
VolSessionTime uint32
JobFiles uint32
JobBytes uint64
StartTime uint64
EndTime uint64
Job [maxNameLength]byte
}
func (je jobEntry) String() string {
var matches = jobNameRegex.FindSubmatchIndex(je.Job[:])
jobNameLen := utils.Clen(je.Job[:])
if len(matches) >= 4 {
jobNameLen = matches[3]
}
return fmt.Sprintf("Errors: %d, JobType: %c, JobStatus: %c, JobLevel: %c, JobID: %d, VolSessionID: %d, VolSessionTime: %d, JobFiles: %d, JobBytes: %d, StartTime: %s, EndTime: %s, Job: %s",
je.Errors, je.JobType, je.JobStatus, je.JobLevel, je.JobID, je.VolSessionID, je.VolSessionTime, je.JobFiles, je.JobBytes, time.Unix(int64(je.StartTime), 0), time.Unix(int64(je.EndTime), 0), je.Job[:jobNameLen])
}
// ParseJobs parses the jobs in a state file
func ParseJobs(handle io.Reader) (jobs []job.Job, err error) {
// We read how many jobs there are in the state file
data := make([]byte, 4)
n, err := handle.Read(data)
if err != nil {
return nil, errors.Wrap(err, "Corrupted state file")
}
if n != 4 {
return nil, fmt.Errorf("Corrupted state file : invalid numberOfJobs read length")
}
buffer := bytes.NewBuffer(data)
var numberOfJobs uint32
_ = binary.Read(buffer, binary.LittleEndian, &numberOfJobs) // this call cannot fail since we checked the header length
// We parse the job entries
for ; numberOfJobs > 0; numberOfJobs-- {
var (
jobResult jobEntry
jobName string
)
data := make([]byte, jobLength)
n, err = handle.Read(data)
if err != nil {
return nil, errors.Wrap(err, "Corrupted state file")
}
if n != jobLength {
return nil, fmt.Errorf("Corrupted state file : invalid job entry")
}
buffer = bytes.NewBuffer(data)
_ = binary.Read(buffer, binary.LittleEndian, &jobResult) // this call cannot fail since we checked the header length
matches := jobNameRegex.FindSubmatchIndex(jobResult.Job[:])
if len(matches) >= 4 {
jobName = string(jobResult.Job[:matches[3]])
} else {
return nil, fmt.Errorf("Couldn't parse job name, this shouldn't happen : %s", jobResult.Job[:])
}
// If the job is of type backup (B == ascii 66)
if jobResult.JobType == 66 {
// If the job is of status success JobStatus is equals to 84 (T == ascii 84)
jobs = append(jobs, job.Job{Name: jobName, Timestamp: jobResult.StartTime, Success: jobResult.JobStatus == 84})
}
}
return
}
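A sketch combining ParseHeader and ParseJobs. Assumptions on my part, not taken from this commit: the recent job results block is read starting at Header.LastJobsAddr, so a plain file handle is seeked there first, and the state file path is hypothetical:

package main

import (
	"fmt"
	"io"
	"log"
	"os"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/state"
)

func main() {
	// Hypothetical state file path.
	f, err := os.Open("/var/lib/bareos/bareos-fd.9102.state")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()
	header, err := state.ParseHeader(f)
	if err != nil {
		log.Fatal(err)
	}
	// Assumption: the recent job results list starts at LastJobsAddr in the file.
	if _, err := f.Seek(int64(header.LastJobsAddr), io.SeekStart); err != nil {
		log.Fatal(err)
	}
	jobs, err := state.ParseJobs(f)
	if err != nil {
		log.Fatal(err)
	}
	for _, j := range jobs {
		fmt.Printf("%s: success=%v timestamp=%d\n", j.Name, j.Success, j.Timestamp)
	}
}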

175
pkg/state/job_test.go Normal file

@@ -0,0 +1,175 @@
package state
import (
"bytes"
"io"
"reflect"
"testing"
"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
)
func Test_jobEntry_String(t *testing.T) {
var badlyNamedJob [128]byte
copy(badlyNamedJob[:], []byte("job_name"))
var normalJob [128]byte
copy(normalJob[:], []byte("normal_name.2012-06-01"))
type fields struct {
Errors int32
JobType int32
JobStatus int32
JobLevel int32
JobID uint32
VolSessionID uint32
VolSessionTime uint32
JobFiles uint32
JobBytes uint64
StartTime uint64
EndTime uint64
Job [maxNameLength]byte
}
tests := []struct {
name string
fields fields
want string
}{
{
"normal job",
fields{Errors: 1, JobType: 'B', JobStatus: 'T', JobLevel: 'F', JobID: 2, VolSessionID: 3, VolSessionTime: 4, JobFiles: 5, JobBytes: 6, Job: badlyNamedJob},
"Errors: 1, JobType: B, JobStatus: T, JobLevel: F, JobID: 2, VolSessionID: 3, VolSessionTime: 4, JobFiles: 5, JobBytes: 6, StartTime: 1970-01-01 01:00:00 +0100 CET, EndTime: 1970-01-01 01:00:00 +0100 CET, Job: job_name",
},
{
"badly named job",
fields{Errors: 1, JobType: 'B', JobStatus: 'T', JobLevel: 'F', JobID: 2, VolSessionID: 3, VolSessionTime: 4, JobFiles: 5, JobBytes: 6, Job: normalJob},
"Errors: 1, JobType: B, JobStatus: T, JobLevel: F, JobID: 2, VolSessionID: 3, VolSessionTime: 4, JobFiles: 5, JobBytes: 6, StartTime: 1970-01-01 01:00:00 +0100 CET, EndTime: 1970-01-01 01:00:00 +0100 CET, Job: normal_name",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
je := jobEntry{
Errors: tt.fields.Errors,
JobType: tt.fields.JobType,
JobStatus: tt.fields.JobStatus,
JobLevel: tt.fields.JobLevel,
JobID: tt.fields.JobID,
VolSessionID: tt.fields.VolSessionID,
VolSessionTime: tt.fields.VolSessionTime,
JobFiles: tt.fields.JobFiles,
JobBytes: tt.fields.JobBytes,
StartTime: tt.fields.StartTime,
EndTime: tt.fields.EndTime,
Job: tt.fields.Job,
}
if got := je.String(); got != tt.want {
t.Errorf("jobEntry.String() = %v, want %v", got, tt.want)
}
})
}
}
func TestParseJobs(t *testing.T) {
readerEmpty := bytes.NewReader([]byte{})
readerTooSmall := bytes.NewReader([]byte{
1, // number of jobs
})
readerJobError := bytes.NewReader([]byte{
1, 0, 0, 0, // number of jobs
})
readerJobTooSmall := bytes.NewReader([]byte{
1, 0, 0, 0, // number of jobs
0,
})
readerInvalidJobName := bytes.NewReader([]byte{
1, 0, 0, 0, // number of jobs
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // pad
0, 0, 0, 0, // Errors
'B', 0, 0, 0, // JobType
'T', 0, 0, 0, // JobStatus
0, 0, 0, 0, // JobLevel
0, 0, 0, 0, // JobID
0, 0, 0, 0, // VolSessionID
0, 0, 0, 0, // VolSessionTime
0, 0, 0, 0, // JobFiles
0, 0, 0, 0, 0, 0, 0, 0, // JobBytes
1, 0, 0, 0, 0, 0, 0, 0, // StartTime
0, 0, 0, 0, 0, 0, 0, 0, // EndTime
't', 'e', 's', 't', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // Job
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
})
readerZeroJobs := bytes.NewReader([]byte{
0, 0, 0, 0, // number of jobs
})
readerOneNonBackupJob := bytes.NewReader([]byte{
1, 0, 0, 0, // number of jobs
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // pad
0, 0, 0, 0, // Errors
'R', 0, 0, 0, // JobType
'T', 0, 0, 0, // JobStatus
0, 0, 0, 0, // JobLevel
0, 0, 0, 0, // JobID
0, 0, 0, 0, // VolSessionID
0, 0, 0, 0, // VolSessionTime
0, 0, 0, 0, // JobFiles
0, 0, 0, 0, 0, 0, 0, 0, // JobBytes
1, 0, 0, 0, 0, 0, 0, 0, // StartTime
0, 0, 0, 0, 0, 0, 0, 0, // EndTime
't', 'e', 's', 't', '.', '2', '0', '1', '2', '-', '0', '2', '-', '0', '1', 0, // Job
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
})
readerOneSuccessfulBackupJob := bytes.NewReader([]byte{
1, 0, 0, 0, // number of jobs
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // pad
0, 0, 0, 0, // Errors
'B', 0, 0, 0, // JobType
'T', 0, 0, 0, // JobStatus
0, 0, 0, 0, // JobLevel
0, 0, 0, 0, // JobID
0, 0, 0, 0, // VolSessionID
0, 0, 0, 0, // VolSessionTime
0, 0, 0, 0, // JobFiles
0, 0, 0, 0, 0, 0, 0, 0, // JobBytes
1, 0, 0, 0, 0, 0, 0, 0, // StartTime
0, 0, 0, 0, 0, 0, 0, 0, // EndTime
't', 'e', 's', 't', '.', '2', '0', '1', '2', '-', '0', '2', '-', '0', '1', 0, // Job
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
})
type args struct {
handle io.Reader
}
tests := []struct {
name string
args args
wantJobs []job.Job
wantErr bool
}{
{"read empty", args{readerEmpty}, nil, true},
{"read too small", args{readerTooSmall}, nil, true},
{"read job error", args{readerJobError}, nil, true},
{"read job too small", args{readerJobTooSmall}, nil, true},
{"read invalid job name", args{readerInvalidJobName}, nil, true},
{"read zero jobs", args{readerZeroJobs}, nil, false},
{"read one non backup job", args{readerOneNonBackupJob}, nil, false},
{"read one successful backup job", args{readerOneSuccessfulBackupJob}, []job.Job{{Name: "test", Timestamp: 1, Success: true}}, false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotJobs, err := ParseJobs(tt.args.handle)
if (err != nil) != tt.wantErr {
t.Errorf("ParseJobs() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(gotJobs, tt.wantJobs) {
t.Errorf("ParseJobs() = %v, want %v", gotJobs, tt.wantJobs)
}
})
}
}