path: root/pkg/zabbix/zabbix.go
blob: e4be0031b64152015e6e5f437f9e237511b069c1 (plain)
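// Package zabbix implements the bareos-zabbix-check logic: it reads the bareos state file, keeps a
// spool of the jobs already seen and builds the status string reported to zabbix.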
package zabbix

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/spool"
	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/state"
)

const (
	spoolFileName = "bareos-zabbix-check.spool"
)

// now is captured once at program startup and is the reference point for the 24 hour freshness check
var now = uint64(time.Now().Unix())

// Main is the real entry point of this program: it parses the bareos state file, merges the jobs found
// there with the local spool file, and returns a status string suitable for a zabbix item
func Main() string {
	err := processFlags()
	if err != nil {
		return fmt.Sprintf("INFO Failed to init program : %s", err)
	}
	// Open the state file
	stateFile, err := os.Open(stateFileName)
	if err != nil {
		return fmt.Sprintf("INFO Could not open state file : %s", err)
	}
	defer stateFile.Close()
	// parse the state file
	header, err := state.ParseHeader(stateFile)
	if err != nil {
		return fmt.Sprintf("INFO Could not parse state file header : %s", err)
	}
	// seek to the job entries in the state file
	offset, err := stateFile.Seek(int64(header.LastJobsAddr), 0)
	if err != nil {
		return fmt.Sprintf("INFO Couldn't seek to jobs position in state file : %s", err)
	}
	if uint64(offset) != header.LastJobsAddr {
		return "INFO Truncated state file"
	}
	// Then parse the jobs in the state file
	jobs, err := state.ParseJobs(stateFile)
	if err != nil {
		return fmt.Sprintf("INFO Could not parse jobs in state file : %s", err)
	}

	// Errors while loading the spool file are only checked at the very end: if all the jobs in the state
	// file ran successfully and a new spool file could be written, any error here is ignored in order to
	// avoid false positives while bootstrapping the backups
	// Open the spool file
	spoolFile, spoolErr := os.Open(filepath.Join(workDir, spoolFileName))
	var spoolJobs []job.Job
	if spoolErr == nil {
		defer spoolFile.Close()
		spoolJobs, spoolErr = spool.Parse(spoolFile)
	}

	// Merge the jobs from the state file with those from the previous spool
	jobs = job.KeepOldestOnly(append(jobs, spoolJobs...))

	// write the merged list back as the new spool file
	spoolFile, err = os.Create(filepath.Join(workDir, spoolFileName))
	if err == nil {
		defer spoolFile.Close()
		err = spool.Serialize(spoolFile, jobs)
	}
	if err != nil {
		return fmt.Sprintf("AVERAGE: Error saving the spool file : %s\n", err)
	}

	var (
		errorString   string
		missingString string
	)
	// Build the error strings: failed jobs are reported as errors, successful jobs whose last run is
	// older than 24 hours are reported as missing
	for _, job := range jobs {
		if job.Success {
			if job.Timestamp < now-24*3600 {
				if missingString == "" {
					missingString = fmt.Sprintf("missing: %s", job.Name)
				} else {
					missingString = fmt.Sprintf("%s, %s", missingString, job.Name)
				}
			}
		} else {
			if errorString == "" {
				errorString = fmt.Sprintf("errors: %s", job.Name)
			} else {
				errorString = fmt.Sprintf("%s, %s", errorString, job.Name)
			}
		}
	}
	// Finally we output
	if errorString != "" || missingString != "" {
		if spoolErr != nil {
			return fmt.Sprintf("AVERAGE: %s %s %s", errorString, missingString, spoolErr)
		}
		return fmt.Sprintf("AVERAGE: %s %s", errorString, missingString)
	}
	return "OK"
}
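
// A minimal usage sketch, assuming a hypothetical cmd/bareos-zabbix-check main package (that package
// name and location are assumptions, not shown in this file): the string returned by Main is simply
// printed so the zabbix agent can pick it up.
//
//	package main
//
//	import (
//		"fmt"
//
//		"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/zabbix"
//	)
//
//	func main() {
//		// print the check result on stdout for the zabbix agent
//		fmt.Println(zabbix.Main())
//	}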