author | Julien Dessaux | 2021-04-23 16:51:22 +0200 |
---|---|---|
committer | Julien Dessaux | 2021-04-23 16:51:22 +0200 |
commit | 38d9c881b3b5ece91f428c87f0b7bb9efb3e88a8 (patch) | |
tree | d3f7167fae6388f4db35a63e660d1816c9c2943e /pkg/zabbix/zabbix.go | |
parent | Fixed wrongfully hardcoded path in tests. (diff) | |
download | bareos-zabbix-check-38d9c881b3b5ece91f428c87f0b7bb9efb3e88a8.tar.gz bareos-zabbix-check-38d9c881b3b5ece91f428c87f0b7bb9efb3e88a8.tar.bz2 bareos-zabbix-check-38d9c881b3b5ece91f428c87f0b7bb9efb3e88a8.zip |
Updated for go 1.16 modules (tag: 1.2)
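The go.mod file itself is not touched by this diff, so the declaration below is only a sketch of what it presumably contains after the move to Go 1.16 modules, with the module path inferred from the import paths in the new file:

```
module git.adyxax.org/adyxax/bareos-zabbix-check

go 1.16
```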
Diffstat (limited to 'pkg/zabbix/zabbix.go')
-rw-r--r-- | pkg/zabbix/zabbix.go | 105 |
1 file changed, 105 insertions, 0 deletions
diff --git a/pkg/zabbix/zabbix.go b/pkg/zabbix/zabbix.go
new file mode 100644
index 0000000..77fef6d
--- /dev/null
+++ b/pkg/zabbix/zabbix.go
@@ -0,0 +1,105 @@
+package zabbix
+
+import (
+	"fmt"
+	"os"
+	"path/filepath"
+	"time"
+
+	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/job"
+	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/spool"
+	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/state"
+)
+
+const (
+	spoolFileName = "bareos-zabbix-check.spool"
+)
+
+var now = uint64(time.Now().Unix())
+
+// Main is the true main function of this program
+func Main() string {
+	err := processFlags()
+	if err != nil {
+		return fmt.Sprintf("INFO Failed to init program: %s", err)
+	}
+	// Open the state file
+	stateFile, err := os.Open(stateFileName)
+	if err != nil {
+		return fmt.Sprintf("INFO Could not open state file: %s", err)
+	}
+	defer stateFile.Close()
+	// Parse the state file header
+	header, err := state.ParseHeader(stateFile)
+	if err != nil {
+		return fmt.Sprintf("INFO Could not parse state file header: %s", err)
+	}
+	// Seek to the job entries in the state file
+	offset, err := stateFile.Seek(int64(header.LastJobsAddr), 0)
+	if err != nil {
+		return fmt.Sprintf("INFO Could not seek to jobs position in state file: %s", err)
+	}
+	if uint64(offset) != header.LastJobsAddr {
+		return "INFO Truncated state file"
+	}
+	// Then parse the jobs in the state file
+	jobs, err := state.ParseJobs(stateFile)
+	if err != nil {
+		return fmt.Sprintf("INFO Could not parse jobs in state file: %s", err)
+	}
+
+	// Errors in loading the spool file are only checked at the end: if all jobs in the
+	// state file ran successfully and we manage to write a new spool file without errors,
+	// any error here is ignored to avoid false positives while bootstrapping the backups.
+	// Open the spool file
+	spoolFile, spoolErr := os.Open(filepath.Join(workDir, spoolFileName))
+	var spoolJobs []job.Job
+	if spoolErr == nil {
+		defer spoolFile.Close()
+		spoolJobs, spoolErr = spool.Parse(spoolFile)
+	}
+
+	jobs = job.KeepOldestOnly(append(jobs, spoolJobs...))
+
+	// Write the new spool file
+	spoolFile, err = os.Create(filepath.Join(workDir, spoolFileName))
+	if err == nil {
+		defer spoolFile.Close()
+		err = spool.Serialize(spoolFile, jobs)
+	}
+	if err != nil {
+		return fmt.Sprintf("AVERAGE: Error saving the spool file: %s", err)
+	}
+
+	var (
+		errorString   string
+		missingString string
+	)
+	// Build the error and missing job strings
+	for i := 0; i < len(jobs); i++ {
+		job := jobs[i]
+		if job.Success {
+			if job.Timestamp < now-24*3600 {
+				if missingString == "" {
+					missingString = fmt.Sprintf("missing: %s", job.Name)
+				} else {
+					missingString = fmt.Sprintf("%s, %s", missingString, job.Name)
+				}
+			}
+		} else {
+			if errorString == "" {
+				errorString = fmt.Sprintf("errors: %s", job.Name)
+			} else {
+				errorString = fmt.Sprintf("%s, %s", errorString, job.Name)
+			}
+		}
+	}
+	// Finally we output the check result
+	if errorString != "" || missingString != "" {
+		if spoolErr != nil {
+			return fmt.Sprintf("AVERAGE: %s %s %s", errorString, missingString, spoolErr)
+		}
+		return fmt.Sprintf("AVERAGE: %s %s", errorString, missingString)
+	}
+	return "OK"
+}
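The cmd entry point is not part of this diff, so the caller below is only an assumed sketch (the package location and the plain fmt.Println are guesses); it simply reflects that zabbix.Main() returns a single status line, such as "OK", "INFO ..." or "AVERAGE: ...", meant to be captured by a Zabbix item:

```go
package main

import (
	"fmt"

	"git.adyxax.org/adyxax/bareos-zabbix-check/pkg/zabbix"
)

func main() {
	// zabbix.Main does all the work and returns one status line,
	// e.g. "OK" or "AVERAGE: errors: job1 missing: job2"; printing
	// it lets a Zabbix agent item (e.g. a UserParameter) capture it.
	fmt.Println(zabbix.Main())
}
```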