Big refactoring: code split into several modules and some other best practices
parent e07ce016c4
commit bea8e5aba8
16 changed files with 506 additions and 386 deletions
spool/load.go (new file, 40 lines)
@@ -0,0 +1,40 @@
package spool

import (
	"bareos-zabbix-check/config"
	"bareos-zabbix-check/job"
	"encoding/csv"
	"fmt"
	"log"
	"os"
	"path"
	"strconv"
)

// Load loads a spool file in path
func (s *Spool) Load(c *config.Config) (err error) {
	s.config = c
	// We read the spool
	file, err := os.Open(path.Join(c.WorkDir(), spoolFile))
	if err != nil {
		return fmt.Errorf("Couldn't open spool file, starting from scratch: %s", err)
	}
	defer file.Close()
	lines, err := csv.NewReader(file).ReadAll()
	if err != nil {
		return fmt.Errorf("Corrupted spool file, starting from scratch : %s", err)
	}
	if c.Verbose() {
		log.Printf("Spool file content : %v\n", lines)
	}

	for _, line := range lines {
		var i int
		i, err = strconv.Atoi(line[1])
		if err != nil {
			return fmt.Errorf("Corrupted spool file : couldn't parse timestamp entry")
		}
		s.jobs = append(s.jobs, job.Job{Name: line[0], Timestamp: uint64(i), Success: true})
	}
	return
}
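A minimal sketch of the intended call pattern, assuming a caller that already holds a parsed *config.Config (the checkdemo package and loadSpool helper below are illustrative, not part of this commit). A failed Load is logged rather than treated as fatal, since its error messages say "starting from scratch":

// Package checkdemo is purely illustrative and not part of the repository.
package checkdemo

import (
	"log"

	"bareos-zabbix-check/config"
	"bareos-zabbix-check/spool"
)

// loadSpool loads the spool file from cfg's work dir. Errors are logged and
// ignored so the check can continue from an empty job list.
func loadSpool(cfg *config.Config) *spool.Spool {
	s := &spool.Spool{}
	if err := s.Load(cfg); err != nil {
		log.Println(err)
	}
	return s
}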
spool/save.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package spool

import (
	"encoding/csv"
	"fmt"
	"os"
	"path"
)

// Save writes a spool on the disk
func (s *Spool) Save() (err error) {
	file, err := os.Create(path.Join(s.config.WorkDir(), spoolFile))
	if err != nil {
		return
	}
	defer file.Close()

	lines := make([][]string, len(s.jobs))
	var i int = 0
	for _, job := range s.jobs {
		lines[i] = make([]string, 2)
		lines[i][0] = job.Name
		lines[i][1] = fmt.Sprintf("%d", job.Timestamp)
		i++
	}
	err = csv.NewWriter(file).WriteAll(lines)
	return
}
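For reference, a standalone sketch (not from the commit) of the on-disk record format that Save produces and Load parses back: one CSV record per job, the name in the first field and the timestamp printed with %d in the second. The job name and timestamp below are made-up values:

package main

import (
	"encoding/csv"
	"fmt"
	"os"
)

func main() {
	// Mirrors Save's encoding: each job becomes a ["name", "timestamp"] record.
	records := [][]string{
		{"backup-srv1", fmt.Sprintf("%d", 1585128000)}, // hypothetical job
	}
	// Prints: backup-srv1,1585128000
	if err := csv.NewWriter(os.Stdout).WriteAll(records); err != nil {
		panic(err)
	}
}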
spool/spool.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package spool

import (
	"bareos-zabbix-check/config"
	"bareos-zabbix-check/job"
)

const (
	spoolFile = "bareos-zabbix-check.spool"
)

// Spool is an object for manipulating a bareos spool file
type Spool struct {
	config *config.Config
	jobs   []job.Job
}

// Jobs exports a spool to a jobs list
func (s *Spool) Jobs() []job.Job {
	return s.jobs
}

// SetJobs sets a jobs list
func (s *Spool) SetJobs(jobs []job.Job) {
	s.jobs = jobs
}
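And a hedged sketch of the write path through the exported surface (SetJobs then Save); the persist helper and its checkdemo package are assumptions for illustration only:

// Illustrative helper, not part of this commit.
package checkdemo

import (
	"bareos-zabbix-check/job"
	"bareos-zabbix-check/spool"
)

// persist replaces the spool's job list and writes it back to
// <workdir>/bareos-zabbix-check.spool. The spool must have been Load()ed
// first, since Load is the only place that sets the private config field.
func persist(s *spool.Spool, jobs []job.Job) error {
	s.SetJobs(jobs)
	return s.Save()
}

Note that Save reads the work dir from the private config set by Load, so it only works on a spool that was loaded first.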