I have been writing metrics to an NFS mount, then reading them from another server.
In my unit tests everything works fine on a local disk, but not when I deploy.
I was previously using JSON streams on NFS (one object per line), but gob seems like the more idiomatic solution.
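For context, the pattern I am following is one gob.Encoder per file and one Encode call per metric. A minimal, self-contained sketch of that pattern (the Metric type here is illustrative, and I am assuming Serialise just wraps enc.Encode; my real types have more fields):

package main

import (
	"encoding/gob"
	"os"
)

// Metric is a stand-in for my real type; fields are illustrative.
type Metric struct {
	Name  string
	Value float64
}

// Serialise appends the metric to an existing gob stream.
// (Assumption: this mirrors what my real Serialise method does.)
func (m *Metric) Serialise(enc *gob.Encoder) error {
	return enc.Encode(m)
}

func main() {
	f, err := os.Create("/tmp/example.gobs")
	if err != nil {
		panic(err)
	}
	defer f.Close()

	enc := gob.NewEncoder(f)
	m := Metric{Name: "requests", Value: 42}
	if err := m.Serialise(enc); err != nil {
		panic(err)
	}
}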
Since no errors were being thrown and I just ended up with empty files (my files should never be empty), I decided to try os.NewFile with some other file descriptors rather than os.Create.
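My reading of the os docs is that os.NewFile(fd uintptr, name string) wraps an already-open descriptor rather than creating anything on disk, so I suspect I may be misusing it. A minimal sketch of the documented usage, with an illustrative path:

package main

import (
	"fmt"
	"os"
	"syscall"
)

func main() {
	// os.NewFile does not create anything on disk; it wraps a
	// descriptor that is already open, e.g. one from syscall.Open.
	fd, err := syscall.Open("/tmp/example.gobs", syscall.O_CREAT|syscall.O_WRONLY, 0644)
	if err != nil {
		panic(err)
	}
	file := os.NewFile(uintptr(fd), "/tmp/example.gobs")
	if file == nil {
		panic("invalid file descriptor")
	}
	defer file.Close()
	fmt.Fprintln(file, "hello")
}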
Here is my code, which collects the metrics and writes a new file every 5 seconds if there is content.
Basically, I would like to know whether there is a specific file descriptor I should be using when streaming data to NFS like this.
Thanks,
func (client *Client) batchLogs() {
	var received int
	var file *os.File
	var enc *gob.Encoder
	for {
		// Start a new 5-second window.
		t := time.After(5 * time.Second)
		for {
			select {
			case metric := <-client.batchQueue:
				// Lazily open a file for this window on the first metric.
				if enc == nil {
					// This is the os.NewFile experiment described above;
					// previously this was a plain os.Create call.
					file = os.NewFile(
						syscall.O_ASYNC,
						fmt.Sprintf("%s/%s_%v.gobs", client.volumePath, client.clientID, time.Now().UTC().Unix()),
					)
					if file == nil {
						err := fmt.Errorf("FAILED TO MAKE NEW LOG FILE")
						fmt.Println(err)
						panic(err)
					}
					enc = gob.NewEncoder(file)
				}
				received++
				if err := metric.Serialise(enc); err != nil {
					panic(err)
				}
				continue
			case <-t:
				// Window elapsed: close the current file, if one was opened.
				if file != nil {
					if err := file.Close(); err != nil {
						fmt.Println(err)
					}
				}
				enc = nil
				file = nil
			}
			// Timer fired; break to the outer loop and start a new window.
			break
		}
	}
}
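For comparison, here is roughly what the earlier os.Create-based setup looked like, with an explicit file.Sync() before Close added, which as I understand it forces buffered data to stable storage; I have not verified whether that changes the NFS behaviour. The helper names and the standalone Client type are just for this sketch:

package main

import (
	"fmt"
	"os"
	"time"
)

// Client mirrors the fields my real client uses to name log files.
type Client struct {
	volumePath string
	clientID   string
}

// openLogFile is the os.Create-based variant I moved away from.
// os.Create is shorthand for os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666).
func (client *Client) openLogFile() (*os.File, error) {
	name := fmt.Sprintf("%s/%s_%v.gobs",
		client.volumePath, client.clientID, time.Now().UTC().Unix())
	return os.Create(name)
}

// closeLogFile flushes to stable storage before closing, so buffered
// writes are not left sitting in the NFS client's cache.
func closeLogFile(file *os.File) error {
	if err := file.Sync(); err != nil {
		return err
	}
	return file.Close()
}

func main() {
	c := &Client{volumePath: os.TempDir(), clientID: "demo"}
	f, err := c.openLogFile()
	if err != nil {
		panic(err)
	}
	fmt.Fprintln(f, "metric payload goes here")
	if err := closeLogFile(f); err != nil {
		panic(err)
	}
}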