I am trying to measure the compression ratio of a JSON string. Is this program correct? Gzipping the JSON string yields a 94% reduction in size, which seems surprisingly high.
package main

import (
	"bytes"
	"compress/gzip"
	"encoding/json"
	"fmt"
	"strconv"
)

// ByteSize formats a byte count in human-readable units.
type ByteSize float64

const (
	_           = iota // skip the first value
	KB ByteSize = 1 << (10 * iota)
	MB
)
func (b ByteSize) String() string {
	switch {
	case b >= MB:
		return fmt.Sprintf("%.2fMB", b/MB)
	case b >= KB:
		return fmt.Sprintf("%.2fKB", b/KB)
	}
	return fmt.Sprintf("%.2fB", b)
}
type Type struct {
	I int64
	N string
	B bool
	M int
}

type Cont struct {
	I int64
	A int64
	B int64
	C Type
	D string
	E int
	F string
	G int64
	H int
	J int
}
func main() {
	var (
		c  Cont
		tp Type
	)

	// Build a map of 1000 near-identical records keyed by their I field.
	cmap := make(map[string]Cont)
	for i := 0; i < 1000; i++ {
		tp = Type{
			I: int64(8888888888888 + i),
			N: "ASASASDFSFSFFFSFS",
			B: false,
			M: 25,
		}
		c = Cont{
			I: int64(9999999999999 + i),
			A: int64(8888888888888),
			B: int64(9999999999999 + i),
			C: tp,
			D: "A",
			E: 1,
			F: "ASASDASASASASASA",
			G: int64(8888888888888),
			H: 10,
			J: 1,
		}
		cmap[strconv.FormatInt(c.I, 10)] = c
	}

	// Marshal the map to JSON and report the uncompressed size.
	b, err := json.Marshal(&cmap)
	if err != nil {
		panic(err)
	}
	fmt.Printf("Json string : %s \n", ByteSize(len(b)))

	// Gzip the JSON; Close must be called before reading buf.Len()
	// so the remaining data and the gzip footer are flushed.
	buf := new(bytes.Buffer)
	gz := gzip.NewWriter(buf)
	_, err = gz.Write(b)
	if err != nil {
		panic(err)
	}
	err = gz.Close()
	if err != nil {
		panic(err)
	}
	fmt.Printf("Buf : %s \n", ByteSize(buf.Len()))
	fmt.Printf("Savings: %.2f %% \n", 100*(1-(float64(buf.Len())/float64(len(b)))))
}
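To sanity-check the number, I was also thinking of verifying the gzip round trip and trying other compression levels. A rough, untested sketch is below; the helper names compressedSize and roundTrip are just placeholders I made up, and it needs "io" added to the import list:

// compressedSize gzips data at the given level and returns the compressed size in bytes.
func compressedSize(data []byte, level int) int {
	var out bytes.Buffer
	zw, err := gzip.NewWriterLevel(&out, level) // level: gzip.DefaultCompression, gzip.BestCompression, ...
	if err != nil {
		panic(err)
	}
	if _, err := zw.Write(data); err != nil {
		panic(err)
	}
	if err := zw.Close(); err != nil { // Close flushes buffered data and the gzip footer
		panic(err)
	}
	return out.Len()
}

// roundTrip gunzips compressed and reports whether the result equals original.
func roundTrip(original, compressed []byte) bool {
	zr, err := gzip.NewReader(bytes.NewReader(compressed))
	if err != nil {
		panic(err)
	}
	defer zr.Close()
	decoded, err := io.ReadAll(zr) // io.ReadAll needs Go 1.16+; use ioutil.ReadAll on older versions
	if err != nil {
		panic(err)
	}
	return bytes.Equal(original, decoded)
}

Calling these from main after json.Marshal, e.g. compressedSize(b, gzip.BestCompression) and roundTrip(b, buf.Bytes()), should show whether the 94% figure holds at other levels and that the compressed data really decompresses back to the original JSON.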