Uses custom json-iter decoder for log entries.
Previously we were using json.Unmarshal for each line. However, json-iter uses a Pool for each call, and I believe this can increase memory usage: for each line we would put the iterator into a pool to re-use it, and once it is put in the pool, the last data is retained. Since we handle millions of lines, this can cause problems. Using a custom extension, we keep using a pool, but only at the root object, not for each line. On top of that, we now process that JSON payload about 50% faster.

```
❯ benchcmp before.txt after.txt2
benchmark                         old ns/op     new ns/op     delta
Benchmark_DecodePushRequest-16    13509236      6677037       -50.57%

benchmark                         old allocs    new allocs    delta
Benchmark_DecodePushRequest-16    106149        38719         -63.52%

benchmark                         old bytes     new bytes     delta
Benchmark_DecodePushRequest-16    10350362      5222989       -49.54%
```

Signed-off-by: Cyril Tovena <cyril.tovena@gmail.com>
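As a rough illustration of the idea (this sketch is not part of the commit; the example function name and the choice of `jsoniter.ConfigFastest` are assumptions), decoding a whole Loki-style `values` array with a single jsoniter call means the iterator pool is touched once per payload rather than once per line. The `[]Entry` decoder comes from the extension registered in the new file shown in the diff below:

```go
// Hypothetical usage sketch in the same package as the new file below.
package loghttp

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"
)

func ExampleEntry_unmarshal() {
	// Each element is ["<unix nanoseconds>", "<log line>"], as in the push API payload.
	payload := []byte(`[["1592854456000000000","first line"],["1592854457000000000","second line"]]`)

	var entries []Entry
	// The extension registered in init() below supplies the []Entry decoder,
	// so a single Unmarshal call handles every line in the payload.
	if err := jsoniter.ConfigFastest.Unmarshal(payload, &entries); err != nil {
		panic(err)
	}
	fmt.Println(len(entries), entries[1].Line)
	// Expected: 2 second line
}
```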
1 parent c9b85b3, commit 7385735
Showing 9 changed files with 163 additions and 65 deletions.
@@ -0,0 +1,108 @@
package loghttp

import (
	"strconv"
	"time"
	"unsafe"

	jsoniter "github.com/json-iterator/go"
	"github.com/modern-go/reflect2"
)

func init() {
	jsoniter.RegisterExtension(&jsonExtension{})
}

// Entry represents a log entry. It includes a log message and the time it occurred at.
type Entry struct {
	Timestamp time.Time
	Line      string
}

type jsonExtension struct {
	jsoniter.DummyExtension
}

type sliceEntryDecoder struct {
}

// Decode reads a JSON array of ["<unix nanoseconds>", "<line>"] pairs directly
// into the []Entry pointed to by ptr.
func (sliceEntryDecoder) Decode(ptr unsafe.Pointer, iter *jsoniter.Iterator) {
	iter.ReadArrayCB(func(iter *jsoniter.Iterator) bool {
		i := 0
		var ts time.Time
		var line string
		ok := iter.ReadArrayCB(func(iter *jsoniter.Iterator) bool {
			var ok bool
			switch i {
			case 0:
				ts, ok = readTimestamp(iter)
				i++
				return ok
			case 1:
				line = iter.ReadString()
				i++
				if iter.Error != nil {
					return false
				}
				return true
			default:
				iter.ReportError("error reading entry", "array must contain 2 values")
				return false
			}
		})
		if ok {
			*((*[]Entry)(ptr)) = append(*((*[]Entry)(ptr)), Entry{
				Timestamp: ts,
				Line:      line,
			})
			return true
		}
		return false
	})
}

// readTimestamp parses a string of Unix nanoseconds into a time.Time.
func readTimestamp(iter *jsoniter.Iterator) (time.Time, bool) {
	s := iter.ReadString()
	if iter.Error != nil {
		return time.Time{}, false
	}
	t, err := strconv.ParseInt(s, 10, 64)
	if err != nil {
		iter.ReportError("error reading entry timestamp", err.Error())
		return time.Time{}, false
	}
	return time.Unix(0, t), true
}

type entryEncoder struct{}

func (entryEncoder) IsEmpty(ptr unsafe.Pointer) bool {
	// we don't omit-empty with log entries.
	return false
}

// Encode writes an Entry as ["<unix nanoseconds>", "<line>"].
func (entryEncoder) Encode(ptr unsafe.Pointer, stream *jsoniter.Stream) {
	e := *((*Entry)(ptr))
	stream.WriteArrayStart()
	stream.WriteRaw(`"`)
	stream.WriteRaw(strconv.FormatInt(e.Timestamp.UnixNano(), 10))
	stream.WriteRaw(`"`)
	stream.WriteMore()
	stream.WriteStringWithHTMLEscaped(e.Line)
	stream.WriteArrayEnd()
}

func (e *jsonExtension) CreateDecoder(typ reflect2.Type) jsoniter.ValDecoder {
	if typ == reflect2.TypeOf([]Entry{}) {
		return sliceEntryDecoder{}
	}
	return nil
}

func (e *jsonExtension) CreateEncoder(typ reflect2.Type) jsoniter.ValEncoder {
	if typ == reflect2.TypeOf(Entry{}) {
		return entryEncoder{}
	}
	return nil
}
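For the encoding side, a minimal sketch (again not part of this commit; the example function name and use of `jsoniter.ConfigFastest` are assumptions) of the wire format the encoder above should produce:

```go
// Hypothetical round-trip sketch for the entry encoder, in the same package.
package loghttp

import (
	"fmt"
	"time"

	jsoniter "github.com/json-iterator/go"
)

func ExampleEntry_marshal() {
	entries := []Entry{
		{Timestamp: time.Unix(0, 1592854456000000000), Line: "hello world"},
	}
	// The extension registered in init() supplies the Entry encoder, so the
	// slice is written as an array of ["<unix nanoseconds>", "<line>"] pairs.
	out, err := jsoniter.ConfigFastest.Marshal(entries)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
	// Expected (assumption based on the encoder above): [["1592854456000000000","hello world"]]
}
```

Note that the timestamp is emitted as a quoted string of Unix nanoseconds, mirroring what readTimestamp expects on the decode path.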