
Remove ExperimentalArrow flag #3884

Merged · 2 commits · Sep 26, 2023
README.md: 3 changes (0 additions, 3 deletions)

@@ -160,9 +160,6 @@ Flags:
       --external-label=KEY=VALUE;...
                              Label(s) to attach to all profiles in
                              scraper-only mode.
-      --experimental-arrow   EXPERIMENTAL: Enables Arrow ingestion, this
-                             will reduce CPU usage but will increase memory
-                             usage.
 ```
 <!-- prettier-ignore-end -->

go.mod: 2 changes (1 addition, 1 deletion)

@@ -29,7 +29,7 @@ require (
 	github.com/oklog/run v1.1.0
 	github.com/olekukonko/tablewriter v0.0.5
 	github.com/parquet-go/parquet-go v0.18.0
-	github.com/polarsignals/frostdb v0.0.0-20230913161601-5b173f20ed72
+	github.com/polarsignals/frostdb v0.0.0-20230926084601-c9100f2ac9c7
 	github.com/prometheus/client_golang v1.16.0
 	github.com/prometheus/common v0.44.0
 	github.com/prometheus/prometheus v0.47.0
go.sum: 4 changes (2 additions, 2 deletions)

@@ -755,8 +755,8 @@ github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6J
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/polarsignals/frostdb v0.0.0-20230913161601-5b173f20ed72 h1:L3y6vPOusdrYnh5GKSH4DpQdwSeFcaJITNfOF13/1TQ=
-github.com/polarsignals/frostdb v0.0.0-20230913161601-5b173f20ed72/go.mod h1:fT7khtHWo/cEHSi1PgAnBJFMoBwB4T2NG6YCqVY0T1s=
+github.com/polarsignals/frostdb v0.0.0-20230926084601-c9100f2ac9c7 h1:w1H1GpnYgbk5WddP7IljZajFlwOIHqUShNXz0IDMGXI=
+github.com/polarsignals/frostdb v0.0.0-20230926084601-c9100f2ac9c7/go.mod h1:fT7khtHWo/cEHSi1PgAnBJFMoBwB4T2NG6YCqVY0T1s=
 github.com/polarsignals/wal v0.0.0-20230809151629-4d4e3eac6d40 h1:3kD5F5BBrnv2SAnBV7LGrXAhhAL+pZRkk++D4wrAH2c=
 github.com/polarsignals/wal v0.0.0-20230809151629-4d4e3eac6d40/go.mod h1:EVDHAAe+7GQ33A1/x+/gE+sBPN4toQ0XG5RoLD49xr8=
 github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
pkg/parca/parca.go: 5 changes (0 additions, 5 deletions)

@@ -119,8 +119,6 @@ type Flags struct {
 	InsecureSkipVerify bool              `kong:"help='Skip TLS certificate verification.'"`
 	ExternalLabel      map[string]string `kong:"help='Label(s) to attach to all profiles in scraper-only mode.'"`
 
-	ExperimentalArrow bool `default:"false" help:"EXPERIMENTAL: Enables Arrow ingestion, this will reduce CPU usage but will increase memory usage."`
-
 	Hidden FlagsHidden `embed:"" prefix:""`
 }
 
@@ -192,9 +190,6 @@ func Run(ctx context.Context, logger log.Logger, reg *prometheus.Registry, flags
 		}
 	}
 
-	// Enable arrow ingestion
-	parcacol.ExperimentalArrow = flags.ExperimentalArrow
-
 	if flags.Port != "" {
 		level.Warn(logger).Log("msg", "flag --port is deprecated, use --http-address instead")
 		flags.HTTPAddress = flags.Port
pkg/parcacol/ingest.go: 50 changes (15 additions, 35 deletions)

@@ -41,13 +41,10 @@ import (
 	"github.com/parca-dev/parca/pkg/profile"
 )
 
-var ExperimentalArrow bool
-
 var ErrMissingNameLabel = errors.New("missing __name__ label")
 
 type Table interface {
 	Schema() *dynparquet.Schema
-	Insert(context.Context, []byte) (tx uint64, err error)
 	InsertRecord(context.Context, arrow.Record) (tx uint64, err error)
 }
 
@@ -131,43 +128,26 @@ func (ing NormalizedIngester) Ingest(ctx context.Context, series []Series) error
 
 	pBuf.Sort()
 
-	// Experimental feature that ingests profiles as arrow records.
-	if ExperimentalArrow {
-		// Read sorted rows into an arrow record
-		records, err := ParquetBufToArrowRecord(ctx, pBuf.Buffer, 0)
-		if err != nil {
-			return err
-		}
-		defer func() {
-			for _, record := range records {
-				record.Release()
-			}
-		}()
-
-		for _, record := range records {
-			if record.NumRows() == 0 {
-				return nil
-			}
-
-			if _, err := ing.table.InsertRecord(ctx, record); err != nil {
-				return err
-			}
-		}
-		return nil
-	}
-
-	buf := ing.bufferPool.Get().(*bytes.Buffer)
-	buf.Reset()
-	defer ing.bufferPool.Put(buf)
-
-	if err := ing.schema.SerializeBuffer(buf, pBuf.Buffer); err != nil {
-		return err
-	}
-
-	if _, err := ing.table.Insert(ctx, buf.Bytes()); err != nil {
-		return err
-	}
+	// Read sorted rows into an arrow record
+	records, err := ParquetBufToArrowRecord(ctx, pBuf.Buffer, 0)
+	if err != nil {
+		return err
+	}
+	defer func() {
+		for _, record := range records {
+			record.Release()
+		}
+	}()
+
+	for _, record := range records {
+		if record.NumRows() == 0 {
+			return nil
+		}
+
+		if _, err := ing.table.InsertRecord(ctx, record); err != nil {
+			return err
+		}
+	}
 
 	return nil
 }
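Note (not part of the diff): with the parquet `Insert` method removed, `Table` is now Arrow-only. The sketch below shows what a minimal in-memory implementation of that interface could look like; it is an illustration under assumptions, not code from this PR. The `memTable` name and its fields are hypothetical, and the Arrow import path should be adjusted to whatever Arrow Go version the repository's go.mod pins.

```go
package sketch

import (
	"context"
	"sync"

	"github.com/apache/arrow/go/v13/arrow" // assumption: match the Arrow version in go.mod
	"github.com/polarsignals/frostdb/dynparquet"
)

// memTable is a hypothetical, illustration-only Table implementation that
// keeps inserted records in memory. Ingest releases every record once it
// returns, so a record kept beyond the call must be retained first.
type memTable struct {
	mu      sync.Mutex
	schema  *dynparquet.Schema
	tx      uint64
	records []arrow.Record
}

func (t *memTable) Schema() *dynparquet.Schema { return t.schema }

func (t *memTable) InsertRecord(ctx context.Context, r arrow.Record) (uint64, error) {
	t.mu.Lock()
	defer t.mu.Unlock()

	r.Retain() // keep the record alive after the caller's deferred Release
	t.records = append(t.records, r)
	t.tx++
	return t.tx, nil
}
```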

pkg/parcacol/ingest_test.go: 232 changes (98 additions, 134 deletions)

@@ -96,81 +96,63 @@ func TestPprofToParquet(t *testing.T) {
 	fileContent, err := os.ReadFile("../query/testdata/alloc_objects.pb.gz")
 	require.NoError(t, err)
 
-	tests := map[string]struct {
-		arrow bool
-	}{
-		"parquet": {false},
-		"arrow":   {true},
-	}
-
-	for name, test := range tests {
-		t.Run(name, func(t *testing.T) {
-			if test.arrow {
-				ExperimentalArrow = true
-				t.Cleanup(func() {
-					ExperimentalArrow = false
-				})
-			}
-
-			table := &fakeTable{
-				schema: schema,
-			}
-			req := &profilestorepb.WriteRawRequest{
-				Series: []*profilestorepb.RawProfileSeries{{
-					Labels: &profilestorepb.LabelSet{
-						Labels: []*profilestorepb.Label{
-							{
-								Name:  "__name__",
-								Value: "memory",
-							},
-							{
-								Name:  "job",
-								Value: "default",
-							},
-						},
-					},
-					Samples: []*profilestorepb.RawSample{{
-						RawProfile: fileContent,
-					}},
-				}},
-			}
-			err := NormalizedIngest(
-				ctx,
-				counter,
-				req,
-				logger,
-				table,
-				schema,
-				metastore,
-				&sync.Pool{
-					New: func() interface{} {
-						return bytes.NewBuffer(nil)
-					},
-				},
-				true,
-			)
-			require.NoError(t, err)
-
-			for i, insert := range table.inserts {
-				serBuf, err := dynparquet.ReaderFromBytes(insert)
-				require.NoError(t, err)
-
-				rows := serBuf.Reader()
-				rowBuf := []parquet.Row{{}}
-				for {
-					_, err := rows.ReadRows(rowBuf)
-					if err == io.EOF {
-						break
-					}
-					if err != io.EOF {
-						if err != nil {
-							require.NoError(t, os.WriteFile(fmt.Sprintf("test-%d.parquet", i), insert, 0o777))
-						}
-						require.NoError(t, err)
-					}
-				}
-			}
-		})
-	}
+	table := &fakeTable{
+		schema: schema,
+	}
+	req := &profilestorepb.WriteRawRequest{
+		Series: []*profilestorepb.RawProfileSeries{{
+			Labels: &profilestorepb.LabelSet{
+				Labels: []*profilestorepb.Label{
+					{
+						Name:  "__name__",
+						Value: "memory",
+					},
+					{
+						Name:  "job",
+						Value: "default",
+					},
+				},
+			},
+			Samples: []*profilestorepb.RawSample{{
+				RawProfile: fileContent,
+			}},
+		}},
+	}
+	err = NormalizedIngest(
+		ctx,
+		counter,
+		req,
+		logger,
+		table,
+		schema,
+		metastore,
+		&sync.Pool{
+			New: func() interface{} {
+				return bytes.NewBuffer(nil)
+			},
+		},
+		true,
+	)
+	require.NoError(t, err)
+
+	for i, insert := range table.inserts {
+		serBuf, err := dynparquet.ReaderFromBytes(insert)
+		require.NoError(t, err)
+
+		rows := serBuf.Reader()
+		rowBuf := []parquet.Row{{}}
+		for {
+			_, err := rows.ReadRows(rowBuf)
+			if err == io.EOF {
+				break
+			}
+			if err != io.EOF {
+				if err != nil {
+					require.NoError(t, os.WriteFile(fmt.Sprintf("test-%d.parquet", i), insert, 0o777))
+				}
+				require.NoError(t, err)
+			}
+		}
+	}
 }
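Aside (illustrative, not from this PR): the row-reading loop in these tests stops on `io.EOF`, and `ReadRows` can return the final row together with `io.EOF`. A hedged, standalone sketch of that pattern against the parquet-go API follows; `countRows` is a hypothetical helper, not part of the package.

```go
package sketch

import (
	"io"

	parquet "github.com/parquet-go/parquet-go"
)

// countRows drains a RowReader one row at a time and returns how many rows
// were read, mirroring the ReadRows/io.EOF loop used in the tests.
func countRows(rows parquet.RowReader) (int, error) {
	buf := []parquet.Row{{}}
	total := 0
	for {
		n, err := rows.ReadRows(buf)
		total += n
		if err == io.EOF {
			// ReadRows may hand back the last row together with io.EOF.
			return total, nil
		}
		if err != nil {
			return total, err
		}
	}
}
```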

@@ -205,81 +187,63 @@ func TestUncompressedPprofToParquet(t *testing.T) {
 	require.NoError(t, err)
 	require.NoError(t, r.Close())
 
-	tests := map[string]struct {
-		arrow bool
-	}{
-		"parquet": {false},
-		"arrow":   {true},
-	}
-
-	for name, test := range tests {
-		t.Run(name, func(t *testing.T) {
-			if test.arrow {
-				ExperimentalArrow = true
-				t.Cleanup(func() {
-					ExperimentalArrow = false
-				})
-			}
-
-			table := &fakeTable{
-				schema: schema,
-			}
-			req := &profilestorepb.WriteRawRequest{
-				Series: []*profilestorepb.RawProfileSeries{{
-					Labels: &profilestorepb.LabelSet{
-						Labels: []*profilestorepb.Label{
-							{
-								Name:  "__name__",
-								Value: "memory",
-							},
-							{
-								Name:  "job",
-								Value: "default",
-							},
-						},
-					},
-					Samples: []*profilestorepb.RawSample{{
-						RawProfile: fileContent,
-					}},
-				}},
-			}
-			err := NormalizedIngest(
-				ctx,
-				counter,
-				req,
-				logger,
-				table,
-				schema,
-				metastore,
-				&sync.Pool{
-					New: func() interface{} {
-						return bytes.NewBuffer(nil)
-					},
-				},
-				true,
-			)
-			require.NoError(t, err)
-
-			for i, insert := range table.inserts {
-				serBuf, err := dynparquet.ReaderFromBytes(insert)
-				require.NoError(t, err)
-
-				rows := serBuf.Reader()
-				rowBuf := []parquet.Row{{}}
-				for {
-					_, err := rows.ReadRows(rowBuf)
-					if err == io.EOF {
-						break
-					}
-					if err != io.EOF {
-						if err != nil {
-							require.NoError(t, os.WriteFile(fmt.Sprintf("test-%d.parquet", i), insert, 0o777))
-						}
-						require.NoError(t, err)
-					}
-				}
-			}
-		})
-	}
+	table := &fakeTable{
+		schema: schema,
+	}
+	req := &profilestorepb.WriteRawRequest{
+		Series: []*profilestorepb.RawProfileSeries{{
+			Labels: &profilestorepb.LabelSet{
+				Labels: []*profilestorepb.Label{
+					{
+						Name:  "__name__",
+						Value: "memory",
+					},
+					{
+						Name:  "job",
+						Value: "default",
+					},
+				},
+			},
+			Samples: []*profilestorepb.RawSample{{
+				RawProfile: fileContent,
+			}},
+		}},
+	}
+	err = NormalizedIngest(
+		ctx,
+		counter,
+		req,
+		logger,
+		table,
+		schema,
+		metastore,
+		&sync.Pool{
+			New: func() interface{} {
+				return bytes.NewBuffer(nil)
+			},
+		},
+		true,
+	)
+	require.NoError(t, err)
+
+	for i, insert := range table.inserts {
+		serBuf, err := dynparquet.ReaderFromBytes(insert)
+		require.NoError(t, err)
+
+		rows := serBuf.Reader()
+		rowBuf := []parquet.Row{{}}
+		for {
+			_, err := rows.ReadRows(rowBuf)
+			if err == io.EOF {
+				break
+			}
+			if err != io.EOF {
+				if err != nil {
+					require.NoError(t, os.WriteFile(fmt.Sprintf("test-%d.parquet", i), insert, 0o777))
+				}
+				require.NoError(t, err)
+			}
+		}
+	}
 }
