Skip to content

Commit

Permalink
Scrape Go trace profiles as raw bytes
Browse files Browse the repository at this point in the history
  • Loading branch information
povilasv committed May 9, 2020
1 parent 7c0d12e commit 855c7af
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 17 deletions.
28 changes: 22 additions & 6 deletions scrape/scrape.go
Original file line number Diff line number Diff line change
Expand Up @@ -340,7 +340,7 @@ func (sp *scrapePool) sync(targets []*Target) {

// A scraper retrieves samples and accepts a status report at the end.
type scraper interface {
	// scrape fetches the target's profile over HTTP and writes the raw
	// payload to w. scrapeType selects handling: "trace" payloads are
	// copied verbatim, everything else is validated as a pprof profile.
	// NOTE(review): the diff residue here had kept the old two-argument
	// signature alongside the new one; only the new one is valid.
	scrape(ctx context.Context, w io.Writer, scrapeType string) error
	// offset returns this target's jitter within the scrape interval.
	offset(interval time.Duration) time.Duration
}

Expand All @@ -356,7 +356,7 @@ type targetScraper struct {

var userAgentHeader = fmt.Sprintf("conprof/%s", version.Version)

func (s *targetScraper) scrape(ctx context.Context, w io.Writer) error {
func (s *targetScraper) scrape(ctx context.Context, w io.Writer, scrapeType string) error {
if s.req == nil {
req, err := http.NewRequest("GET", s.URL().String(), nil)
if err != nil {
Expand All @@ -378,9 +378,18 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) error {
return fmt.Errorf("server returned HTTP status %s", resp.Status)
}

_, err = profile.Parse(io.TeeReader(resp.Body, w))
if err != nil {
return errors.Wrap(err, "failed to parse target's pprof profile")
switch scrapeType {
case ProfileTraceType:
_, err := io.Copy(w, resp.Body)
if err != nil {
return errors.Wrap(err, "failed to write trace profile")
}

default:
_, err = profile.Parse(io.TeeReader(resp.Body, w))
if err != nil {
return errors.Wrap(err, "failed to parse target's pprof profile")
}
}

return nil
Expand Down Expand Up @@ -473,8 +482,15 @@ mainLoop:

b := sl.buffers.Get(sl.lastScrapeSize).([]byte)
buf := bytes.NewBuffer(b)
var scrapeType string
for _, l := range sl.target.labels {
if l.Name == ProfileType {
scrapeType = l.Value
break
}
}

scrapeErr := sl.scraper.scrape(scrapeCtx, buf)
scrapeErr := sl.scraper.scrape(scrapeCtx, buf, scrapeType)
cancel()

if scrapeErr == nil {
Expand Down
31 changes: 20 additions & 11 deletions scrape/target.go
Original file line number Diff line number Diff line change
Expand Up @@ -208,61 +208,61 @@ func (t *Target) Health() TargetHealth {
// ForProfiles
func LabelsByProfiles(lset labels.Labels, c *config.ProfilingConfig) []labels.Labels {
res := []labels.Labels{}
add := func(cfgs ...config.PprofProfilingConfig) {
add := func(t string, cfgs ...config.PprofProfilingConfig) {
for _, p := range cfgs {
if *p.Enabled {
l := lset.Copy()
l = append(l, labels.Label{Name: ProfilePath, Value: p.Path})
l = append(l, labels.Label{Name: ProfilePath, Value: p.Path}, labels.Label{Name: ProfileType, Value: t})
res = append(res, l)
}
}
}

if c.PprofConfig != nil {
if c.PprofConfig.Allocs != nil {
add(c.PprofConfig.Allocs.PprofProfilingConfig)
add(ProfileAllocsType, c.PprofConfig.Allocs.PprofProfilingConfig)
}
}

if c.PprofConfig != nil {
if c.PprofConfig.Block != nil {
add(c.PprofConfig.Block.PprofProfilingConfig)
add(ProfileBlockType, c.PprofConfig.Block.PprofProfilingConfig)
}
}

if c.PprofConfig != nil {
if c.PprofConfig.Goroutine != nil {
add(c.PprofConfig.Goroutine.PprofProfilingConfig)
add(ProfileGoroutineType, c.PprofConfig.Goroutine.PprofProfilingConfig)
}
}

if c.PprofConfig != nil {
	if c.PprofConfig.Heap != nil {
		// Bug fix: the original passed ProfileGoroutineType here (a
		// copy-paste slip from the Goroutine case above), which would
		// label every heap scrape as a goroutine profile and leave
		// ProfileHeapType unused. Heap targets must carry "heap".
		add(ProfileHeapType, c.PprofConfig.Heap.PprofProfilingConfig)
	}
}

if c.PprofConfig != nil {
if c.PprofConfig.Mutex != nil {
add(c.PprofConfig.Mutex.PprofProfilingConfig)
add(ProfileMutexType, c.PprofConfig.Mutex.PprofProfilingConfig)
}
}

if c.PprofConfig != nil {
if c.PprofConfig.Profile != nil {
add(c.PprofConfig.Profile.PprofProfilingConfig)
add(ProfileProfileType, c.PprofConfig.Profile.PprofProfilingConfig)
}
}

if c.PprofConfig != nil {
if c.PprofConfig.Threadcreate != nil {
add(c.PprofConfig.Threadcreate.PprofProfilingConfig)
add(ProfileThreadCreateType, c.PprofConfig.Threadcreate.PprofProfilingConfig)
}
}

if c.PprofConfig != nil {
if c.PprofConfig.Trace != nil {
add(c.PprofConfig.Trace.PprofProfilingConfig)
add(ProfileTraceType, c.PprofConfig.Trace.PprofProfilingConfig)
}
}

Expand All @@ -277,7 +277,16 @@ func (ts Targets) Less(i, j int) bool { return ts[i].URL().String() < ts[j].URL(
func (ts Targets) Swap(i, j int) { ts[i], ts[j] = ts[j], ts[i] }

// Label names attached to scrape targets, and the well-known values the
// profile_type label may take.
const (
	// ProfilePath is the label holding the HTTP path a profile is scraped from.
	ProfilePath = "profile_path"
	// ProfileType is the label holding one of the profile type values below.
	// (The diff residue had kept a stale duplicate ProfilePath line here,
	// which would be a duplicate-constant compile error; it is removed.)
	ProfileType = "profile_type"

	ProfileAllocsType       = "allocs"
	ProfileBlockType        = "block"
	ProfileGoroutineType    = "goroutine"
	ProfileHeapType         = "heap"
	ProfileMutexType        = "mutex"
	ProfileProfileType      = "profile"
	ProfileThreadCreateType = "threadcreate"
	ProfileTraceType        = "trace"
)

// populateLabels builds a label set from the given label set and scrape configuration.
Expand Down

0 comments on commit 855c7af

Please sign in to comment.