diff --git a/account/account.go b/account/account.go index 2f27b61efab..25a504ca23f 100644 --- a/account/account.go +++ b/account/account.go @@ -8,7 +8,7 @@ import ( jsonpatch "github.com/evanphx/json-patch" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/stored_requests" ) @@ -20,7 +20,7 @@ func GetAccount(ctx context.Context, cfg *config.Configuration, fetcher stored_r Message: fmt.Sprintf("Prebid-server has disabled Account ID: %s, please reach out to the prebid server host.", accountID), }} } - if cfg.AccountRequired && accountID == pbsmetrics.PublisherUnknown { + if cfg.AccountRequired && accountID == metrics.PublisherUnknown { return nil, []error{&errortypes.AcctRequired{ Message: fmt.Sprintf("Prebid-server has been configured to discard requests without a valid Account ID. Please reach out to the prebid server host."), }} diff --git a/account/account_test.go b/account/account_test.go index 0d192f18510..75c48a02d89 100644 --- a/account/account_test.go +++ b/account/account_test.go @@ -8,7 +8,7 @@ import ( "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/stored_requests" "github.com/stretchr/testify/assert" ) @@ -29,7 +29,7 @@ func (af mockAccountFetcher) FetchAccount(ctx context.Context, accountID string) } func TestGetAccount(t *testing.T) { - unknown := pbsmetrics.PublisherUnknown + unknown := metrics.PublisherUnknown testCases := []struct { accountID string // account_required diff --git a/adapters/appnexus/appnexus.go b/adapters/appnexus/appnexus.go index 8c3c61a9c35..e6398b4a010 100644 --- a/adapters/appnexus/appnexus.go +++ b/adapters/appnexus/appnexus.go @@ -20,8 +20,8 @@ import ( "github.com/mxmCherry/openrtb" "github.com/prebid/prebid-server/adapters" "github.com/prebid/prebid-server/errortypes" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" ) const defaultPlatformID int = 5 @@ -336,9 +336,9 @@ func (a *AppNexusAdapter) MakeRequests(request *openrtb.BidRequest, reqInfo *ada // Add Appnexus request level extension var isAMP, isVIDEO int - if reqInfo.PbsEntryPoint == pbsmetrics.ReqTypeAMP { + if reqInfo.PbsEntryPoint == metrics.ReqTypeAMP { isAMP = 1 - } else if reqInfo.PbsEntryPoint == pbsmetrics.ReqTypeVideo { + } else if reqInfo.PbsEntryPoint == metrics.ReqTypeVideo { isVIDEO = 1 } diff --git a/adapters/info.go b/adapters/info.go index d6ad1800150..7f3ad9c3af0 100644 --- a/adapters/info.go +++ b/adapters/info.go @@ -8,8 +8,8 @@ import ( "github.com/golang/glog" "github.com/mxmCherry/openrtb" "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" yaml "gopkg.in/yaml.v2" ) @@ -258,5 +258,5 @@ func parseBidderInfo(info BidderInfo) parsedBidderInfo { } type ExtraRequestInfo struct { - PbsEntryPoint pbsmetrics.RequestType + PbsEntryPoint metrics.RequestType } diff --git a/adapters/invibes/invibes.go b/adapters/invibes/invibes.go index 31e389d37a9..31124bd108f 100644 --- a/adapters/invibes/invibes.go +++ b/adapters/invibes/invibes.go @@ -14,8 +14,8 @@ import ( "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" 
"github.com/prebid/prebid-server/macros" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" ) const adapterVersion = "prebid_1.0.0" @@ -133,7 +133,7 @@ func (a *InvibesAdapter) MakeRequests(request *openrtb.BidRequest, reqInfo *adap } invibesInternalParams.TestLog = invibesExt.Debug.TestLog } - if reqInfo.PbsEntryPoint == pbsmetrics.ReqTypeAMP { + if reqInfo.PbsEntryPoint == metrics.ReqTypeAMP { invibesInternalParams.IsAMP = true } diff --git a/endpoints/auction.go b/endpoints/auction.go index 4364b0bf152..0604c224458 100644 --- a/endpoints/auction.go +++ b/endpoints/auction.go @@ -18,9 +18,9 @@ import ( "github.com/prebid/prebid-server/errortypes" "github.com/prebid/prebid-server/exchange" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" "github.com/prebid/prebid-server/pbs" - "github.com/prebid/prebid-server/pbsmetrics" pbc "github.com/prebid/prebid-server/prebid_cache_client" "github.com/prebid/prebid-server/privacy" gdprPrivacy "github.com/prebid/prebid-server/privacy/gdpr" @@ -60,12 +60,12 @@ type auction struct { cfg *config.Configuration syncers map[openrtb_ext.BidderName]usersync.Usersyncer gdprPerms gdpr.Permissions - metricsEngine pbsmetrics.MetricsEngine + metricsEngine metrics.MetricsEngine dataCache cache.Cache exchanges map[string]adapters.Adapter } -func Auction(cfg *config.Configuration, syncers map[openrtb_ext.BidderName]usersync.Usersyncer, gdprPerms gdpr.Permissions, metricsEngine pbsmetrics.MetricsEngine, dataCache cache.Cache, exchanges map[string]adapters.Adapter) httprouter.Handle { +func Auction(cfg *config.Configuration, syncers map[openrtb_ext.BidderName]usersync.Usersyncer, gdprPerms gdpr.Permissions, metricsEngine metrics.MetricsEngine, dataCache cache.Cache, exchanges map[string]adapters.Adapter) httprouter.Handle { a := &auction{ cfg: cfg, syncers: syncers, @@ -89,7 +89,7 @@ func (a *auction) auction(w http.ResponseWriter, r *http.Request, _ httprouter.P glog.Infof("Failed to parse /auction request: %v", err) } writeAuctionError(w, "Error parsing request", err) - labels.RequestStatus = pbsmetrics.RequestStatusBadInput + labels.RequestStatus = metrics.RequestStatusBadInput return } status := "OK" @@ -102,7 +102,7 @@ func (a *auction) auction(w http.ResponseWriter, r *http.Request, _ httprouter.P glog.Infof("Invalid account id: %v", err) } writeAuctionError(w, "Unknown account id", fmt.Errorf("Unknown account")) - labels.RequestStatus = pbsmetrics.RequestStatusBadInput + labels.RequestStatus = metrics.RequestStatusBadInput return } labels.PubID = req.AccountID @@ -116,19 +116,19 @@ func (a *auction) auction(w http.ResponseWriter, r *http.Request, _ httprouter.P for _, bidder := range req.Bidders { if ex, ok := a.exchanges[bidder.BidderCode]; ok { // Make sure we have an independent label struct for each bidder. We don't want to run into issues with the goroutine below. 
- blabels := pbsmetrics.AdapterLabels{ + blabels := metrics.AdapterLabels{ Source: labels.Source, RType: labels.RType, Adapter: openrtb_ext.BidderName(bidder.BidderCode), PubID: labels.PubID, CookieFlag: labels.CookieFlag, - AdapterBids: pbsmetrics.AdapterBidPresent, + AdapterBids: metrics.AdapterBidPresent, } if skip := a.processUserSync(req, bidder, blabels, ex, &ctx); skip == true { continue } sentBids++ - bidderRunner := a.recoverSafely(func(bidder *pbs.PBSBidder, aLabels pbsmetrics.AdapterLabels) { + bidderRunner := a.recoverSafely(func(bidder *pbs.PBSBidder, aLabels metrics.AdapterLabels) { start := time.Now() bidList, err := ex.Call(ctx, req, bidder) @@ -158,7 +158,7 @@ func (a *auction) auction(w http.ResponseWriter, r *http.Request, _ httprouter.P } if err := cacheAccordingToMarkup(req, &resp, ctx, a, &labels); err != nil { writeAuctionError(w, "Prebid cache failed", err) - labels.RequestStatus = pbsmetrics.RequestStatusErr + labels.RequestStatus = metrics.RequestStatusErr return } if req.SortBids == 1 { @@ -172,8 +172,8 @@ func (a *auction) auction(w http.ResponseWriter, r *http.Request, _ httprouter.P enc.Encode(resp) } -func (a *auction) recoverSafely(inner func(*pbs.PBSBidder, pbsmetrics.AdapterLabels)) func(*pbs.PBSBidder, pbsmetrics.AdapterLabels) { - return func(bidder *pbs.PBSBidder, labels pbsmetrics.AdapterLabels) { +func (a *auction) recoverSafely(inner func(*pbs.PBSBidder, metrics.AdapterLabels)) func(*pbs.PBSBidder, metrics.AdapterLabels) { + return func(bidder *pbs.PBSBidder, labels metrics.AdapterLabels) { defer func() { if r := recover(); r != nil { if bidder == nil { @@ -207,7 +207,7 @@ func (a *auction) shouldUsersync(ctx context.Context, bidder openrtb_ext.BidderN } // cache video bids only for Web -func cacheVideoOnly(bids pbs.PBSBidSlice, ctx context.Context, deps *auction, labels *pbsmetrics.Labels) error { +func cacheVideoOnly(bids pbs.PBSBidSlice, ctx context.Context, deps *auction, labels *metrics.Labels) error { var cobjs []*pbc.CacheObject for _, bid := range bids { if bid.CreativeMediaType == "video" { @@ -362,31 +362,31 @@ func sortBidsAddKeywordsMobile(bids pbs.PBSBidSlice, pbs_req *pbs.PBSRequest, pr } } -func getDefaultLabels(r *http.Request) pbsmetrics.Labels { - return pbsmetrics.Labels{ - Source: pbsmetrics.DemandUnknown, - RType: pbsmetrics.ReqTypeLegacy, +func getDefaultLabels(r *http.Request) metrics.Labels { + return metrics.Labels{ + Source: metrics.DemandUnknown, + RType: metrics.ReqTypeLegacy, PubID: "", - CookieFlag: pbsmetrics.CookieFlagUnknown, - RequestStatus: pbsmetrics.RequestStatusOK, + CookieFlag: metrics.CookieFlagUnknown, + RequestStatus: metrics.RequestStatusOK, } } -func setLabelSource(labels *pbsmetrics.Labels, req *pbs.PBSRequest, status *string) { +func setLabelSource(labels *metrics.Labels, req *pbs.PBSRequest, status *string) { if req.App != nil { - labels.Source = pbsmetrics.DemandApp + labels.Source = metrics.DemandApp } else { - labels.Source = pbsmetrics.DemandWeb + labels.Source = metrics.DemandWeb if req.Cookie.LiveSyncCount() == 0 { - labels.CookieFlag = pbsmetrics.CookieFlagNo + labels.CookieFlag = metrics.CookieFlagNo *status = "no_cookie" } else { - labels.CookieFlag = pbsmetrics.CookieFlagYes + labels.CookieFlag = metrics.CookieFlagYes } } } -func cacheAccordingToMarkup(req *pbs.PBSRequest, resp *pbs.PBSResponse, ctx context.Context, a *auction, labels *pbsmetrics.Labels) error { +func cacheAccordingToMarkup(req *pbs.PBSRequest, resp *pbs.PBSResponse, ctx context.Context, a *auction, labels *metrics.Labels) 
error { if req.CacheMarkup == 1 { cobjs := make([]*pbc.CacheObject, len(resp.Bids)) for i, bid := range resp.Bids { @@ -422,22 +422,22 @@ func cacheAccordingToMarkup(req *pbs.PBSRequest, resp *pbs.PBSResponse, ctx cont return nil } -func processBidResult(bidList pbs.PBSBidSlice, bidder *pbs.PBSBidder, aLabels *pbsmetrics.AdapterLabels, metrics pbsmetrics.MetricsEngine, err error) { +func processBidResult(bidList pbs.PBSBidSlice, bidder *pbs.PBSBidder, aLabels *metrics.AdapterLabels, metricsEngine metrics.MetricsEngine, err error) { if err != nil { var s struct{} if err == context.DeadlineExceeded { - aLabels.AdapterErrors = map[pbsmetrics.AdapterError]struct{}{pbsmetrics.AdapterErrorTimeout: s} + aLabels.AdapterErrors = map[metrics.AdapterError]struct{}{metrics.AdapterErrorTimeout: s} bidder.Error = "Timed out" } else if err != context.Canceled { bidder.Error = err.Error() switch err.(type) { case *errortypes.BadInput: - aLabels.AdapterErrors = map[pbsmetrics.AdapterError]struct{}{pbsmetrics.AdapterErrorBadInput: s} + aLabels.AdapterErrors = map[metrics.AdapterError]struct{}{metrics.AdapterErrorBadInput: s} case *errortypes.BadServerResponse: - aLabels.AdapterErrors = map[pbsmetrics.AdapterError]struct{}{pbsmetrics.AdapterErrorBadServerResponse: s} + aLabels.AdapterErrors = map[metrics.AdapterError]struct{}{metrics.AdapterErrorBadServerResponse: s} default: glog.Warningf("Error from bidder %v. Ignoring all bids: %v", bidder.BidderCode, err) - aLabels.AdapterErrors = map[pbsmetrics.AdapterError]struct{}{pbsmetrics.AdapterErrorUnknown: s} + aLabels.AdapterErrors = map[metrics.AdapterError]struct{}{metrics.AdapterErrorUnknown: s} } } } else if bidList != nil { @@ -445,22 +445,22 @@ func processBidResult(bidList pbs.PBSBidSlice, bidder *pbs.PBSBidder, aLabels *p bidder.NumBids = len(bidList) for _, bid := range bidList { var cpm = float64(bid.Price * 1000) - metrics.RecordAdapterPrice(*aLabels, cpm) + metricsEngine.RecordAdapterPrice(*aLabels, cpm) switch bid.CreativeMediaType { case "banner": - metrics.RecordAdapterBidReceived(*aLabels, openrtb_ext.BidTypeBanner, bid.Adm != "") + metricsEngine.RecordAdapterBidReceived(*aLabels, openrtb_ext.BidTypeBanner, bid.Adm != "") case "video": - metrics.RecordAdapterBidReceived(*aLabels, openrtb_ext.BidTypeVideo, bid.Adm != "") + metricsEngine.RecordAdapterBidReceived(*aLabels, openrtb_ext.BidTypeVideo, bid.Adm != "") } bid.ResponseTime = bidder.ResponseTime } } else { bidder.NoBid = true - aLabels.AdapterBids = pbsmetrics.AdapterBidNone + aLabels.AdapterBids = metrics.AdapterBidNone } } -func (a *auction) recordMetrics(req *pbs.PBSRequest, labels pbsmetrics.Labels) { +func (a *auction) recordMetrics(req *pbs.PBSRequest, labels metrics.Labels) { a.metricsEngine.RecordRequest(labels) if req == nil { a.metricsEngine.RecordLegacyImps(labels, 0) @@ -470,7 +470,7 @@ func (a *auction) recordMetrics(req *pbs.PBSRequest, labels pbsmetrics.Labels) { a.metricsEngine.RecordRequestTime(labels, time.Since(req.Start)) } -func (a *auction) processUserSync(req *pbs.PBSRequest, bidder *pbs.PBSBidder, blabels pbsmetrics.AdapterLabels, ex adapters.Adapter, ctx *context.Context) bool { +func (a *auction) processUserSync(req *pbs.PBSRequest, bidder *pbs.PBSBidder, blabels metrics.AdapterLabels, ex adapters.Adapter, ctx *context.Context) bool { var skip bool = false if req.App != nil { return skip @@ -501,7 +501,7 @@ func (a *auction) processUserSync(req *pbs.PBSRequest, bidder *pbs.PBSBidder, bl glog.Errorf("Failed to get usersync info for %s: %v", syncerCode, err) } } - 
blabels.CookieFlag = pbsmetrics.CookieFlagNo + blabels.CookieFlag = metrics.CookieFlagNo if ex.SkipNoCookies() { skip = true } diff --git a/endpoints/auction_test.go b/endpoints/auction_test.go index 1e41b02aaa2..f7f915cbaa5 100644 --- a/endpoints/auction_test.go +++ b/endpoints/auction_test.go @@ -14,10 +14,10 @@ import ( "github.com/prebid/prebid-server/cache/dummycache" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" + metricsConf "github.com/prebid/prebid-server/metrics/config" "github.com/prebid/prebid-server/openrtb_ext" "github.com/prebid/prebid-server/pbs" - "github.com/prebid/prebid-server/pbsmetrics" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" "github.com/prebid/prebid-server/prebid_cache_client" gdprPolicy "github.com/prebid/prebid-server/privacy/gdpr" "github.com/prebid/prebid-server/usersync/usersyncers" @@ -353,7 +353,7 @@ func TestCacheVideoOnly(t *testing.T) { HostVendorID: 0, }, nil, nil) prebid_cache_client.InitPrebidCache(server.URL) - var labels = &pbsmetrics.Labels{} + var labels = &metrics.Labels{} if err := cacheVideoOnly(bids, ctx, &auction{cfg: cfg, syncers: syncers, gdprPerms: gdprPerms, metricsEngine: &metricsConf.DummyMetricsEngine{}}, labels); err != nil { t.Errorf("Prebid cache failed: %v \n", err) return @@ -615,9 +615,9 @@ func TestPanicRecovery(t *testing.T) { }, metricsEngine: &metricsConf.DummyMetricsEngine{}, } - panicker := func(bidder *pbs.PBSBidder, blables pbsmetrics.AdapterLabels) { + panicker := func(bidder *pbs.PBSBidder, blables metrics.AdapterLabels) { panic("panic!") } recovered := dummy.recoverSafely(panicker) - recovered(nil, pbsmetrics.AdapterLabels{}) + recovered(nil, metrics.AdapterLabels{}) } diff --git a/endpoints/cookie_sync.go b/endpoints/cookie_sync.go index abe35c56a52..21876af9efd 100644 --- a/endpoints/cookie_sync.go +++ b/endpoints/cookie_sync.go @@ -16,8 +16,8 @@ import ( "github.com/prebid/prebid-server/analytics" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/privacy" "github.com/prebid/prebid-server/privacy/ccpa" gdprPrivacy "github.com/prebid/prebid-server/privacy/gdpr" @@ -28,7 +28,7 @@ func NewCookieSyncEndpoint( syncers map[openrtb_ext.BidderName]usersync.Usersyncer, cfg *config.Configuration, syncPermissions gdpr.Permissions, - metrics pbsmetrics.MetricsEngine, + metrics metrics.MetricsEngine, pbsAnalytics analytics.PBSAnalyticsModule, bidderMap map[string]openrtb_ext.BidderName) httprouter.Handle { @@ -55,7 +55,7 @@ type cookieSyncDeps struct { hostCookie *config.HostCookie gDPR *config.GDPR syncPermissions gdpr.Permissions - metrics pbsmetrics.MetricsEngine + metrics metrics.MetricsEngine pbsAnalytics analytics.PBSAnalyticsModule enforceCCPA bool bidderLookup map[string]struct{} diff --git a/endpoints/cookie_sync_test.go b/endpoints/cookie_sync_test.go index 058ed365375..77be25907c6 100644 --- a/endpoints/cookie_sync_test.go +++ b/endpoints/cookie_sync_test.go @@ -18,8 +18,8 @@ import ( analyticsConf "github.com/prebid/prebid-server/analytics/config" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/gdpr" + metricsConf "github.com/prebid/prebid-server/metrics/config" "github.com/prebid/prebid-server/openrtb_ext" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" 
"github.com/prebid/prebid-server/usersync" "github.com/stretchr/testify/assert" ) diff --git a/endpoints/openrtb2/amp_auction.go b/endpoints/openrtb2/amp_auction.go index 902af46421b..afc32e5ea2b 100644 --- a/endpoints/openrtb2/amp_auction.go +++ b/endpoints/openrtb2/amp_auction.go @@ -21,8 +21,8 @@ import ( "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" "github.com/prebid/prebid-server/exchange" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/privacy" "github.com/prebid/prebid-server/privacy/ccpa" "github.com/prebid/prebid-server/privacy/gdpr" @@ -49,7 +49,7 @@ func NewAmpEndpoint( requestsById stored_requests.Fetcher, accounts stored_requests.AccountFetcher, cfg *config.Configuration, - met pbsmetrics.MetricsEngine, + met metrics.MetricsEngine, pbsAnalytics analytics.PBSAnalyticsModule, disabledBidders map[string]string, defReqJSON []byte, @@ -103,12 +103,12 @@ func (deps *endpointDeps) AmpAuction(w http.ResponseWriter, r *http.Request, _ h // Set this as an AMP request in Metrics. - labels := pbsmetrics.Labels{ - Source: pbsmetrics.DemandWeb, - RType: pbsmetrics.ReqTypeAMP, - PubID: pbsmetrics.PublisherUnknown, - CookieFlag: pbsmetrics.CookieFlagUnknown, - RequestStatus: pbsmetrics.RequestStatusOK, + labels := metrics.Labels{ + Source: metrics.DemandWeb, + RType: metrics.ReqTypeAMP, + PubID: metrics.PublisherUnknown, + CookieFlag: metrics.CookieFlagUnknown, + RequestStatus: metrics.RequestStatusOK, } defer func() { deps.metricsEngine.RecordRequest(labels) @@ -137,7 +137,7 @@ func (deps *endpointDeps) AmpAuction(w http.ResponseWriter, r *http.Request, _ h for _, err := range errortypes.FatalOnly(errL) { w.Write([]byte(fmt.Sprintf("Invalid request format: %s\n", err.Error()))) } - labels.RequestStatus = pbsmetrics.RequestStatusBadInput + labels.RequestStatus = metrics.RequestStatusBadInput return } @@ -154,9 +154,9 @@ func (deps *endpointDeps) AmpAuction(w http.ResponseWriter, r *http.Request, _ h usersyncs := usersync.ParsePBSCookieFromRequest(r, &(deps.cfg.HostCookie)) if usersyncs.LiveSyncCount() == 0 { - labels.CookieFlag = pbsmetrics.CookieFlagNo + labels.CookieFlag = metrics.CookieFlagNo } else { - labels.CookieFlag = pbsmetrics.CookieFlagYes + labels.CookieFlag = metrics.CookieFlagYes } labels.PubID = getAccountID(req.Site.Publisher) // Look up account now that we have resolved the pubID value @@ -164,12 +164,12 @@ func (deps *endpointDeps) AmpAuction(w http.ResponseWriter, r *http.Request, _ h if len(acctIDErrs) > 0 { errL = append(errL, acctIDErrs...) httpStatus := http.StatusBadRequest - metricsStatus := pbsmetrics.RequestStatusBadInput + metricsStatus := metrics.RequestStatusBadInput for _, er := range errL { errCode := errortypes.ReadCode(er) if errCode == errortypes.BlacklistedAppErrorCode || errCode == errortypes.BlacklistedAcctErrorCode { httpStatus = http.StatusServiceUnavailable - metricsStatus = pbsmetrics.RequestStatusBlacklisted + metricsStatus = metrics.RequestStatusBlacklisted break } } @@ -275,7 +275,7 @@ func (deps *endpointDeps) AmpAuction(w http.ResponseWriter, r *http.Request, _ h // If we've sent _any_ bytes, then Go would have sent the 200 status code first. // That status code can't be un-sent... so the best we can do is log the error. 
if err := enc.Encode(ampResponse); err != nil { - labels.RequestStatus = pbsmetrics.RequestStatusNetworkErr + labels.RequestStatus = metrics.RequestStatusNetworkErr ao.Errors = append(ao.Errors, fmt.Errorf("/openrtb2/amp Failed to send response: %v", err)) } } diff --git a/endpoints/openrtb2/amp_auction_test.go b/endpoints/openrtb2/amp_auction_test.go index daa5c1feb20..746d436d711 100644 --- a/endpoints/openrtb2/amp_auction_test.go +++ b/endpoints/openrtb2/amp_auction_test.go @@ -18,9 +18,9 @@ import ( analyticsConf "github.com/prebid/prebid-server/analytics/config" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/exchange" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" - metrics "github.com/rcrowley/go-metrics" + gometrics "github.com/rcrowley/go-metrics" "github.com/stretchr/testify/assert" ) @@ -1269,6 +1269,6 @@ func TestBuildAmpObject(t *testing.T) { } } -func newTestMetrics() *pbsmetrics.Metrics { - return pbsmetrics.NewMetrics(metrics.NewRegistry(), openrtb_ext.CoreBidderNames(), config.DisabledMetrics{}) +func newTestMetrics() *metrics.Metrics { + return metrics.NewMetrics(gometrics.NewRegistry(), openrtb_ext.CoreBidderNames(), config.DisabledMetrics{}) } diff --git a/endpoints/openrtb2/auction.go b/endpoints/openrtb2/auction.go index a3c398c2805..21fa2c8a5a4 100644 --- a/endpoints/openrtb2/auction.go +++ b/endpoints/openrtb2/auction.go @@ -26,8 +26,8 @@ import ( "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" "github.com/prebid/prebid-server/exchange" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/prebid_cache_client" "github.com/prebid/prebid-server/privacy/ccpa" "github.com/prebid/prebid-server/stored_requests" @@ -52,7 +52,7 @@ func NewEndpoint( requestsById stored_requests.Fetcher, accounts stored_requests.AccountFetcher, cfg *config.Configuration, - met pbsmetrics.MetricsEngine, + met metrics.MetricsEngine, pbsAnalytics analytics.PBSAnalyticsModule, disabledBidders map[string]string, defReqJSON []byte, @@ -94,7 +94,7 @@ type endpointDeps struct { videoFetcher stored_requests.Fetcher accounts stored_requests.AccountFetcher cfg *config.Configuration - metricsEngine pbsmetrics.MetricsEngine + metricsEngine metrics.MetricsEngine analytics analytics.PBSAnalyticsModule disabledBidders map[string]string defaultRequest bool @@ -120,12 +120,12 @@ func (deps *endpointDeps) Auction(w http.ResponseWriter, r *http.Request, _ http StartTime: start, } - labels := pbsmetrics.Labels{ - Source: pbsmetrics.DemandUnknown, - RType: pbsmetrics.ReqTypeORTB2Web, - PubID: pbsmetrics.PublisherUnknown, - CookieFlag: pbsmetrics.CookieFlagUnknown, - RequestStatus: pbsmetrics.RequestStatusOK, + labels := metrics.Labels{ + Source: metrics.DemandUnknown, + RType: metrics.ReqTypeORTB2Web, + PubID: metrics.PublisherUnknown, + CookieFlag: metrics.CookieFlagUnknown, + RequestStatus: metrics.RequestStatusOK, } defer func() { deps.metricsEngine.RecordRequest(labels) @@ -150,15 +150,15 @@ func (deps *endpointDeps) Auction(w http.ResponseWriter, r *http.Request, _ http usersyncs := usersync.ParsePBSCookieFromRequest(r, &(deps.cfg.HostCookie)) if req.App != nil { - labels.Source = pbsmetrics.DemandApp - labels.RType = pbsmetrics.ReqTypeORTB2App + labels.Source = metrics.DemandApp + labels.RType = metrics.ReqTypeORTB2App labels.PubID = 
getAccountID(req.App.Publisher) } else { //req.Site != nil - labels.Source = pbsmetrics.DemandWeb + labels.Source = metrics.DemandWeb if usersyncs.LiveSyncCount() == 0 { - labels.CookieFlag = pbsmetrics.CookieFlagNo + labels.CookieFlag = metrics.CookieFlagNo } else { - labels.CookieFlag = pbsmetrics.CookieFlagYes + labels.CookieFlag = metrics.CookieFlagYes } labels.PubID = getAccountID(req.Site.Publisher) } @@ -185,7 +185,7 @@ func (deps *endpointDeps) Auction(w http.ResponseWriter, r *http.Request, _ http ao.Response = response ao.Account = account if err != nil { - labels.RequestStatus = pbsmetrics.RequestStatusErr + labels.RequestStatus = metrics.RequestStatusErr w.WriteHeader(http.StatusInternalServerError) fmt.Fprintf(w, "Critical error while running the auction: %v", err) glog.Errorf("/openrtb2/auction Critical error: %v", err) @@ -205,7 +205,7 @@ func (deps *endpointDeps) Auction(w http.ResponseWriter, r *http.Request, _ http // If we've sent _any_ bytes, then Go would have sent the 200 status code first. // That status code can't be un-sent... so the best we can do is log the error. if err := enc.Encode(response); err != nil { - labels.RequestStatus = pbsmetrics.RequestStatusNetworkErr + labels.RequestStatus = metrics.RequestStatusNetworkErr ao.Errors = append(ao.Errors, fmt.Errorf("/openrtb2/auction Failed to send response: %v", err)) } } @@ -1280,16 +1280,16 @@ func parseUserID(cfg *config.Configuration, httpReq *http.Request) (string, bool } // Write(return) errors to the client, if any. Returns true if errors were found. -func writeError(errs []error, w http.ResponseWriter, labels *pbsmetrics.Labels) bool { +func writeError(errs []error, w http.ResponseWriter, labels *metrics.Labels) bool { var rc bool = false if len(errs) > 0 { httpStatus := http.StatusBadRequest - metricsStatus := pbsmetrics.RequestStatusBadInput + metricsStatus := metrics.RequestStatusBadInput for _, err := range errs { erVal := errortypes.ReadCode(err) if erVal == errortypes.BlacklistedAppErrorCode || erVal == errortypes.BlacklistedAcctErrorCode { httpStatus = http.StatusServiceUnavailable - metricsStatus = pbsmetrics.RequestStatusBlacklisted + metricsStatus = metrics.RequestStatusBlacklisted break } } @@ -1317,5 +1317,5 @@ func getAccountID(pub *openrtb.Publisher) string { return pub.ID } } - return pbsmetrics.PublisherUnknown + return metrics.PublisherUnknown } diff --git a/endpoints/openrtb2/auction_test.go b/endpoints/openrtb2/auction_test.go index d3b2d8b26bc..ade3c14a0e7 100644 --- a/endpoints/openrtb2/auction_test.go +++ b/endpoints/openrtb2/auction_test.go @@ -27,8 +27,8 @@ import ( "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" "github.com/prebid/prebid-server/exchange" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/stored_requests/backends/empty_fetcher" "github.com/prebid/prebid-server/util/iputil" "github.com/stretchr/testify/assert" @@ -1695,12 +1695,12 @@ func TestGetAccountID(t *testing.T) { { description: "Neither Publisher.ID or Publisher.Ext.Prebid.ParentAccount present", pub: &openrtb.Publisher{}, - expectedAccID: pbsmetrics.PublisherUnknown, + expectedAccID: metrics.PublisherUnknown, }, { description: "Publisher is nil", pub: nil, - expectedAccID: pbsmetrics.PublisherUnknown, + expectedAccID: metrics.PublisherUnknown, }, } diff --git a/endpoints/openrtb2/video_auction.go b/endpoints/openrtb2/video_auction.go index 
95186beb9a1..7735d886730 100644 --- a/endpoints/openrtb2/video_auction.go +++ b/endpoints/openrtb2/video_auction.go @@ -27,8 +27,8 @@ import ( "github.com/prebid/prebid-server/analytics" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/exchange" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/prebid_cache_client" "github.com/prebid/prebid-server/stored_requests" "github.com/prebid/prebid-server/usersync" @@ -43,7 +43,7 @@ func NewVideoEndpoint( videoFetcher stored_requests.Fetcher, accounts stored_requests.AccountFetcher, cfg *config.Configuration, - met pbsmetrics.MetricsEngine, + met metrics.MetricsEngine, pbsAnalytics analytics.PBSAnalyticsModule, disabledBidders map[string]string, defReqJSON []byte, @@ -114,12 +114,12 @@ func (deps *endpointDeps) VideoAuctionEndpoint(w http.ResponseWriter, r *http.Re StartTime: start, } - labels := pbsmetrics.Labels{ - Source: pbsmetrics.DemandUnknown, - RType: pbsmetrics.ReqTypeVideo, - PubID: pbsmetrics.PublisherUnknown, - CookieFlag: pbsmetrics.CookieFlagUnknown, - RequestStatus: pbsmetrics.RequestStatusOK, + labels := metrics.Labels{ + Source: metrics.DemandUnknown, + RType: metrics.ReqTypeVideo, + PubID: metrics.PublisherUnknown, + CookieFlag: metrics.CookieFlagUnknown, + RequestStatus: metrics.RequestStatusOK, } debugQuery := r.URL.Query().Get("debug") @@ -255,14 +255,14 @@ func (deps *endpointDeps) VideoAuctionEndpoint(w http.ResponseWriter, r *http.Re usersyncs := usersync.ParsePBSCookieFromRequest(r, &(deps.cfg.HostCookie)) if bidReq.App != nil { - labels.Source = pbsmetrics.DemandApp + labels.Source = metrics.DemandApp labels.PubID = getAccountID(bidReq.App.Publisher) } else { // both bidReq.App == nil and bidReq.Site != nil are true - labels.Source = pbsmetrics.DemandWeb + labels.Source = metrics.DemandWeb if usersyncs.LiveSyncCount() == 0 { - labels.CookieFlag = pbsmetrics.CookieFlagNo + labels.CookieFlag = metrics.CookieFlagNo } else { - labels.CookieFlag = pbsmetrics.CookieFlagYes + labels.CookieFlag = metrics.CookieFlagYes } labels.PubID = getAccountID(bidReq.Site.Publisher) } @@ -340,25 +340,25 @@ func cleanupVideoBidRequest(videoReq *openrtb_ext.BidRequestVideo, podErrors []P return videoReq } -func handleError(labels *pbsmetrics.Labels, w http.ResponseWriter, errL []error, vo *analytics.VideoObject, debugLog *exchange.DebugLog) { +func handleError(labels *metrics.Labels, w http.ResponseWriter, errL []error, vo *analytics.VideoObject, debugLog *exchange.DebugLog) { if debugLog != nil && debugLog.Enabled { if rawUUID, err := uuid.NewV4(); err == nil { debugLog.CacheKey = rawUUID.String() } errL = append(errL, fmt.Errorf("[Debug cache ID: %s]", debugLog.CacheKey)) } - labels.RequestStatus = pbsmetrics.RequestStatusErr + labels.RequestStatus = metrics.RequestStatusErr var errors string var status int = http.StatusInternalServerError for _, er := range errL { erVal := errortypes.ReadCode(er) if erVal == errortypes.BlacklistedAppErrorCode || erVal == errortypes.BlacklistedAcctErrorCode { status = http.StatusServiceUnavailable - labels.RequestStatus = pbsmetrics.RequestStatusBlacklisted + labels.RequestStatus = metrics.RequestStatusBlacklisted break } else if erVal == errortypes.AcctRequiredErrorCode { status = http.StatusBadRequest - labels.RequestStatus = pbsmetrics.RequestStatusBadInput + labels.RequestStatus = metrics.RequestStatusBadInput break } errors = fmt.Sprintf("%s %s", errors, 
er.Error()) diff --git a/endpoints/openrtb2/video_auction_test.go b/endpoints/openrtb2/video_auction_test.go index 6e2bf23093e..a70d45ac3b8 100644 --- a/endpoints/openrtb2/video_auction_test.go +++ b/endpoints/openrtb2/video_auction_test.go @@ -17,8 +17,8 @@ import ( analyticsConf "github.com/prebid/prebid-server/analytics/config" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/exchange" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/prebid_cache_client" "github.com/prebid/prebid-server/stored_requests/backends/empty_fetcher" "github.com/stretchr/testify/assert" @@ -851,12 +851,12 @@ func TestHandleError(t *testing.T) { Errors: make([]error, 0), } - labels := pbsmetrics.Labels{ - Source: pbsmetrics.DemandUnknown, - RType: pbsmetrics.ReqTypeVideo, - PubID: pbsmetrics.PublisherUnknown, - CookieFlag: pbsmetrics.CookieFlagUnknown, - RequestStatus: pbsmetrics.RequestStatusOK, + labels := metrics.Labels{ + Source: metrics.DemandUnknown, + RType: metrics.ReqTypeVideo, + PubID: metrics.PublisherUnknown, + CookieFlag: metrics.CookieFlagUnknown, + RequestStatus: metrics.RequestStatusOK, } recorder := httptest.NewRecorder() @@ -864,7 +864,7 @@ func TestHandleError(t *testing.T) { err2 := errors.New("Error for testing handleError 2") handleError(&labels, recorder, []error{err1, err2}, &vo, nil) - assert.Equal(t, pbsmetrics.RequestStatusErr, labels.RequestStatus, "labels.RequestStatus should indicate an error") + assert.Equal(t, metrics.RequestStatusErr, labels.RequestStatus, "labels.RequestStatus should indicate an error") assert.Equal(t, 500, recorder.Code, "Error status should be written to writer") assert.Equal(t, 500, vo.Status, "Analytics object should have error status") assert.Equal(t, 2, len(vo.Errors), "New errors should be appended to Analytics object Errors") @@ -885,8 +885,8 @@ func TestHandleErrorMetrics(t *testing.T) { deps, met, mod := mockDepsWithMetrics(t, ex) deps.VideoAuctionEndpoint(recorder, req, nil) - assert.Equal(t, int64(0), met.RequestStatuses[pbsmetrics.ReqTypeVideo][pbsmetrics.RequestStatusOK].Count(), "OK requests count should be 0") - assert.Equal(t, int64(1), met.RequestStatuses[pbsmetrics.ReqTypeVideo][pbsmetrics.RequestStatusErr].Count(), "Error requests count should be 1") + assert.Equal(t, int64(0), met.RequestStatuses[metrics.ReqTypeVideo][metrics.RequestStatusOK].Count(), "OK requests count should be 0") + assert.Equal(t, int64(1), met.RequestStatuses[metrics.ReqTypeVideo][metrics.RequestStatusErr].Count(), "Error requests count should be 1") assert.Equal(t, 1, len(mod.videoObjects), "Mock AnalyticsModule should have 1 AuctionObject") assert.Equal(t, 500, mod.videoObjects[0].Status, "AnalyticsObject should have 500 status") assert.Equal(t, 2, len(mod.videoObjects[0].Errors), "AnalyticsObject should have Errors length of 2") @@ -1022,12 +1022,12 @@ func TestHandleErrorDebugLog(t *testing.T) { Errors: make([]error, 0), } - labels := pbsmetrics.Labels{ - Source: pbsmetrics.DemandUnknown, - RType: pbsmetrics.ReqTypeVideo, - PubID: pbsmetrics.PublisherUnknown, - CookieFlag: pbsmetrics.CookieFlagUnknown, - RequestStatus: pbsmetrics.RequestStatusOK, + labels := metrics.Labels{ + Source: metrics.DemandUnknown, + RType: metrics.ReqTypeVideo, + PubID: metrics.PublisherUnknown, + CookieFlag: metrics.CookieFlagUnknown, + RequestStatus: metrics.RequestStatusOK, } recorder := httptest.NewRecorder() @@ -1046,7 +1046,7 @@ func 
TestHandleErrorDebugLog(t *testing.T) { } handleError(&labels, recorder, []error{err1, err2}, &vo, &debugLog) - assert.Equal(t, pbsmetrics.RequestStatusErr, labels.RequestStatus, "labels.RequestStatus should indicate an error") + assert.Equal(t, metrics.RequestStatusErr, labels.RequestStatus, "labels.RequestStatus should indicate an error") assert.Equal(t, 500, recorder.Code, "Error status should be written to writer") assert.Equal(t, 500, vo.Status, "Analytics object should have error status") assert.Equal(t, 3, len(vo.Errors), "New errors including debug cache ID should be appended to Analytics object Errors") @@ -1197,7 +1197,7 @@ func TestFormatTargetingKeyLongKey(t *testing.T) { assert.Equal(t, "hb_pb_20.00", res, "Tergeting key constructed incorrectly") } -func mockDepsWithMetrics(t *testing.T, ex *mockExchangeVideo) (*endpointDeps, *pbsmetrics.Metrics, *mockAnalyticsModule) { +func mockDepsWithMetrics(t *testing.T, ex *mockExchangeVideo) (*endpointDeps, *metrics.Metrics, *mockAnalyticsModule) { mockModule := &mockAnalyticsModule{} metrics := newTestMetrics() deps := &endpointDeps{ diff --git a/endpoints/setuid.go b/endpoints/setuid.go index 53d8f43bdc2..caa3ae1766d 100644 --- a/endpoints/setuid.go +++ b/endpoints/setuid.go @@ -13,8 +13,8 @@ import ( "github.com/prebid/prebid-server/analytics" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/usersync" ) @@ -26,7 +26,7 @@ const ( chromeiOSStrLen = len(chromeiOSStr) ) -func NewSetUIDEndpoint(cfg config.HostCookie, syncers map[openrtb_ext.BidderName]usersync.Usersyncer, perms gdpr.Permissions, pbsanalytics analytics.PBSAnalyticsModule, metrics pbsmetrics.MetricsEngine) httprouter.Handle { +func NewSetUIDEndpoint(cfg config.HostCookie, syncers map[openrtb_ext.BidderName]usersync.Usersyncer, perms gdpr.Permissions, pbsanalytics analytics.PBSAnalyticsModule, metricsEngine metrics.MetricsEngine) httprouter.Handle { cookieTTL := time.Duration(cfg.TTL) * 24 * time.Hour validFamilyNameMap := make(map[string]struct{}) @@ -45,8 +45,8 @@ func NewSetUIDEndpoint(cfg config.HostCookie, syncers map[openrtb_ext.BidderName pc := usersync.ParsePBSCookieFromRequest(r, &cfg) if !pc.AllowSyncs() { w.WriteHeader(http.StatusUnauthorized) - metrics.RecordUserIDSet(pbsmetrics.UserLabels{ - Action: pbsmetrics.RequestActionOptOut, + metricsEngine.RecordUserIDSet(metrics.UserLabels{ + Action: metrics.RequestActionOptOut, }) so.Status = http.StatusUnauthorized return @@ -58,8 +58,8 @@ func NewSetUIDEndpoint(cfg config.HostCookie, syncers map[openrtb_ext.BidderName if err != nil { w.WriteHeader(http.StatusBadRequest) w.Write([]byte(err.Error())) - metrics.RecordUserIDSet(pbsmetrics.UserLabels{ - Action: pbsmetrics.RequestActionErr, + metricsEngine.RecordUserIDSet(metrics.UserLabels{ + Action: metrics.RequestActionErr, }) so.Status = http.StatusBadRequest return @@ -69,8 +69,8 @@ func NewSetUIDEndpoint(cfg config.HostCookie, syncers map[openrtb_ext.BidderName if shouldReturn, status, body := preventSyncsGDPR(query.Get("gdpr"), query.Get("gdpr_consent"), perms); shouldReturn { w.WriteHeader(status) w.Write([]byte(body)) - metrics.RecordUserIDSet(pbsmetrics.UserLabels{ - Action: pbsmetrics.RequestActionGDPR, + metricsEngine.RecordUserIDSet(metrics.UserLabels{ + Action: metrics.RequestActionGDPR, Bidder: openrtb_ext.BidderName(familyName), }) so.Status = status @@ -87,11 
+87,11 @@ func NewSetUIDEndpoint(cfg config.HostCookie, syncers map[openrtb_ext.BidderName } if err == nil { - labels := pbsmetrics.UserLabels{ - Action: pbsmetrics.RequestActionSet, + labels := metrics.UserLabels{ + Action: metrics.RequestActionSet, Bidder: openrtb_ext.BidderName(familyName), } - metrics.RecordUserIDSet(labels) + metricsEngine.RecordUserIDSet(labels) so.Success = true } diff --git a/endpoints/setuid_test.go b/endpoints/setuid_test.go index e63944e2aec..ae0636770da 100644 --- a/endpoints/setuid_test.go +++ b/endpoints/setuid_test.go @@ -11,7 +11,7 @@ import ( "time" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/privacy" "github.com/prebid/prebid-server/usersync" "github.com/stretchr/testify/assert" @@ -19,7 +19,7 @@ import ( "github.com/prebid/prebid-server/openrtb_ext" analyticsConf "github.com/prebid/prebid-server/analytics/config" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" + metricsConf "github.com/prebid/prebid-server/metrics/config" ) func TestSetUIDEndpoint(t *testing.T) { @@ -204,7 +204,7 @@ func TestSetUIDEndpointMetrics(t *testing.T) { cookies []*usersync.PBSCookie validFamilyNames []string gdprAllowsHostCookies bool - expectedMetricAction pbsmetrics.RequestAction + expectedMetricAction metrics.RequestAction expectedMetricBidder openrtb_ext.BidderName expectedResponseCode int description string @@ -214,7 +214,7 @@ func TestSetUIDEndpointMetrics(t *testing.T) { cookies: []*usersync.PBSCookie{}, validFamilyNames: []string{"pubmatic"}, gdprAllowsHostCookies: true, - expectedMetricAction: pbsmetrics.RequestActionSet, + expectedMetricAction: metrics.RequestActionSet, expectedMetricBidder: openrtb_ext.BidderName("pubmatic"), expectedResponseCode: 200, description: "Success - Sync", @@ -224,7 +224,7 @@ func TestSetUIDEndpointMetrics(t *testing.T) { cookies: []*usersync.PBSCookie{}, validFamilyNames: []string{"pubmatic"}, gdprAllowsHostCookies: true, - expectedMetricAction: pbsmetrics.RequestActionSet, + expectedMetricAction: metrics.RequestActionSet, expectedMetricBidder: openrtb_ext.BidderName("pubmatic"), expectedResponseCode: 200, description: "Success - Unsync", @@ -234,7 +234,7 @@ func TestSetUIDEndpointMetrics(t *testing.T) { cookies: []*usersync.PBSCookie{usersync.NewPBSCookieWithOptOut()}, validFamilyNames: []string{"pubmatic"}, gdprAllowsHostCookies: true, - expectedMetricAction: pbsmetrics.RequestActionOptOut, + expectedMetricAction: metrics.RequestActionOptOut, expectedResponseCode: 401, description: "Cookie Opted Out", }, @@ -243,7 +243,7 @@ func TestSetUIDEndpointMetrics(t *testing.T) { cookies: []*usersync.PBSCookie{}, validFamilyNames: []string{}, gdprAllowsHostCookies: true, - expectedMetricAction: pbsmetrics.RequestActionErr, + expectedMetricAction: metrics.RequestActionErr, expectedResponseCode: 400, description: "Unsupported Cookie Name", }, @@ -252,7 +252,7 @@ func TestSetUIDEndpointMetrics(t *testing.T) { cookies: []*usersync.PBSCookie{}, validFamilyNames: []string{"pubmatic"}, gdprAllowsHostCookies: false, - expectedMetricAction: pbsmetrics.RequestActionGDPR, + expectedMetricAction: metrics.RequestActionGDPR, expectedMetricBidder: openrtb_ext.BidderName("pubmatic"), expectedResponseCode: 400, description: "Prevented By GDPR", @@ -260,21 +260,21 @@ func TestSetUIDEndpointMetrics(t *testing.T) { } for _, test := range testCases { - metrics := &pbsmetrics.MetricsEngineMock{} - expectedLabels := 
pbsmetrics.UserLabels{ + metricsEngine := &metrics.MetricsEngineMock{} + expectedLabels := metrics.UserLabels{ Action: test.expectedMetricAction, Bidder: test.expectedMetricBidder, } - metrics.On("RecordUserIDSet", expectedLabels).Once() + metricsEngine.On("RecordUserIDSet", expectedLabels).Once() req := httptest.NewRequest("GET", test.uri, nil) for _, v := range test.cookies { addCookie(req, v) } - response := doRequest(req, metrics, test.validFamilyNames, test.gdprAllowsHostCookies, false) + response := doRequest(req, metricsEngine, test.validFamilyNames, test.gdprAllowsHostCookies, false) assert.Equal(t, test.expectedResponseCode, response.Code, test.description) - metrics.AssertExpectations(t) + metricsEngine.AssertExpectations(t) } } @@ -384,7 +384,7 @@ func makeRequest(uri string, existingSyncs map[string]string) *http.Request { return request } -func doRequest(req *http.Request, metrics pbsmetrics.MetricsEngine, validFamilyNames []string, gdprAllowsHostCookies bool, gdprReturnsError bool) *httptest.ResponseRecorder { +func doRequest(req *http.Request, metrics metrics.MetricsEngine, validFamilyNames []string, gdprAllowsHostCookies bool, gdprReturnsError bool) *httptest.ResponseRecorder { cfg := config.Configuration{} perms := &mockPermsSetUID{ allowHost: gdprAllowsHostCookies, diff --git a/exchange/adapter_util.go b/exchange/adapter_util.go index 9ec3407dc51..f361508e91e 100644 --- a/exchange/adapter_util.go +++ b/exchange/adapter_util.go @@ -4,7 +4,7 @@ import ( "fmt" "net/http" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/adapters" "github.com/prebid/prebid-server/adapters/ix" @@ -14,7 +14,7 @@ import ( "github.com/prebid/prebid-server/openrtb_ext" ) -func BuildAdapters(client *http.Client, cfg *config.Configuration, infos adapters.BidderInfos, me pbsmetrics.MetricsEngine) (map[openrtb_ext.BidderName]adaptedBidder, []error) { +func BuildAdapters(client *http.Client, cfg *config.Configuration, infos adapters.BidderInfos, me metrics.MetricsEngine) (map[openrtb_ext.BidderName]adaptedBidder, []error) { exchangeBidders := buildExchangeBiddersLegacy(cfg.Adapters, infos) exchangeBiddersModern, errs := buildExchangeBidders(cfg, infos, client, me) @@ -32,7 +32,7 @@ func BuildAdapters(client *http.Client, cfg *config.Configuration, infos adapter return exchangeBidders, nil } -func buildExchangeBidders(cfg *config.Configuration, infos adapters.BidderInfos, client *http.Client, me pbsmetrics.MetricsEngine) (map[openrtb_ext.BidderName]adaptedBidder, []error) { +func buildExchangeBidders(cfg *config.Configuration, infos adapters.BidderInfos, client *http.Client, me metrics.MetricsEngine) (map[openrtb_ext.BidderName]adaptedBidder, []error) { bidders, errs := buildBidders(cfg.Adapters, infos, newAdapterBuilders()) if len(errs) > 0 { return nil, errs diff --git a/exchange/adapter_util_test.go b/exchange/adapter_util_test.go index d9f997a76a7..f001092245e 100644 --- a/exchange/adapter_util_test.go +++ b/exchange/adapter_util_test.go @@ -15,8 +15,8 @@ import ( "github.com/prebid/prebid-server/adapters/rubicon" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/currency" + metrics "github.com/prebid/prebid-server/metrics/config" "github.com/prebid/prebid-server/openrtb_ext" - metrics "github.com/prebid/prebid-server/pbsmetrics/config" "github.com/stretchr/testify/assert" ) diff --git a/exchange/bidder.go b/exchange/bidder.go index a6463d0f07a..c59d2b4f207 100644 --- a/exchange/bidder.go +++ 
b/exchange/bidder.go @@ -22,8 +22,8 @@ import ( "github.com/prebid/prebid-server/adapters" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "golang.org/x/net/context/ctxhttp" ) @@ -91,7 +91,7 @@ type pbsOrtbSeatBid struct { // // The name refers to the "Adapter" architecture pattern, and should not be confused with a Prebid "Adapter" // (which is being phased out and replaced by Bidder for OpenRTB auctions) -func adaptBidder(bidder adapters.Bidder, client *http.Client, cfg *config.Configuration, me pbsmetrics.MetricsEngine, name openrtb_ext.BidderName) adaptedBidder { +func adaptBidder(bidder adapters.Bidder, client *http.Client, cfg *config.Configuration, me metrics.MetricsEngine, name openrtb_ext.BidderName) adaptedBidder { return &bidderAdapter{ Bidder: bidder, BidderName: name, @@ -108,7 +108,7 @@ type bidderAdapter struct { Bidder adapters.Bidder BidderName openrtb_ext.BidderName Client *http.Client - me pbsmetrics.MetricsEngine + me metrics.MetricsEngine config bidderAdapterConfig } diff --git a/exchange/bidder_test.go b/exchange/bidder_test.go index 7ad09dc740e..8266daaa172 100644 --- a/exchange/bidder_test.go +++ b/exchange/bidder_test.go @@ -20,10 +20,9 @@ import ( "github.com/prebid/prebid-server/adapters" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/currency" + "github.com/prebid/prebid-server/metrics" + metricsConfig "github.com/prebid/prebid-server/metrics/config" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" - metricsConfig "github.com/prebid/prebid-server/pbsmetrics/config" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -204,7 +203,7 @@ func TestBidderTimeout(t *testing.T) { Bidder: &mixedMultiBidder{}, BidderName: openrtb_ext.BidderAppnexus, Client: server.Client(), - me: &metricsConf.DummyMetricsEngine{}, + me: &metricsConfig.DummyMetricsEngine{}, } callInfo := bidder.doRequest(ctx, &adapters.RequestData{ @@ -247,7 +246,7 @@ func TestConnectionClose(t *testing.T) { Bidder: &mixedMultiBidder{}, Client: server.Client(), BidderName: openrtb_ext.BidderAppnexus, - me: &metricsConf.DummyMetricsEngine{}, + me: &metricsConfig.DummyMetricsEngine{}, } callInfo := bidder.doRequest(context.Background(), &adapters.RequestData{ @@ -1218,7 +1217,7 @@ func TestCallRecordAdapterConnections(t *testing.T) { } // setup a mock metrics engine and its expectation - metrics := &pbsmetrics.MetricsEngineMock{} + metrics := &metrics.MetricsEngineMock{} expectedAdapterName := openrtb_ext.BidderAppnexus compareConnWaitTime := func(dur time.Duration) bool { return dur.Nanoseconds() > 0 } @@ -1270,7 +1269,7 @@ func (TLSHandshakeTripper) RoundTrip(req *http.Request) (*http.Response, error) func TestCallRecordDNSTime(t *testing.T) { // setup a mock metrics engine and its expectation - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} metricsMock.Mock.On("RecordDNSTime", mock.Anything).Return() // Instantiate the bidder that will send the request. 
We'll make sure to use an @@ -1291,7 +1290,7 @@ func TestCallRecordDNSTime(t *testing.T) { func TestCallRecordTLSHandshakeTime(t *testing.T) { // setup a mock metrics engine and its expectation - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} metricsMock.Mock.On("RecordTLSHandshakeTime", mock.Anything).Return() // Instantiate the bidder that will send the request. We'll make sure to use an @@ -1328,7 +1327,7 @@ func TestTimeoutNotificationOff(t *testing.T) { Bidder: bidderImpl, Client: server.Client(), config: bidderAdapterConfig{Debug: config.Debug{}}, - me: &metricsConf.DummyMetricsEngine{}, + me: &metricsConfig.DummyMetricsEngine{}, } if tb, ok := bidder.Bidder.(adapters.TimeoutBidder); !ok { t.Error("Failed to cast bidder to a TimeoutBidder") @@ -1369,7 +1368,7 @@ func TestTimeoutNotificationOn(t *testing.T) { }, }, }, - me: &metricsConf.DummyMetricsEngine{}, + me: &metricsConfig.DummyMetricsEngine{}, } // Unwrap To Mimic exchange.go Casting Code diff --git a/exchange/exchange.go b/exchange/exchange.go index a3841fac805..1bd7e61e19a 100644 --- a/exchange/exchange.go +++ b/exchange/exchange.go @@ -24,8 +24,8 @@ import ( "github.com/prebid/prebid-server/currency" "github.com/prebid/prebid-server/errortypes" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/prebid_cache_client" ) @@ -52,7 +52,7 @@ type IdFetcher interface { type exchange struct { adapterMap map[openrtb_ext.BidderName]adaptedBidder - me pbsmetrics.MetricsEngine + me metrics.MetricsEngine cache prebid_cache_client.Client cacheTime time.Duration gDPR gdpr.Permissions @@ -77,7 +77,7 @@ type bidResponseWrapper struct { bidder openrtb_ext.BidderName } -func NewExchange(adapters map[openrtb_ext.BidderName]adaptedBidder, cache prebid_cache_client.Client, cfg *config.Configuration, metricsEngine pbsmetrics.MetricsEngine, gDPR gdpr.Permissions, currencyConverter *currency.RateConverter, categoriesFetcher stored_requests.CategoryFetcher) Exchange { +func NewExchange(adapters map[openrtb_ext.BidderName]adaptedBidder, cache prebid_cache_client.Client, cfg *config.Configuration, metricsEngine metrics.MetricsEngine, gDPR gdpr.Permissions, currencyConverter *currency.RateConverter, categoriesFetcher stored_requests.CategoryFetcher) Exchange { return &exchange{ adapterMap: adapters, cache: cache, @@ -101,12 +101,12 @@ type AuctionRequest struct { BidRequest *openrtb.BidRequest Account config.Account UserSyncs IdFetcher - RequestType pbsmetrics.RequestType + RequestType metrics.RequestType StartTime time.Time // LegacyLabels is included here for temporary compatability with cleanOpenRTBRequests // in HoldAuction until we get to factoring it away. Do not use for anything new. 
- LegacyLabels pbsmetrics.Labels + LegacyLabels metrics.Labels } // BidderRequest holds the bidder specific request and all other @@ -115,7 +115,7 @@ type BidderRequest struct { BidRequest *openrtb.BidRequest BidderName openrtb_ext.BidderName BidderCoreName openrtb_ext.BidderName - BidderLabels pbsmetrics.AdapterLabels + BidderLabels metrics.AdapterLabels } func (e *exchange) HoldAuction(ctx context.Context, r AuctionRequest, debugLog *DebugLog) (*openrtb.BidResponse, error) { @@ -248,9 +248,9 @@ func (e *exchange) parseUsersyncIfAmbiguous(bidRequest *openrtb.BidRequest) bool return usersyncIfAmbiguous } -func recordImpMetrics(bidRequest *openrtb.BidRequest, metricsEngine pbsmetrics.MetricsEngine) { +func recordImpMetrics(bidRequest *openrtb.BidRequest, metricsEngine metrics.MetricsEngine) { for _, impInRequest := range bidRequest.Imp { - var impLabels pbsmetrics.ImpLabels = pbsmetrics.ImpLabels{ + var impLabels metrics.ImpLabels = metrics.ImpLabels{ BannerImps: impInRequest.Banner != nil, VideoImps: impInRequest.Video != nil, AudioImps: impInRequest.Audio != nil, @@ -448,31 +448,31 @@ func (e *exchange) recoverSafely(bidderRequests []BidderRequest, } } -func bidsToMetric(bids *pbsOrtbSeatBid) pbsmetrics.AdapterBid { +func bidsToMetric(bids *pbsOrtbSeatBid) metrics.AdapterBid { if bids == nil || len(bids.bids) == 0 { - return pbsmetrics.AdapterBidNone + return metrics.AdapterBidNone } - return pbsmetrics.AdapterBidPresent + return metrics.AdapterBidPresent } -func errorsToMetric(errs []error) map[pbsmetrics.AdapterError]struct{} { +func errorsToMetric(errs []error) map[metrics.AdapterError]struct{} { if len(errs) == 0 { return nil } - ret := make(map[pbsmetrics.AdapterError]struct{}, len(errs)) + ret := make(map[metrics.AdapterError]struct{}, len(errs)) var s struct{} for _, err := range errs { switch errortypes.ReadCode(err) { case errortypes.TimeoutErrorCode: - ret[pbsmetrics.AdapterErrorTimeout] = s + ret[metrics.AdapterErrorTimeout] = s case errortypes.BadInputErrorCode: - ret[pbsmetrics.AdapterErrorBadInput] = s + ret[metrics.AdapterErrorBadInput] = s case errortypes.BadServerResponseErrorCode: - ret[pbsmetrics.AdapterErrorBadServerResponse] = s + ret[metrics.AdapterErrorBadServerResponse] = s case errortypes.FailedToRequestBidsErrorCode: - ret[pbsmetrics.AdapterErrorFailedToRequestBids] = s + ret[metrics.AdapterErrorFailedToRequestBids] = s default: - ret[pbsmetrics.AdapterErrorUnknown] = s + ret[metrics.AdapterErrorUnknown] = s } } return ret diff --git a/exchange/exchange_test.go b/exchange/exchange_test.go index b31846e83a7..c013f58743e 100644 --- a/exchange/exchange_test.go +++ b/exchange/exchange_test.go @@ -19,10 +19,10 @@ import ( "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/currency" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" + metricsConf "github.com/prebid/prebid-server/metrics/config" + metricsConfig "github.com/prebid/prebid-server/metrics/config" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" - metricsConfig "github.com/prebid/prebid-server/pbsmetrics/config" pbc "github.com/prebid/prebid-server/prebid_cache_client" "github.com/prebid/prebid-server/stored_requests" "github.com/prebid/prebid-server/stored_requests/backends/file_fetcher" @@ -1156,13 +1156,13 @@ func TestPanicRecovery(t *testing.T) { panic("panic!") } - apnLabels := pbsmetrics.AdapterLabels{ - Source: pbsmetrics.DemandWeb, - 
RType: pbsmetrics.ReqTypeORTB2Web, + apnLabels := metrics.AdapterLabels{ + Source: metrics.DemandWeb, + RType: metrics.ReqTypeORTB2Web, Adapter: openrtb_ext.BidderAppnexus, PubID: "test1", - CookieFlag: pbsmetrics.CookieFlagYes, - AdapterBids: pbsmetrics.AdapterBidNone, + CookieFlag: metrics.CookieFlagYes, + AdapterBids: metrics.AdapterBidNone, } bidderRequests := []BidderRequest{ diff --git a/exchange/targeting_test.go b/exchange/targeting_test.go index e67a5bbee80..160d7465ff2 100644 --- a/exchange/targeting_test.go +++ b/exchange/targeting_test.go @@ -13,8 +13,8 @@ import ( "github.com/prebid/prebid-server/gdpr" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" - metricsConfig "github.com/prebid/prebid-server/pbsmetrics/config" + metricsConf "github.com/prebid/prebid-server/metrics/config" + metricsConfig "github.com/prebid/prebid-server/metrics/config" "github.com/mxmCherry/openrtb" "github.com/prebid/prebid-server/adapters" diff --git a/exchange/utils.go b/exchange/utils.go index 4a1c8bb52b7..d70829ff90c 100644 --- a/exchange/utils.go +++ b/exchange/utils.go @@ -12,18 +12,18 @@ import ( "github.com/mxmCherry/openrtb" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/gdpr" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prebid/prebid-server/privacy" "github.com/prebid/prebid-server/privacy/ccpa" "github.com/prebid/prebid-server/privacy/lmt" ) -var integrationTypeMap = map[pbsmetrics.RequestType]config.IntegrationType{ - pbsmetrics.ReqTypeAMP: config.IntegrationTypeAMP, - pbsmetrics.ReqTypeORTB2App: config.IntegrationTypeApp, - pbsmetrics.ReqTypeVideo: config.IntegrationTypeVideo, - pbsmetrics.ReqTypeORTB2Web: config.IntegrationTypeWeb, +var integrationTypeMap = map[metrics.RequestType]config.IntegrationType{ + metrics.ReqTypeAMP: config.IntegrationTypeAMP, + metrics.ReqTypeORTB2App: config.IntegrationTypeApp, + metrics.ReqTypeVideo: config.IntegrationTypeVideo, + metrics.ReqTypeORTB2Web: config.IntegrationTypeWeb, } const unknownBidder string = "" @@ -57,7 +57,7 @@ func cleanOpenRTBRequests(ctx context.Context, requestExt *openrtb_ext.ExtRequest, gDPR gdpr.Permissions, usersyncIfAmbiguous bool, - privacyConfig config.Privacy) (bidderRequests []BidderRequest, privacyLabels pbsmetrics.PrivacyLabels, errs []error) { + privacyConfig config.Privacy) (bidderRequests []BidderRequest, privacyLabels metrics.PrivacyLabels, errs []error) { impsByBidder, errs := splitImps(req.BidRequest.Imp) if len(errs) > 0 { @@ -77,7 +77,7 @@ func cleanOpenRTBRequests(ctx context.Context, gdpr := extractGDPR(req.BidRequest, usersyncIfAmbiguous) consent := extractConsent(req.BidRequest) - ampGDPRException := (req.LegacyLabels.RType == pbsmetrics.ReqTypeAMP) && gDPR.AMPException() + ampGDPRException := (req.LegacyLabels.RType == metrics.ReqTypeAMP) && gDPR.AMPException() ccpaEnforcer, err := extractCCPA(req.BidRequest, privacyConfig, &req.Account, aliases, integrationTypeMap[req.LegacyLabels.RType]) if err != nil { @@ -105,7 +105,7 @@ func cleanOpenRTBRequests(ctx context.Context, parsedConsent, err := vendorconsent.ParseString(consent) if err == nil { version := int(parsedConsent.Version()) - privacyLabels.GDPRTCFVersion = pbsmetrics.TCFVersionToValue(version) + privacyLabels.GDPRTCFVersion = metrics.TCFVersionToValue(version) } } @@ -207,19 +207,19 @@ func getAuctionBidderRequests(req AuctionRequest, BidderName: openrtb_ext.BidderName(bidder), BidderCoreName: coreBidder, 
BidRequest: &reqCopy, - BidderLabels: pbsmetrics.AdapterLabels{ + BidderLabels: metrics.AdapterLabels{ Source: req.LegacyLabels.Source, RType: req.LegacyLabels.RType, Adapter: coreBidder, PubID: req.LegacyLabels.PubID, CookieFlag: req.LegacyLabels.CookieFlag, - AdapterBids: pbsmetrics.AdapterBidPresent, + AdapterBids: metrics.AdapterBidPresent, }, } if hadSync := prepareUser(&reqCopy, bidder.BidderName.String(), coreBidder, explicitBuyerUIDs, req.UserSyncs); !hadSync && req.BidRequest.App == nil { - bidder.BidderLabels.CookieFlag = pbsmetrics.CookieFlagNo + bidder.BidderLabels.CookieFlag = metrics.CookieFlagNo } else { - bidder.BidderLabels.CookieFlag = pbsmetrics.CookieFlagYes + bidder.BidderLabels.CookieFlag = metrics.CookieFlagYes } bidderRequests = append(bidderRequests, bidder) diff --git a/exchange/utils_test.go b/exchange/utils_test.go index 2f24af2a06c..5fb5707c07b 100644 --- a/exchange/utils_test.go +++ b/exchange/utils_test.go @@ -10,8 +10,8 @@ import ( "github.com/mxmCherry/openrtb" "github.com/prebid/prebid-server/config" "github.com/prebid/prebid-server/errortypes" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/stretchr/testify/assert" ) @@ -112,7 +112,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled bool ccpaAccountEnabled *bool expectDataScrub bool - expectPrivacyLabels pbsmetrics.PrivacyLabels + expectPrivacyLabels metrics.PrivacyLabels }{ { description: "Feature Flags Enabled - Opt Out", @@ -120,7 +120,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: &trueValue, expectDataScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: true, }, @@ -131,7 +131,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: &trueValue, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: false, }, @@ -143,7 +143,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: &trueValue, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: false, }, @@ -155,7 +155,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: &trueValue, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: true, }, @@ -167,7 +167,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: &trueValue, expectDataScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: true, }, @@ -178,7 +178,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: false, ccpaAccountEnabled: &trueValue, expectDataScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: true, }, @@ -189,7 +189,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: &falseValue, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: 
false, }, @@ -200,7 +200,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: true, ccpaAccountEnabled: nil, expectDataScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: true, }, @@ -211,7 +211,7 @@ func TestCleanOpenRTBRequestsCCPA(t *testing.T) { ccpaHostEnabled: false, ccpaAccountEnabled: nil, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ CCPAProvided: true, CCPAEnforced: false, }, @@ -315,13 +315,13 @@ func TestCleanOpenRTBRequestsCOPPA(t *testing.T) { description string coppa int8 expectDataScrub bool - expectPrivacyLabels pbsmetrics.PrivacyLabels + expectPrivacyLabels metrics.PrivacyLabels }{ { description: "Enabled", coppa: 1, expectDataScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ COPPAEnforced: true, }, }, @@ -329,7 +329,7 @@ func TestCleanOpenRTBRequestsCOPPA(t *testing.T) { description: "Disabled", coppa: 0, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ COPPAEnforced: false, }, }, @@ -977,14 +977,14 @@ func TestCleanOpenRTBRequestsLMT(t *testing.T) { lmt *int8 enforceLMT bool expectDataScrub bool - expectPrivacyLabels pbsmetrics.PrivacyLabels + expectPrivacyLabels metrics.PrivacyLabels }{ { description: "Feature Flag Enabled - OpenTRB Enabled", lmt: &enabled, enforceLMT: true, expectDataScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ LMTEnforced: true, }, }, @@ -993,7 +993,7 @@ func TestCleanOpenRTBRequestsLMT(t *testing.T) { lmt: &enabled, enforceLMT: false, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ LMTEnforced: false, }, }, @@ -1002,7 +1002,7 @@ func TestCleanOpenRTBRequestsLMT(t *testing.T) { lmt: &disabled, enforceLMT: true, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ LMTEnforced: false, }, }, @@ -1011,7 +1011,7 @@ func TestCleanOpenRTBRequestsLMT(t *testing.T) { lmt: &disabled, enforceLMT: false, expectDataScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ LMTEnforced: false, }, }, @@ -1057,7 +1057,7 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr string gdprConsent string gdprScrub bool - expectPrivacyLabels pbsmetrics.PrivacyLabels + expectPrivacyLabels metrics.PrivacyLabels }{ { description: "Enforce - TCF Invalid", @@ -1066,7 +1066,7 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "malformed", gdprScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: true, GDPRTCFVersion: "", }, @@ -1078,9 +1078,9 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "BONV8oqONXwgmADACHENAO7pqzAAppY", gdprScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV1, + GDPRTCFVersion: metrics.TCFVersionV1, }, }, { @@ -1090,9 +1090,9 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "COzTVhaOzTVhaGvAAAENAiCIAP_AAH_AAAAAAEEUACCKAAA", gdprScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ 
GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV2, + GDPRTCFVersion: metrics.TCFVersionV2, }, }, { @@ -1102,7 +1102,7 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "0", gdprConsent: "BONV8oqONXwgmADACHENAO7pqzAAppY", gdprScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: false, GDPRTCFVersion: "", }, @@ -1114,9 +1114,9 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "BONV8oqONXwgmADACHENAO7pqzAAppY", gdprScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV1, + GDPRTCFVersion: metrics.TCFVersionV1, }, }, { @@ -1126,7 +1126,7 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "BONV8oqONXwgmADACHENAO7pqzAAppY", gdprScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: false, GDPRTCFVersion: "", }, @@ -1138,9 +1138,9 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "BONV8oqONXwgmADACHENAO7pqzAAppY", gdprScrub: true, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV1, + GDPRTCFVersion: metrics.TCFVersionV1, }, }, { @@ -1150,7 +1150,7 @@ func TestCleanOpenRTBRequestsGDPR(t *testing.T) { gdpr: "1", gdprConsent: "BONV8oqONXwgmADACHENAO7pqzAAppY", gdprScrub: false, - expectPrivacyLabels: pbsmetrics.PrivacyLabels{ + expectPrivacyLabels: metrics.PrivacyLabels{ GDPREnforced: false, GDPRTCFVersion: "", }, diff --git a/go.mod b/go.mod index c6c15c9da11..48fc6b6479b 100644 --- a/go.mod +++ b/go.mod @@ -29,7 +29,7 @@ require ( github.com/mattn/go-colorable v0.1.2 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect github.com/mitchellh/mapstructure v1.0.0 // indirect - github.com/mssola/user_agent v0.4.1 + github.com/mssola/user_agent v0.4.1 // indirect github.com/mxmCherry/openrtb v11.0.0+incompatible github.com/onsi/ginkgo v1.10.1 // indirect github.com/onsi/gomega v1.7.0 // indirect diff --git a/pbsmetrics/config/metrics.go b/metrics/config/metrics.go similarity index 73% rename from pbsmetrics/config/metrics.go rename to metrics/config/metrics.go index b97654a672b..c1f726e904e 100644 --- a/pbsmetrics/config/metrics.go +++ b/metrics/config/metrics.go @@ -4,10 +4,10 @@ import ( "time" "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" + prometheusmetrics "github.com/prebid/prebid-server/metrics/prometheus" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" - prometheusmetrics "github.com/prebid/prebid-server/pbsmetrics/prometheus" - metrics "github.com/rcrowley/go-metrics" + gometrics "github.com/rcrowley/go-metrics" influxdb "github.com/vrischmann/go-metrics-influxdb" ) @@ -22,7 +22,7 @@ func NewMetricsEngine(cfg *config.Configuration, adapterList []openrtb_ext.Bidde if cfg.Metrics.Influxdb.Host != "" { // Currently use go-metrics as the metrics piece for influx - returnEngine.GoMetrics = pbsmetrics.NewMetrics(metrics.NewPrefixedRegistry("prebidserver."), adapterList, cfg.Metrics.Disabled) + returnEngine.GoMetrics = metrics.NewMetrics(gometrics.NewPrefixedRegistry("prebidserver."), adapterList, cfg.Metrics.Disabled) engineList = append(engineList, returnEngine.GoMetrics) // Set up the Influx logger go influxdb.InfluxDB( @@ -55,17 +55,17 @@ 
func NewMetricsEngine(cfg *config.Configuration, adapterList []openrtb_ext.Bidde // DetailedMetricsEngine is a MultiMetricsEngine that preserves links to underlying metrics engines. type DetailedMetricsEngine struct { - pbsmetrics.MetricsEngine - GoMetrics *pbsmetrics.Metrics + metrics.MetricsEngine + GoMetrics *metrics.Metrics PrometheusMetrics *prometheusmetrics.Metrics } // MultiMetricsEngine logs metrics to multiple metrics databases The can be useful in transitioning // an instance from one engine to another, you can run both in parallel to verify stats match up. -type MultiMetricsEngine []pbsmetrics.MetricsEngine +type MultiMetricsEngine []metrics.MetricsEngine // RecordRequest across all engines -func (me *MultiMetricsEngine) RecordRequest(labels pbsmetrics.Labels) { +func (me *MultiMetricsEngine) RecordRequest(labels metrics.Labels) { for _, thisME := range *me { thisME.RecordRequest(labels) } @@ -84,49 +84,49 @@ func (me *MultiMetricsEngine) RecordConnectionClose(success bool) { } //RecordsImps records imps with imp types across all metric engines -func (me *MultiMetricsEngine) RecordImps(implabels pbsmetrics.ImpLabels) { +func (me *MultiMetricsEngine) RecordImps(implabels metrics.ImpLabels) { for _, thisME := range *me { thisME.RecordImps(implabels) } } // RecordImps for the legacy endpoint -func (me *MultiMetricsEngine) RecordLegacyImps(labels pbsmetrics.Labels, numImps int) { +func (me *MultiMetricsEngine) RecordLegacyImps(labels metrics.Labels, numImps int) { for _, thisME := range *me { thisME.RecordLegacyImps(labels, numImps) } } // RecordRequestTime across all engines -func (me *MultiMetricsEngine) RecordRequestTime(labels pbsmetrics.Labels, length time.Duration) { +func (me *MultiMetricsEngine) RecordRequestTime(labels metrics.Labels, length time.Duration) { for _, thisME := range *me { thisME.RecordRequestTime(labels, length) } } // RecordStoredDataFetchTime across all engines -func (me *MultiMetricsEngine) RecordStoredDataFetchTime(labels pbsmetrics.StoredDataLabels, length time.Duration) { +func (me *MultiMetricsEngine) RecordStoredDataFetchTime(labels metrics.StoredDataLabels, length time.Duration) { for _, thisME := range *me { thisME.RecordStoredDataFetchTime(labels, length) } } // RecordStoredDataError across all engines -func (me *MultiMetricsEngine) RecordStoredDataError(labels pbsmetrics.StoredDataLabels) { +func (me *MultiMetricsEngine) RecordStoredDataError(labels metrics.StoredDataLabels) { for _, thisME := range *me { thisME.RecordStoredDataError(labels) } } // RecordAdapterPanic across all engines -func (me *MultiMetricsEngine) RecordAdapterPanic(labels pbsmetrics.AdapterLabels) { +func (me *MultiMetricsEngine) RecordAdapterPanic(labels metrics.AdapterLabels) { for _, thisME := range *me { thisME.RecordAdapterPanic(labels) } } // RecordAdapterRequest across all engines -func (me *MultiMetricsEngine) RecordAdapterRequest(labels pbsmetrics.AdapterLabels) { +func (me *MultiMetricsEngine) RecordAdapterRequest(labels metrics.AdapterLabels) { for _, thisME := range *me { thisME.RecordAdapterRequest(labels) } @@ -154,21 +154,21 @@ func (me *MultiMetricsEngine) RecordTLSHandshakeTime(tlsHandshakeTime time.Durat } // RecordAdapterBidReceived across all engines -func (me *MultiMetricsEngine) RecordAdapterBidReceived(labels pbsmetrics.AdapterLabels, bidType openrtb_ext.BidType, hasAdm bool) { +func (me *MultiMetricsEngine) RecordAdapterBidReceived(labels metrics.AdapterLabels, bidType openrtb_ext.BidType, hasAdm bool) { for _, thisME := range *me { 
thisME.RecordAdapterBidReceived(labels, bidType, hasAdm) } } // RecordAdapterPrice across all engines -func (me *MultiMetricsEngine) RecordAdapterPrice(labels pbsmetrics.AdapterLabels, cpm float64) { +func (me *MultiMetricsEngine) RecordAdapterPrice(labels metrics.AdapterLabels, cpm float64) { for _, thisME := range *me { thisME.RecordAdapterPrice(labels, cpm) } } // RecordAdapterTime across all engines -func (me *MultiMetricsEngine) RecordAdapterTime(labels pbsmetrics.AdapterLabels, length time.Duration) { +func (me *MultiMetricsEngine) RecordAdapterTime(labels metrics.AdapterLabels, length time.Duration) { for _, thisME := range *me { thisME.RecordAdapterTime(labels, length) } @@ -182,21 +182,21 @@ func (me *MultiMetricsEngine) RecordCookieSync() { } // RecordStoredReqCacheResult across all engines -func (me *MultiMetricsEngine) RecordStoredReqCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (me *MultiMetricsEngine) RecordStoredReqCacheResult(cacheResult metrics.CacheResult, inc int) { for _, thisME := range *me { thisME.RecordStoredReqCacheResult(cacheResult, inc) } } // RecordStoredImpCacheResult across all engines -func (me *MultiMetricsEngine) RecordStoredImpCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (me *MultiMetricsEngine) RecordStoredImpCacheResult(cacheResult metrics.CacheResult, inc int) { for _, thisME := range *me { thisME.RecordStoredImpCacheResult(cacheResult, inc) } } // RecordAccountCacheResult across all engines -func (me *MultiMetricsEngine) RecordAccountCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (me *MultiMetricsEngine) RecordAccountCacheResult(cacheResult metrics.CacheResult, inc int) { for _, thisME := range *me { thisME.RecordAccountCacheResult(cacheResult, inc) } @@ -210,7 +210,7 @@ func (me *MultiMetricsEngine) RecordAdapterCookieSync(adapter openrtb_ext.Bidder } // RecordUserIDSet across all engines -func (me *MultiMetricsEngine) RecordUserIDSet(userLabels pbsmetrics.UserLabels) { +func (me *MultiMetricsEngine) RecordUserIDSet(userLabels metrics.UserLabels) { for _, thisME := range *me { thisME.RecordUserIDSet(userLabels) } @@ -224,7 +224,7 @@ func (me *MultiMetricsEngine) RecordPrebidCacheRequestTime(success bool, length } // RecordRequestQueueTime across all engines -func (me *MultiMetricsEngine) RecordRequestQueueTime(success bool, requestType pbsmetrics.RequestType, length time.Duration) { +func (me *MultiMetricsEngine) RecordRequestQueueTime(success bool, requestType metrics.RequestType, length time.Duration) { for _, thisME := range *me { thisME.RecordRequestQueueTime(success, requestType, length) } @@ -238,7 +238,7 @@ func (me *MultiMetricsEngine) RecordTimeoutNotice(success bool) { } // RecordRequestPrivacy across all engines -func (me *MultiMetricsEngine) RecordRequestPrivacy(privacy pbsmetrics.PrivacyLabels) { +func (me *MultiMetricsEngine) RecordRequestPrivacy(privacy metrics.PrivacyLabels) { for _, thisME := range *me { thisME.RecordRequestPrivacy(privacy) } @@ -248,7 +248,7 @@ func (me *MultiMetricsEngine) RecordRequestPrivacy(privacy pbsmetrics.PrivacyLab type DummyMetricsEngine struct{} // RecordRequest as a noop -func (me *DummyMetricsEngine) RecordRequest(labels pbsmetrics.Labels) { +func (me *DummyMetricsEngine) RecordRequest(labels metrics.Labels) { } // RecordConnectionAccept as a noop @@ -260,31 +260,31 @@ func (me *DummyMetricsEngine) RecordConnectionClose(success bool) { } // RecordImps as a noop -func (me *DummyMetricsEngine) RecordImps(implabels pbsmetrics.ImpLabels) { +func (me 
*DummyMetricsEngine) RecordImps(implabels metrics.ImpLabels) { } // RecordLegacyImps as a noop -func (me *DummyMetricsEngine) RecordLegacyImps(labels pbsmetrics.Labels, numImps int) { +func (me *DummyMetricsEngine) RecordLegacyImps(labels metrics.Labels, numImps int) { } // RecordRequestTime as a noop -func (me *DummyMetricsEngine) RecordRequestTime(labels pbsmetrics.Labels, length time.Duration) { +func (me *DummyMetricsEngine) RecordRequestTime(labels metrics.Labels, length time.Duration) { } // RecordStoredDataFetchTime as a noop -func (me *DummyMetricsEngine) RecordStoredDataFetchTime(labels pbsmetrics.StoredDataLabels, length time.Duration) { +func (me *DummyMetricsEngine) RecordStoredDataFetchTime(labels metrics.StoredDataLabels, length time.Duration) { } // RecordStoredDataError as a noop -func (me *DummyMetricsEngine) RecordStoredDataError(labels pbsmetrics.StoredDataLabels) { +func (me *DummyMetricsEngine) RecordStoredDataError(labels metrics.StoredDataLabels) { } // RecordAdapterPanic as a noop -func (me *DummyMetricsEngine) RecordAdapterPanic(labels pbsmetrics.AdapterLabels) { +func (me *DummyMetricsEngine) RecordAdapterPanic(labels metrics.AdapterLabels) { } // RecordAdapterRequest as a noop -func (me *DummyMetricsEngine) RecordAdapterRequest(labels pbsmetrics.AdapterLabels) { +func (me *DummyMetricsEngine) RecordAdapterRequest(labels metrics.AdapterLabels) { } // RecordAdapterConnections as a noop @@ -300,15 +300,15 @@ func (me *DummyMetricsEngine) RecordTLSHandshakeTime(tlsHandshakeTime time.Durat } // RecordAdapterBidReceived as a noop -func (me *DummyMetricsEngine) RecordAdapterBidReceived(labels pbsmetrics.AdapterLabels, bidType openrtb_ext.BidType, hasAdm bool) { +func (me *DummyMetricsEngine) RecordAdapterBidReceived(labels metrics.AdapterLabels, bidType openrtb_ext.BidType, hasAdm bool) { } // RecordAdapterPrice as a noop -func (me *DummyMetricsEngine) RecordAdapterPrice(labels pbsmetrics.AdapterLabels, cpm float64) { +func (me *DummyMetricsEngine) RecordAdapterPrice(labels metrics.AdapterLabels, cpm float64) { } // RecordAdapterTime as a noop -func (me *DummyMetricsEngine) RecordAdapterTime(labels pbsmetrics.AdapterLabels, length time.Duration) { +func (me *DummyMetricsEngine) RecordAdapterTime(labels metrics.AdapterLabels, length time.Duration) { } // RecordCookieSync as a noop @@ -320,19 +320,19 @@ func (me *DummyMetricsEngine) RecordAdapterCookieSync(adapter openrtb_ext.Bidder } // RecordUserIDSet as a noop -func (me *DummyMetricsEngine) RecordUserIDSet(userLabels pbsmetrics.UserLabels) { +func (me *DummyMetricsEngine) RecordUserIDSet(userLabels metrics.UserLabels) { } // RecordStoredReqCacheResult as a noop -func (me *DummyMetricsEngine) RecordStoredReqCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (me *DummyMetricsEngine) RecordStoredReqCacheResult(cacheResult metrics.CacheResult, inc int) { } // RecordStoredImpCacheResult as a noop -func (me *DummyMetricsEngine) RecordStoredImpCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (me *DummyMetricsEngine) RecordStoredImpCacheResult(cacheResult metrics.CacheResult, inc int) { } // RecordAccountCacheResult as a noop -func (me *DummyMetricsEngine) RecordAccountCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (me *DummyMetricsEngine) RecordAccountCacheResult(cacheResult metrics.CacheResult, inc int) { } // RecordPrebidCacheRequestTime as a noop @@ -340,7 +340,7 @@ func (me *DummyMetricsEngine) RecordPrebidCacheRequestTime(success bool, length } // RecordRequestQueueTime as 
a noop -func (me *DummyMetricsEngine) RecordRequestQueueTime(success bool, requestType pbsmetrics.RequestType, length time.Duration) { +func (me *DummyMetricsEngine) RecordRequestQueueTime(success bool, requestType metrics.RequestType, length time.Duration) { } // RecordTimeoutNotice as a noop @@ -348,5 +348,5 @@ func (me *DummyMetricsEngine) RecordTimeoutNotice(success bool) { } // RecordRequestPrivacy as a noop -func (me *DummyMetricsEngine) RecordRequestPrivacy(privacy pbsmetrics.PrivacyLabels) { +func (me *DummyMetricsEngine) RecordRequestPrivacy(privacy metrics.PrivacyLabels) { } diff --git a/pbsmetrics/config/metrics_test.go b/metrics/config/metrics_test.go similarity index 60% rename from pbsmetrics/config/metrics_test.go rename to metrics/config/metrics_test.go index f23a6e16f56..5b70b53bb1a 100644 --- a/pbsmetrics/config/metrics_test.go +++ b/metrics/config/metrics_test.go @@ -5,9 +5,9 @@ import ( "time" mainConfig "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" - "github.com/rcrowley/go-metrics" + gometrics "github.com/rcrowley/go-metrics" ) // Start a simple test to insure we get valid MetricsEngines for various configurations @@ -26,7 +26,7 @@ func TestGoMetricsEngine(t *testing.T) { cfg.Metrics.Influxdb.Host = "localhost" adapterList := make([]openrtb_ext.BidderName, 0, 2) testEngine := NewMetricsEngine(&cfg, adapterList) - _, ok := testEngine.MetricsEngine.(*pbsmetrics.Metrics) + _, ok := testEngine.MetricsEngine.(*metrics.Metrics) if !ok { t.Error("Expected a legacy Metrics as MetricsEngine, but didn't get it") } @@ -37,36 +37,36 @@ func TestMultiMetricsEngine(t *testing.T) { cfg := mainConfig.Configuration{} cfg.Metrics.Influxdb.Host = "localhost" adapterList := openrtb_ext.CoreBidderNames() - goEngine := pbsmetrics.NewMetrics(metrics.NewPrefixedRegistry("prebidserver."), adapterList, mainConfig.DisabledMetrics{}) + goEngine := metrics.NewMetrics(gometrics.NewPrefixedRegistry("prebidserver."), adapterList, mainConfig.DisabledMetrics{}) engineList := make(MultiMetricsEngine, 2) engineList[0] = goEngine engineList[1] = &DummyMetricsEngine{} - var metricsEngine pbsmetrics.MetricsEngine + var metricsEngine metrics.MetricsEngine metricsEngine = &engineList - labels := pbsmetrics.Labels{ - Source: pbsmetrics.DemandWeb, - RType: pbsmetrics.ReqTypeORTB2Web, + labels := metrics.Labels{ + Source: metrics.DemandWeb, + RType: metrics.ReqTypeORTB2Web, PubID: "test1", - CookieFlag: pbsmetrics.CookieFlagYes, - RequestStatus: pbsmetrics.RequestStatusOK, + CookieFlag: metrics.CookieFlagYes, + RequestStatus: metrics.RequestStatusOK, } - apnLabels := pbsmetrics.AdapterLabels{ - Source: pbsmetrics.DemandWeb, - RType: pbsmetrics.ReqTypeORTB2Web, + apnLabels := metrics.AdapterLabels{ + Source: metrics.DemandWeb, + RType: metrics.ReqTypeORTB2Web, Adapter: openrtb_ext.BidderAppnexus, PubID: "test1", - CookieFlag: pbsmetrics.CookieFlagYes, - AdapterBids: pbsmetrics.AdapterBidNone, + CookieFlag: metrics.CookieFlagYes, + AdapterBids: metrics.AdapterBidNone, } - pubLabels := pbsmetrics.AdapterLabels{ - Source: pbsmetrics.DemandWeb, - RType: pbsmetrics.ReqTypeORTB2Web, + pubLabels := metrics.AdapterLabels{ + Source: metrics.DemandWeb, + RType: metrics.ReqTypeORTB2Web, Adapter: openrtb_ext.BidderPubmatic, PubID: "test1", - CookieFlag: pbsmetrics.CookieFlagYes, - AdapterBids: pbsmetrics.AdapterBidPresent, + CookieFlag: metrics.CookieFlagYes, + AdapterBids: 
metrics.AdapterBidPresent, } - impTypeLabels := pbsmetrics.ImpLabels{ + impTypeLabels := metrics.ImpLabels{ BannerImps: true, VideoImps: false, AudioImps: true, @@ -84,20 +84,20 @@ func TestMultiMetricsEngine(t *testing.T) { metricsEngine.RecordAdapterTime(pubLabels, time.Millisecond*20) metricsEngine.RecordPrebidCacheRequestTime(true, time.Millisecond*20) } - labelsBlacklist := []pbsmetrics.Labels{ + labelsBlacklist := []metrics.Labels{ { - Source: pbsmetrics.DemandWeb, - RType: pbsmetrics.ReqTypeAMP, + Source: metrics.DemandWeb, + RType: metrics.ReqTypeAMP, PubID: "test2", - CookieFlag: pbsmetrics.CookieFlagYes, - RequestStatus: pbsmetrics.RequestStatusBlacklisted, + CookieFlag: metrics.CookieFlagYes, + RequestStatus: metrics.RequestStatusBlacklisted, }, { - Source: pbsmetrics.DemandWeb, - RType: pbsmetrics.ReqTypeVideo, + Source: metrics.DemandWeb, + RType: metrics.ReqTypeVideo, PubID: "test2", - CookieFlag: pbsmetrics.CookieFlagYes, - RequestStatus: pbsmetrics.RequestStatusBlacklisted, + CookieFlag: metrics.CookieFlagYes, + RequestStatus: metrics.RequestStatusBlacklisted, }, } for _, label := range labelsBlacklist { @@ -111,27 +111,27 @@ func TestMultiMetricsEngine(t *testing.T) { metricsEngine.RecordImps(impTypeLabels) } - metricsEngine.RecordStoredReqCacheResult(pbsmetrics.CacheMiss, 1) - metricsEngine.RecordStoredImpCacheResult(pbsmetrics.CacheMiss, 2) - metricsEngine.RecordAccountCacheResult(pbsmetrics.CacheMiss, 3) - metricsEngine.RecordStoredReqCacheResult(pbsmetrics.CacheHit, 4) - metricsEngine.RecordStoredImpCacheResult(pbsmetrics.CacheHit, 5) - metricsEngine.RecordAccountCacheResult(pbsmetrics.CacheHit, 6) + metricsEngine.RecordStoredReqCacheResult(metrics.CacheMiss, 1) + metricsEngine.RecordStoredImpCacheResult(metrics.CacheMiss, 2) + metricsEngine.RecordAccountCacheResult(metrics.CacheMiss, 3) + metricsEngine.RecordStoredReqCacheResult(metrics.CacheHit, 4) + metricsEngine.RecordStoredImpCacheResult(metrics.CacheHit, 5) + metricsEngine.RecordAccountCacheResult(metrics.CacheHit, 6) - metricsEngine.RecordRequestQueueTime(false, pbsmetrics.ReqTypeVideo, time.Duration(1)) + metricsEngine.RecordRequestQueueTime(false, metrics.ReqTypeVideo, time.Duration(1)) - //Make the metrics engine, instantiated here with goEngine, fill its RequestStatuses[RequestType][pbsmetrics.RequestStatusXX] with the new boolean values added to pbsmetrics.Labels - VerifyMetrics(t, "RequestStatuses.OpenRTB2.OK", goEngine.RequestStatuses[pbsmetrics.ReqTypeORTB2Web][pbsmetrics.RequestStatusOK].Count(), 5) - VerifyMetrics(t, "RequestStatuses.Legacy.OK", goEngine.RequestStatuses[pbsmetrics.ReqTypeLegacy][pbsmetrics.RequestStatusOK].Count(), 0) - VerifyMetrics(t, "RequestStatuses.AMP.OK", goEngine.RequestStatuses[pbsmetrics.ReqTypeAMP][pbsmetrics.RequestStatusOK].Count(), 0) - VerifyMetrics(t, "RequestStatuses.AMP.BlacklistedAcctOrApp", goEngine.RequestStatuses[pbsmetrics.ReqTypeAMP][pbsmetrics.RequestStatusBlacklisted].Count(), 1) - VerifyMetrics(t, "RequestStatuses.Video.OK", goEngine.RequestStatuses[pbsmetrics.ReqTypeVideo][pbsmetrics.RequestStatusOK].Count(), 0) - VerifyMetrics(t, "RequestStatuses.Video.Error", goEngine.RequestStatuses[pbsmetrics.ReqTypeVideo][pbsmetrics.RequestStatusErr].Count(), 0) - VerifyMetrics(t, "RequestStatuses.Video.BadInput", goEngine.RequestStatuses[pbsmetrics.ReqTypeVideo][pbsmetrics.RequestStatusBadInput].Count(), 0) - VerifyMetrics(t, "RequestStatuses.Video.BlacklistedAcctOrApp", goEngine.RequestStatuses[pbsmetrics.ReqTypeVideo][pbsmetrics.RequestStatusBlacklisted].Count(), 1) 
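
The hunks above also show why the rcrowley library moves to the gometrics alias: once the internal package is renamed, it owns the bare metrics identifier. A minimal sketch of the transition pattern described in the MultiMetricsEngine comment, built only from constructors and label values that already appear in this diff (the package main wrapper is illustrative, not part of the change):

package main

import (
	"github.com/prebid/prebid-server/config"
	"github.com/prebid/prebid-server/metrics"
	metricsConf "github.com/prebid/prebid-server/metrics/config"
	"github.com/prebid/prebid-server/openrtb_ext"
	gometrics "github.com/rcrowley/go-metrics" // external library, aliased so the internal metrics package keeps the bare name
)

func main() {
	// Legacy go-metrics backend, constructed the same way NewMetricsEngine does for Influx.
	goEngine := metrics.NewMetrics(gometrics.NewPrefixedRegistry("prebidserver."), openrtb_ext.CoreBidderNames(), config.DisabledMetrics{})

	// MultiMetricsEngine fans every Record* call out to each member engine,
	// so two backends can run side by side while an instance migrates.
	engines := metricsConf.MultiMetricsEngine{goEngine, &metricsConf.DummyMetricsEngine{}}
	var engine metrics.MetricsEngine = &engines

	engine.RecordRequest(metrics.Labels{
		Source:        metrics.DemandWeb,
		RType:         metrics.ReqTypeORTB2Web,
		PubID:         "test1",
		CookieFlag:    metrics.CookieFlagYes,
		RequestStatus: metrics.RequestStatusOK,
	})
}

This mirrors the setup in TestMultiMetricsEngine above; in production, NewMetricsEngine assembles the same kind of engine list from cfg.Metrics.
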
- VerifyMetrics(t, "RequestStatuses.OpenRTB2.Error", goEngine.RequestStatuses[pbsmetrics.ReqTypeORTB2Web][pbsmetrics.RequestStatusErr].Count(), 0) - VerifyMetrics(t, "RequestStatuses.OpenRTB2.BadInput", goEngine.RequestStatuses[pbsmetrics.ReqTypeORTB2Web][pbsmetrics.RequestStatusBadInput].Count(), 0) - VerifyMetrics(t, "RequestStatuses.OpenRTB2.BlacklistedAcctOrApp", goEngine.RequestStatuses[pbsmetrics.ReqTypeORTB2Web][pbsmetrics.RequestStatusBlacklisted].Count(), 0) + //Make the metrics engine, instantiated here with goEngine, fill its RequestStatuses[RequestType][metrics.RequestStatusXX] with the new boolean values added to metrics.Labels + VerifyMetrics(t, "RequestStatuses.OpenRTB2.OK", goEngine.RequestStatuses[metrics.ReqTypeORTB2Web][metrics.RequestStatusOK].Count(), 5) + VerifyMetrics(t, "RequestStatuses.Legacy.OK", goEngine.RequestStatuses[metrics.ReqTypeLegacy][metrics.RequestStatusOK].Count(), 0) + VerifyMetrics(t, "RequestStatuses.AMP.OK", goEngine.RequestStatuses[metrics.ReqTypeAMP][metrics.RequestStatusOK].Count(), 0) + VerifyMetrics(t, "RequestStatuses.AMP.BlacklistedAcctOrApp", goEngine.RequestStatuses[metrics.ReqTypeAMP][metrics.RequestStatusBlacklisted].Count(), 1) + VerifyMetrics(t, "RequestStatuses.Video.OK", goEngine.RequestStatuses[metrics.ReqTypeVideo][metrics.RequestStatusOK].Count(), 0) + VerifyMetrics(t, "RequestStatuses.Video.Error", goEngine.RequestStatuses[metrics.ReqTypeVideo][metrics.RequestStatusErr].Count(), 0) + VerifyMetrics(t, "RequestStatuses.Video.BadInput", goEngine.RequestStatuses[metrics.ReqTypeVideo][metrics.RequestStatusBadInput].Count(), 0) + VerifyMetrics(t, "RequestStatuses.Video.BlacklistedAcctOrApp", goEngine.RequestStatuses[metrics.ReqTypeVideo][metrics.RequestStatusBlacklisted].Count(), 1) + VerifyMetrics(t, "RequestStatuses.OpenRTB2.Error", goEngine.RequestStatuses[metrics.ReqTypeORTB2Web][metrics.RequestStatusErr].Count(), 0) + VerifyMetrics(t, "RequestStatuses.OpenRTB2.BadInput", goEngine.RequestStatuses[metrics.ReqTypeORTB2Web][metrics.RequestStatusBadInput].Count(), 0) + VerifyMetrics(t, "RequestStatuses.OpenRTB2.BlacklistedAcctOrApp", goEngine.RequestStatuses[metrics.ReqTypeORTB2Web][metrics.RequestStatusBlacklisted].Count(), 0) VerifyMetrics(t, "ImpsTypeBanner", goEngine.ImpsTypeBanner.Count(), 5) VerifyMetrics(t, "ImpsTypeVideo", goEngine.ImpsTypeVideo.Count(), 3) @@ -140,27 +140,27 @@ func TestMultiMetricsEngine(t *testing.T) { VerifyMetrics(t, "RecordPrebidCacheRequestTime", goEngine.PrebidCacheRequestTimerSuccess.Count(), 5) - VerifyMetrics(t, "Request", goEngine.RequestStatuses[pbsmetrics.ReqTypeORTB2Web][pbsmetrics.RequestStatusOK].Count(), 5) + VerifyMetrics(t, "Request", goEngine.RequestStatuses[metrics.ReqTypeORTB2Web][metrics.RequestStatusOK].Count(), 5) VerifyMetrics(t, "ImpMeter", goEngine.ImpMeter.Count(), 8) VerifyMetrics(t, "LegacyImpMeter", goEngine.LegacyImpMeter.Count(), 10) VerifyMetrics(t, "NoCookieMeter", goEngine.NoCookieMeter.Count(), 0) VerifyMetrics(t, "AdapterMetrics.Pubmatic.GotBidsMeter", goEngine.AdapterMetrics[openrtb_ext.BidderPubmatic].GotBidsMeter.Count(), 5) VerifyMetrics(t, "AdapterMetrics.Pubmatic.NoBidMeter", goEngine.AdapterMetrics[openrtb_ext.BidderPubmatic].NoBidMeter.Count(), 0) - for _, err := range pbsmetrics.AdapterErrors() { + for _, err := range metrics.AdapterErrors() { VerifyMetrics(t, "AdapterMetrics.Pubmatic.Request.ErrorMeter."+string(err), goEngine.AdapterMetrics[openrtb_ext.BidderPubmatic].ErrorMeters[err].Count(), 0) } VerifyMetrics(t, "AdapterMetrics.AppNexus.GotBidsMeter", 
goEngine.AdapterMetrics[openrtb_ext.BidderAppnexus].GotBidsMeter.Count(), 0) VerifyMetrics(t, "AdapterMetrics.AppNexus.NoBidMeter", goEngine.AdapterMetrics[openrtb_ext.BidderAppnexus].NoBidMeter.Count(), 5) - VerifyMetrics(t, "RecordRequestQueueTime.Video.Rejected", goEngine.RequestsQueueTimer[pbsmetrics.ReqTypeVideo][false].Count(), 1) - VerifyMetrics(t, "RecordRequestQueueTime.Video.Accepted", goEngine.RequestsQueueTimer[pbsmetrics.ReqTypeVideo][true].Count(), 0) + VerifyMetrics(t, "RecordRequestQueueTime.Video.Rejected", goEngine.RequestsQueueTimer[metrics.ReqTypeVideo][false].Count(), 1) + VerifyMetrics(t, "RecordRequestQueueTime.Video.Accepted", goEngine.RequestsQueueTimer[metrics.ReqTypeVideo][true].Count(), 0) - VerifyMetrics(t, "StoredReqCache.Miss", goEngine.StoredReqCacheMeter[pbsmetrics.CacheMiss].Count(), 1) - VerifyMetrics(t, "StoredImpCache.Miss", goEngine.StoredImpCacheMeter[pbsmetrics.CacheMiss].Count(), 2) - VerifyMetrics(t, "AccountCache.Miss", goEngine.AccountCacheMeter[pbsmetrics.CacheMiss].Count(), 3) - VerifyMetrics(t, "StoredReqCache.Hit", goEngine.StoredReqCacheMeter[pbsmetrics.CacheHit].Count(), 4) - VerifyMetrics(t, "StoredImpCache.Hit", goEngine.StoredImpCacheMeter[pbsmetrics.CacheHit].Count(), 5) - VerifyMetrics(t, "AccountCache.Hit", goEngine.AccountCacheMeter[pbsmetrics.CacheHit].Count(), 6) + VerifyMetrics(t, "StoredReqCache.Miss", goEngine.StoredReqCacheMeter[metrics.CacheMiss].Count(), 1) + VerifyMetrics(t, "StoredImpCache.Miss", goEngine.StoredImpCacheMeter[metrics.CacheMiss].Count(), 2) + VerifyMetrics(t, "AccountCache.Miss", goEngine.AccountCacheMeter[metrics.CacheMiss].Count(), 3) + VerifyMetrics(t, "StoredReqCache.Hit", goEngine.StoredReqCacheMeter[metrics.CacheHit].Count(), 4) + VerifyMetrics(t, "StoredImpCache.Hit", goEngine.StoredImpCacheMeter[metrics.CacheHit].Count(), 5) + VerifyMetrics(t, "AccountCache.Hit", goEngine.AccountCacheMeter[metrics.CacheHit].Count(), 6) } func VerifyMetrics(t *testing.T, name string, actual int64, expected int64) { diff --git a/pbsmetrics/go_metrics.go b/metrics/go_metrics.go similarity index 99% rename from pbsmetrics/go_metrics.go rename to metrics/go_metrics.go index 4d5dd3cb43c..ac7ed0691c4 100644 --- a/pbsmetrics/go_metrics.go +++ b/metrics/go_metrics.go @@ -1,4 +1,4 @@ -package pbsmetrics +package metrics import ( "fmt" diff --git a/pbsmetrics/go_metrics_test.go b/metrics/go_metrics_test.go similarity index 99% rename from pbsmetrics/go_metrics_test.go rename to metrics/go_metrics_test.go index dd09c3ec1eb..2d0b9097b11 100644 --- a/pbsmetrics/go_metrics_test.go +++ b/metrics/go_metrics_test.go @@ -1,4 +1,4 @@ -package pbsmetrics +package metrics import ( "testing" diff --git a/pbsmetrics/metrics.go b/metrics/metrics.go similarity index 99% rename from pbsmetrics/metrics.go rename to metrics/metrics.go index 2397364bb04..62de3afac21 100644 --- a/pbsmetrics/metrics.go +++ b/metrics/metrics.go @@ -1,4 +1,4 @@ -package pbsmetrics +package metrics import ( "time" diff --git a/pbsmetrics/metrics_mock.go b/metrics/metrics_mock.go similarity index 99% rename from pbsmetrics/metrics_mock.go rename to metrics/metrics_mock.go index e8c0ee1350e..c852eb47d24 100644 --- a/pbsmetrics/metrics_mock.go +++ b/metrics/metrics_mock.go @@ -1,4 +1,4 @@ -package pbsmetrics +package metrics import ( "time" diff --git a/pbsmetrics/prometheus/preload.go b/metrics/prometheus/preload.go similarity index 98% rename from pbsmetrics/prometheus/preload.go rename to metrics/prometheus/preload.go index 2f54de385e2..f4dfe43469d 100644 --- 
a/pbsmetrics/prometheus/preload.go +++ b/metrics/prometheus/preload.go @@ -1,7 +1,7 @@ package prometheusmetrics import ( - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prometheus/client_golang/prometheus" ) @@ -157,7 +157,7 @@ func preloadLabelValues(m *Metrics) { //to minimize memory usage, queuedTimeout metric is now supported for video endpoint only //boolean value represents 2 general request statuses: accepted and rejected preloadLabelValuesForHistogram(m.requestsQueueTimer, map[string][]string{ - requestTypeLabel: {string(pbsmetrics.ReqTypeVideo)}, + requestTypeLabel: {string(metrics.ReqTypeVideo)}, requestStatusLabel: {requestSuccessLabel, requestRejectLabel}, }) diff --git a/pbsmetrics/prometheus/preload_test.go b/metrics/prometheus/preload_test.go similarity index 100% rename from pbsmetrics/prometheus/preload_test.go rename to metrics/prometheus/preload_test.go diff --git a/pbsmetrics/prometheus/prometheus.go b/metrics/prometheus/prometheus.go similarity index 91% rename from pbsmetrics/prometheus/prometheus.go rename to metrics/prometheus/prometheus.go index a7aabd1fe76..1e384c0e438 100644 --- a/pbsmetrics/prometheus/prometheus.go +++ b/metrics/prometheus/prometheus.go @@ -5,8 +5,8 @@ import ( "time" "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prometheus/client_golang/prometheus" ) @@ -429,26 +429,26 @@ func (m *Metrics) RecordConnectionClose(success bool) { } } -func (m *Metrics) RecordRequest(labels pbsmetrics.Labels) { +func (m *Metrics) RecordRequest(labels metrics.Labels) { m.requests.With(prometheus.Labels{ requestTypeLabel: string(labels.RType), requestStatusLabel: string(labels.RequestStatus), }).Inc() - if labels.CookieFlag == pbsmetrics.CookieFlagNo { + if labels.CookieFlag == metrics.CookieFlagNo { m.requestsWithoutCookie.With(prometheus.Labels{ requestTypeLabel: string(labels.RType), }).Inc() } - if labels.PubID != pbsmetrics.PublisherUnknown { + if labels.PubID != metrics.PublisherUnknown { m.accountRequests.With(prometheus.Labels{ accountLabel: labels.PubID, }).Inc() } } -func (m *Metrics) RecordImps(labels pbsmetrics.ImpLabels) { +func (m *Metrics) RecordImps(labels metrics.ImpLabels) { m.impressions.With(prometheus.Labels{ isBannerLabel: strconv.FormatBool(labels.BannerImps), isVideoLabel: strconv.FormatBool(labels.VideoImps), @@ -457,73 +457,73 @@ func (m *Metrics) RecordImps(labels pbsmetrics.ImpLabels) { }).Inc() } -func (m *Metrics) RecordLegacyImps(labels pbsmetrics.Labels, numImps int) { +func (m *Metrics) RecordLegacyImps(labels metrics.Labels, numImps int) { m.impressionsLegacy.Add(float64(numImps)) } -func (m *Metrics) RecordRequestTime(labels pbsmetrics.Labels, length time.Duration) { - if labels.RequestStatus == pbsmetrics.RequestStatusOK { +func (m *Metrics) RecordRequestTime(labels metrics.Labels, length time.Duration) { + if labels.RequestStatus == metrics.RequestStatusOK { m.requestsTimer.With(prometheus.Labels{ requestTypeLabel: string(labels.RType), }).Observe(length.Seconds()) } } -func (m *Metrics) RecordStoredDataFetchTime(labels pbsmetrics.StoredDataLabels, length time.Duration) { +func (m *Metrics) RecordStoredDataFetchTime(labels metrics.StoredDataLabels, length time.Duration) { switch labels.DataType { - case pbsmetrics.AccountDataType: + case metrics.AccountDataType: m.storedAccountFetchTimer.With(prometheus.Labels{ 
storedDataFetchTypeLabel: string(labels.DataFetchType), }).Observe(length.Seconds()) - case pbsmetrics.AMPDataType: + case metrics.AMPDataType: m.storedAMPFetchTimer.With(prometheus.Labels{ storedDataFetchTypeLabel: string(labels.DataFetchType), }).Observe(length.Seconds()) - case pbsmetrics.CategoryDataType: + case metrics.CategoryDataType: m.storedCategoryFetchTimer.With(prometheus.Labels{ storedDataFetchTypeLabel: string(labels.DataFetchType), }).Observe(length.Seconds()) - case pbsmetrics.RequestDataType: + case metrics.RequestDataType: m.storedRequestFetchTimer.With(prometheus.Labels{ storedDataFetchTypeLabel: string(labels.DataFetchType), }).Observe(length.Seconds()) - case pbsmetrics.VideoDataType: + case metrics.VideoDataType: m.storedVideoFetchTimer.With(prometheus.Labels{ storedDataFetchTypeLabel: string(labels.DataFetchType), }).Observe(length.Seconds()) } } -func (m *Metrics) RecordStoredDataError(labels pbsmetrics.StoredDataLabels) { +func (m *Metrics) RecordStoredDataError(labels metrics.StoredDataLabels) { switch labels.DataType { - case pbsmetrics.AccountDataType: + case metrics.AccountDataType: m.storedAccountErrors.With(prometheus.Labels{ storedDataErrorLabel: string(labels.Error), }).Inc() - case pbsmetrics.AMPDataType: + case metrics.AMPDataType: m.storedAMPErrors.With(prometheus.Labels{ storedDataErrorLabel: string(labels.Error), }).Inc() - case pbsmetrics.CategoryDataType: + case metrics.CategoryDataType: m.storedCategoryErrors.With(prometheus.Labels{ storedDataErrorLabel: string(labels.Error), }).Inc() - case pbsmetrics.RequestDataType: + case metrics.RequestDataType: m.storedRequestErrors.With(prometheus.Labels{ storedDataErrorLabel: string(labels.Error), }).Inc() - case pbsmetrics.VideoDataType: + case metrics.VideoDataType: m.storedVideoErrors.With(prometheus.Labels{ storedDataErrorLabel: string(labels.Error), }).Inc() } } -func (m *Metrics) RecordAdapterRequest(labels pbsmetrics.AdapterLabels) { +func (m *Metrics) RecordAdapterRequest(labels metrics.AdapterLabels) { m.adapterRequests.With(prometheus.Labels{ adapterLabel: string(labels.Adapter), cookieLabel: string(labels.CookieFlag), - hasBidsLabel: strconv.FormatBool(labels.AdapterBids == pbsmetrics.AdapterBidPresent), + hasBidsLabel: strconv.FormatBool(labels.AdapterBids == metrics.AdapterBidPresent), }).Inc() for err := range labels.AdapterErrors { @@ -564,13 +564,13 @@ func (m *Metrics) RecordTLSHandshakeTime(tlsHandshakeTime time.Duration) { m.tlsHandhakeTimer.Observe(tlsHandshakeTime.Seconds()) } -func (m *Metrics) RecordAdapterPanic(labels pbsmetrics.AdapterLabels) { +func (m *Metrics) RecordAdapterPanic(labels metrics.AdapterLabels) { m.adapterPanics.With(prometheus.Labels{ adapterLabel: string(labels.Adapter), }).Inc() } -func (m *Metrics) RecordAdapterBidReceived(labels pbsmetrics.AdapterLabels, bidType openrtb_ext.BidType, hasAdm bool) { +func (m *Metrics) RecordAdapterBidReceived(labels metrics.AdapterLabels, bidType openrtb_ext.BidType, hasAdm bool) { markupDelivery := markupDeliveryNurl if hasAdm { markupDelivery = markupDeliveryAdm @@ -582,13 +582,13 @@ func (m *Metrics) RecordAdapterBidReceived(labels pbsmetrics.AdapterLabels, bidT }).Inc() } -func (m *Metrics) RecordAdapterPrice(labels pbsmetrics.AdapterLabels, cpm float64) { +func (m *Metrics) RecordAdapterPrice(labels metrics.AdapterLabels, cpm float64) { m.adapterPrices.With(prometheus.Labels{ adapterLabel: string(labels.Adapter), }).Observe(cpm) } -func (m *Metrics) RecordAdapterTime(labels pbsmetrics.AdapterLabels, length time.Duration) { 
+func (m *Metrics) RecordAdapterTime(labels metrics.AdapterLabels, length time.Duration) { if len(labels.AdapterErrors) == 0 { m.adapterRequestsTimer.With(prometheus.Labels{ adapterLabel: string(labels.Adapter), @@ -607,7 +607,7 @@ func (m *Metrics) RecordAdapterCookieSync(adapter openrtb_ext.BidderName, privac }).Inc() } -func (m *Metrics) RecordUserIDSet(labels pbsmetrics.UserLabels) { +func (m *Metrics) RecordUserIDSet(labels metrics.UserLabels) { adapter := string(labels.Bidder) if adapter != "" { m.adapterUserSync.With(prometheus.Labels{ @@ -617,19 +617,19 @@ func (m *Metrics) RecordUserIDSet(labels pbsmetrics.UserLabels) { } } -func (m *Metrics) RecordStoredReqCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (m *Metrics) RecordStoredReqCacheResult(cacheResult metrics.CacheResult, inc int) { m.storedRequestCacheResult.With(prometheus.Labels{ cacheResultLabel: string(cacheResult), }).Add(float64(inc)) } -func (m *Metrics) RecordStoredImpCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (m *Metrics) RecordStoredImpCacheResult(cacheResult metrics.CacheResult, inc int) { m.storedImpressionsCacheResult.With(prometheus.Labels{ cacheResultLabel: string(cacheResult), }).Add(float64(inc)) } -func (m *Metrics) RecordAccountCacheResult(cacheResult pbsmetrics.CacheResult, inc int) { +func (m *Metrics) RecordAccountCacheResult(cacheResult metrics.CacheResult, inc int) { m.accountCacheResult.With(prometheus.Labels{ cacheResultLabel: string(cacheResult), }).Add(float64(inc)) @@ -641,7 +641,7 @@ func (m *Metrics) RecordPrebidCacheRequestTime(success bool, length time.Duratio }).Observe(length.Seconds()) } -func (m *Metrics) RecordRequestQueueTime(success bool, requestType pbsmetrics.RequestType, length time.Duration) { +func (m *Metrics) RecordRequestQueueTime(success bool, requestType metrics.RequestType, length time.Duration) { successLabelFormatted := requestRejectLabel if success { successLabelFormatted = requestSuccessLabel @@ -664,7 +664,7 @@ func (m *Metrics) RecordTimeoutNotice(success bool) { } } -func (m *Metrics) RecordRequestPrivacy(privacy pbsmetrics.PrivacyLabels) { +func (m *Metrics) RecordRequestPrivacy(privacy metrics.PrivacyLabels) { if privacy.CCPAProvided { m.privacyCCPA.With(prometheus.Labels{ sourceLabel: sourceRequest, diff --git a/pbsmetrics/prometheus/prometheus_test.go b/metrics/prometheus/prometheus_test.go similarity index 83% rename from pbsmetrics/prometheus/prometheus_test.go rename to metrics/prometheus/prometheus_test.go index bdd1bc7a77a..b2ffff34850 100644 --- a/pbsmetrics/prometheus/prometheus_test.go +++ b/metrics/prometheus/prometheus_test.go @@ -6,8 +6,8 @@ import ( "time" "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" "github.com/prometheus/client_golang/prometheus" dto "github.com/prometheus/client_model/go" "github.com/stretchr/testify/assert" @@ -138,10 +138,10 @@ func TestConnectionMetrics(t *testing.T) { func TestRequestMetric(t *testing.T) { m := createMetricsForTesting() - requestType := pbsmetrics.ReqTypeORTB2Web - requestStatus := pbsmetrics.RequestStatusBlacklisted + requestType := metrics.ReqTypeORTB2Web + requestStatus := metrics.RequestStatusBlacklisted - m.RecordRequest(pbsmetrics.Labels{ + m.RecordRequest(metrics.Labels{ RType: requestType, RequestStatus: requestStatus, }) @@ -156,11 +156,11 @@ func TestRequestMetric(t *testing.T) { } func TestRequestMetricWithoutCookie(t *testing.T) { - 
requestType := pbsmetrics.ReqTypeORTB2Web - performTest := func(m *Metrics, cookieFlag pbsmetrics.CookieFlag) { - m.RecordRequest(pbsmetrics.Labels{ + requestType := metrics.ReqTypeORTB2Web + performTest := func(m *Metrics, cookieFlag metrics.CookieFlag) { + m.RecordRequest(metrics.Labels{ RType: requestType, - RequestStatus: pbsmetrics.RequestStatusBlacklisted, + RequestStatus: metrics.RequestStatusBlacklisted, CookieFlag: cookieFlag, }) } @@ -168,27 +168,27 @@ func TestRequestMetricWithoutCookie(t *testing.T) { testCases := []struct { description string testCase func(m *Metrics) - cookieFlag pbsmetrics.CookieFlag + cookieFlag metrics.CookieFlag expectedCount float64 }{ { description: "Yes", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagYes) + performTest(m, metrics.CookieFlagYes) }, expectedCount: 0, }, { description: "No", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagNo) + performTest(m, metrics.CookieFlagNo) }, expectedCount: 1, }, { description: "Unknown", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagUnknown) + performTest(m, metrics.CookieFlagUnknown) }, expectedCount: 0, }, @@ -210,9 +210,9 @@ func TestRequestMetricWithoutCookie(t *testing.T) { func TestAccountMetric(t *testing.T) { knownPubID := "knownPublisher" performTest := func(m *Metrics, pubID string) { - m.RecordRequest(pbsmetrics.Labels{ - RType: pbsmetrics.ReqTypeORTB2Web, - RequestStatus: pbsmetrics.RequestStatusBlacklisted, + m.RecordRequest(metrics.Labels{ + RType: metrics.ReqTypeORTB2Web, + RequestStatus: metrics.RequestStatusBlacklisted, PubID: pubID, }) } @@ -232,7 +232,7 @@ func TestAccountMetric(t *testing.T) { { description: "Unknown", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.PublisherUnknown) + performTest(m, metrics.PublisherUnknown) }, expectedCount: 0, }, @@ -253,7 +253,7 @@ func TestAccountMetric(t *testing.T) { func TestImpressionsMetric(t *testing.T) { performTest := func(m *Metrics, isBanner, isVideo, isAudio, isNative bool) { - m.RecordImps(pbsmetrics.ImpLabels{ + m.RecordImps(metrics.ImpLabels{ BannerImps: isBanner, VideoImps: isVideo, AudioImps: isAudio, @@ -357,7 +357,7 @@ func TestImpressionsMetric(t *testing.T) { func TestLegacyImpressionsMetric(t *testing.T) { m := createMetricsForTesting() - m.RecordLegacyImps(pbsmetrics.Labels{}, 42) + m.RecordLegacyImps(metrics.Labels{}, 42) expectedCount := float64(42) assertCounterValue(t, "", "impressionsLegacy", m.impressionsLegacy, @@ -365,9 +365,9 @@ func TestLegacyImpressionsMetric(t *testing.T) { } func TestRequestTimeMetric(t *testing.T) { - requestType := pbsmetrics.ReqTypeORTB2Web - performTest := func(m *Metrics, requestStatus pbsmetrics.RequestStatus, timeInMs float64) { - m.RecordRequestTime(pbsmetrics.Labels{ + requestType := metrics.ReqTypeORTB2Web + performTest := func(m *Metrics, requestStatus metrics.RequestStatus, timeInMs float64) { + m.RecordRequestTime(metrics.Labels{ RType: requestType, RequestStatus: requestStatus, }, time.Duration(timeInMs)*time.Millisecond) @@ -382,7 +382,7 @@ func TestRequestTimeMetric(t *testing.T) { { description: "Success", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.RequestStatusOK, 500) + performTest(m, metrics.RequestStatusOK, 500) }, expectedCount: 1, expectedSum: 0.5, @@ -390,7 +390,7 @@ func TestRequestTimeMetric(t *testing.T) { { description: "Error", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.RequestStatusErr, 500) + performTest(m, metrics.RequestStatusErr, 500) }, expectedCount: 0, expectedSum: 0, @@ 
-410,58 +410,58 @@ func TestRequestTimeMetric(t *testing.T) { func TestRecordStoredDataFetchTime(t *testing.T) { tests := []struct { description string - dataType pbsmetrics.StoredDataType - fetchType pbsmetrics.StoredDataFetchType + dataType metrics.StoredDataType + fetchType metrics.StoredDataFetchType }{ { description: "Update stored account histogram with all label", - dataType: pbsmetrics.AccountDataType, - fetchType: pbsmetrics.FetchAll, + dataType: metrics.AccountDataType, + fetchType: metrics.FetchAll, }, { description: "Update stored AMP histogram with all label", - dataType: pbsmetrics.AMPDataType, - fetchType: pbsmetrics.FetchAll, + dataType: metrics.AMPDataType, + fetchType: metrics.FetchAll, }, { description: "Update stored category histogram with all label", - dataType: pbsmetrics.CategoryDataType, - fetchType: pbsmetrics.FetchAll, + dataType: metrics.CategoryDataType, + fetchType: metrics.FetchAll, }, { description: "Update stored request histogram with all label", - dataType: pbsmetrics.RequestDataType, - fetchType: pbsmetrics.FetchAll, + dataType: metrics.RequestDataType, + fetchType: metrics.FetchAll, }, { description: "Update stored video histogram with all label", - dataType: pbsmetrics.VideoDataType, - fetchType: pbsmetrics.FetchAll, + dataType: metrics.VideoDataType, + fetchType: metrics.FetchAll, }, { description: "Update stored account histogram with delta label", - dataType: pbsmetrics.AccountDataType, - fetchType: pbsmetrics.FetchDelta, + dataType: metrics.AccountDataType, + fetchType: metrics.FetchDelta, }, { description: "Update stored AMP histogram with delta label", - dataType: pbsmetrics.AMPDataType, - fetchType: pbsmetrics.FetchDelta, + dataType: metrics.AMPDataType, + fetchType: metrics.FetchDelta, }, { description: "Update stored category histogram with delta label", - dataType: pbsmetrics.CategoryDataType, - fetchType: pbsmetrics.FetchDelta, + dataType: metrics.CategoryDataType, + fetchType: metrics.FetchDelta, }, { description: "Update stored request histogram with delta label", - dataType: pbsmetrics.RequestDataType, - fetchType: pbsmetrics.FetchDelta, + dataType: metrics.RequestDataType, + fetchType: metrics.FetchDelta, }, { description: "Update stored video histogram with delta label", - dataType: pbsmetrics.VideoDataType, - fetchType: pbsmetrics.FetchDelta, + dataType: metrics.VideoDataType, + fetchType: metrics.FetchDelta, }, } @@ -469,22 +469,22 @@ func TestRecordStoredDataFetchTime(t *testing.T) { m := createMetricsForTesting() fetchTime := time.Duration(0.5 * float64(time.Second)) - m.RecordStoredDataFetchTime(pbsmetrics.StoredDataLabels{ + m.RecordStoredDataFetchTime(metrics.StoredDataLabels{ DataType: tt.dataType, DataFetchType: tt.fetchType, }, fetchTime) var metricsTimer *prometheus.HistogramVec switch tt.dataType { - case pbsmetrics.AccountDataType: + case metrics.AccountDataType: metricsTimer = m.storedAccountFetchTimer - case pbsmetrics.AMPDataType: + case metrics.AMPDataType: metricsTimer = m.storedAMPFetchTimer - case pbsmetrics.CategoryDataType: + case metrics.CategoryDataType: metricsTimer = m.storedCategoryFetchTimer - case pbsmetrics.RequestDataType: + case metrics.RequestDataType: metricsTimer = m.storedRequestFetchTimer - case pbsmetrics.VideoDataType: + case metrics.VideoDataType: metricsTimer = m.storedVideoFetchTimer } @@ -499,90 +499,90 @@ func TestRecordStoredDataFetchTime(t *testing.T) { func TestRecordStoredDataError(t *testing.T) { tests := []struct { description string - dataType pbsmetrics.StoredDataType - errorType 
pbsmetrics.StoredDataError + dataType metrics.StoredDataType + errorType metrics.StoredDataError metricName string }{ { description: "Update stored_account_errors counter with network label", - dataType: pbsmetrics.AccountDataType, - errorType: pbsmetrics.StoredDataErrorNetwork, + dataType: metrics.AccountDataType, + errorType: metrics.StoredDataErrorNetwork, metricName: "stored_account_errors", }, { description: "Update stored_amp_errors counter with network label", - dataType: pbsmetrics.AMPDataType, - errorType: pbsmetrics.StoredDataErrorNetwork, + dataType: metrics.AMPDataType, + errorType: metrics.StoredDataErrorNetwork, metricName: "stored_amp_errors", }, { description: "Update stored_category_errors counter with network label", - dataType: pbsmetrics.CategoryDataType, - errorType: pbsmetrics.StoredDataErrorNetwork, + dataType: metrics.CategoryDataType, + errorType: metrics.StoredDataErrorNetwork, metricName: "stored_category_errors", }, { description: "Update stored_request_errors counter with network label", - dataType: pbsmetrics.RequestDataType, - errorType: pbsmetrics.StoredDataErrorNetwork, + dataType: metrics.RequestDataType, + errorType: metrics.StoredDataErrorNetwork, metricName: "stored_request_errors", }, { description: "Update stored_video_errors counter with network label", - dataType: pbsmetrics.VideoDataType, - errorType: pbsmetrics.StoredDataErrorNetwork, + dataType: metrics.VideoDataType, + errorType: metrics.StoredDataErrorNetwork, metricName: "stored_video_errors", }, { description: "Update stored_account_errors counter with undefined label", - dataType: pbsmetrics.AccountDataType, - errorType: pbsmetrics.StoredDataErrorUndefined, + dataType: metrics.AccountDataType, + errorType: metrics.StoredDataErrorUndefined, metricName: "stored_account_errors", }, { description: "Update stored_amp_errors counter with undefined label", - dataType: pbsmetrics.AMPDataType, - errorType: pbsmetrics.StoredDataErrorUndefined, + dataType: metrics.AMPDataType, + errorType: metrics.StoredDataErrorUndefined, metricName: "stored_amp_errors", }, { description: "Update stored_category_errors counter with undefined label", - dataType: pbsmetrics.CategoryDataType, - errorType: pbsmetrics.StoredDataErrorUndefined, + dataType: metrics.CategoryDataType, + errorType: metrics.StoredDataErrorUndefined, metricName: "stored_category_errors", }, { description: "Update stored_request_errors counter with undefined label", - dataType: pbsmetrics.RequestDataType, - errorType: pbsmetrics.StoredDataErrorUndefined, + dataType: metrics.RequestDataType, + errorType: metrics.StoredDataErrorUndefined, metricName: "stored_request_errors", }, { description: "Update stored_video_errors counter with undefined label", - dataType: pbsmetrics.VideoDataType, - errorType: pbsmetrics.StoredDataErrorUndefined, + dataType: metrics.VideoDataType, + errorType: metrics.StoredDataErrorUndefined, metricName: "stored_video_errors", }, } for _, tt := range tests { m := createMetricsForTesting() - m.RecordStoredDataError(pbsmetrics.StoredDataLabels{ + m.RecordStoredDataError(metrics.StoredDataLabels{ DataType: tt.dataType, Error: tt.errorType, }) var metricsCounter *prometheus.CounterVec switch tt.dataType { - case pbsmetrics.AccountDataType: + case metrics.AccountDataType: metricsCounter = m.storedAccountErrors - case pbsmetrics.AMPDataType: + case metrics.AMPDataType: metricsCounter = m.storedAMPErrors - case pbsmetrics.CategoryDataType: + case metrics.CategoryDataType: metricsCounter = m.storedCategoryErrors - case 
pbsmetrics.RequestDataType: + case metrics.RequestDataType: metricsCounter = m.storedRequestErrors - case pbsmetrics.VideoDataType: + case metrics.VideoDataType: metricsCounter = m.storedVideoErrors } @@ -597,7 +597,7 @@ func TestRecordStoredDataError(t *testing.T) { func TestAdapterBidReceivedMetric(t *testing.T) { adapterName := "anyName" performTest := func(m *Metrics, hasAdm bool) { - labels := pbsmetrics.AdapterLabels{ + labels := metrics.AdapterLabels{ Adapter: openrtb_ext.BidderName(adapterName), } bidType := openrtb_ext.BidTypeBanner @@ -653,7 +653,7 @@ func TestRecordAdapterPriceMetric(t *testing.T) { adapterName := "anyName" cpm := float64(42) - m.RecordAdapterPrice(pbsmetrics.AdapterLabels{ + m.RecordAdapterPrice(metrics.AdapterLabels{ Adapter: openrtb_ext.BidderName(adapterName), }, cpm) @@ -665,8 +665,8 @@ func TestRecordAdapterPriceMetric(t *testing.T) { func TestAdapterRequestMetrics(t *testing.T) { adapterName := "anyName" - performTest := func(m *Metrics, cookieFlag pbsmetrics.CookieFlag, adapterBids pbsmetrics.AdapterBid) { - labels := pbsmetrics.AdapterLabels{ + performTest := func(m *Metrics, cookieFlag metrics.CookieFlag, adapterBids metrics.AdapterBid) { + labels := metrics.AdapterLabels{ Adapter: openrtb_ext.BidderName(adapterName), CookieFlag: cookieFlag, AdapterBids: adapterBids, @@ -686,7 +686,7 @@ func TestAdapterRequestMetrics(t *testing.T) { { description: "No Cookie & No Bids", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagNo, pbsmetrics.AdapterBidNone) + performTest(m, metrics.CookieFlagNo, metrics.AdapterBidNone) }, expectedCount: 1, expectedCookieNoCount: 1, @@ -697,7 +697,7 @@ func TestAdapterRequestMetrics(t *testing.T) { { description: "Unknown Cookie & No Bids", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagUnknown, pbsmetrics.AdapterBidNone) + performTest(m, metrics.CookieFlagUnknown, metrics.AdapterBidNone) }, expectedCount: 1, expectedCookieNoCount: 0, @@ -708,7 +708,7 @@ func TestAdapterRequestMetrics(t *testing.T) { { description: "Has Cookie & No Bids", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagYes, pbsmetrics.AdapterBidNone) + performTest(m, metrics.CookieFlagYes, metrics.AdapterBidNone) }, expectedCount: 1, expectedCookieNoCount: 0, @@ -719,7 +719,7 @@ func TestAdapterRequestMetrics(t *testing.T) { { description: "No Cookie & Bids Present", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagNo, pbsmetrics.AdapterBidPresent) + performTest(m, metrics.CookieFlagNo, metrics.AdapterBidPresent) }, expectedCount: 1, expectedCookieNoCount: 1, @@ -730,7 +730,7 @@ func TestAdapterRequestMetrics(t *testing.T) { { description: "Unknown Cookie & Bids Present", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagUnknown, pbsmetrics.AdapterBidPresent) + performTest(m, metrics.CookieFlagUnknown, metrics.AdapterBidPresent) }, expectedCount: 1, expectedCookieNoCount: 0, @@ -741,7 +741,7 @@ func TestAdapterRequestMetrics(t *testing.T) { { description: "Has Cookie & Bids Present", testCase: func(m *Metrics) { - performTest(m, pbsmetrics.CookieFlagYes, pbsmetrics.AdapterBidPresent) + performTest(m, metrics.CookieFlagYes, metrics.AdapterBidPresent) }, expectedCount: 1, expectedCookieNoCount: 0, @@ -780,11 +780,11 @@ func TestAdapterRequestMetrics(t *testing.T) { if label.GetName() == cookieLabel { switch label.GetValue() { - case string(pbsmetrics.CookieFlagNo): + case string(metrics.CookieFlagNo): totalCookieNoCount += value - case string(pbsmetrics.CookieFlagYes): + 
case string(metrics.CookieFlagYes): totalCookieYesCount += value - case string(pbsmetrics.CookieFlagUnknown): + case string(metrics.CookieFlagUnknown): totalCookieUnknownCount += value } } @@ -801,12 +801,12 @@ func TestAdapterRequestMetrics(t *testing.T) { func TestAdapterRequestErrorMetrics(t *testing.T) { adapterName := "anyName" - performTest := func(m *Metrics, adapterErrors map[pbsmetrics.AdapterError]struct{}) { - labels := pbsmetrics.AdapterLabels{ + performTest := func(m *Metrics, adapterErrors map[metrics.AdapterError]struct{}) { + labels := metrics.AdapterLabels{ Adapter: openrtb_ext.BidderName(adapterName), AdapterErrors: adapterErrors, - CookieFlag: pbsmetrics.CookieFlagUnknown, - AdapterBids: pbsmetrics.AdapterBidPresent, + CookieFlag: metrics.CookieFlagUnknown, + AdapterBids: metrics.AdapterBidPresent, } m.RecordAdapterRequest(labels) } @@ -828,8 +828,8 @@ func TestAdapterRequestErrorMetrics(t *testing.T) { { description: "Bad Input Error", testCase: func(m *Metrics) { - performTest(m, map[pbsmetrics.AdapterError]struct{}{ - pbsmetrics.AdapterErrorBadInput: {}, + performTest(m, map[metrics.AdapterError]struct{}{ + metrics.AdapterErrorBadInput: {}, }) }, expectedErrorsCount: 1, @@ -838,8 +838,8 @@ func TestAdapterRequestErrorMetrics(t *testing.T) { { description: "Other Error", testCase: func(m *Metrics) { - performTest(m, map[pbsmetrics.AdapterError]struct{}{ - pbsmetrics.AdapterErrorBadServerResponse: {}, + performTest(m, map[metrics.AdapterError]struct{}{ + metrics.AdapterErrorBadServerResponse: {}, }) }, expectedErrorsCount: 1, @@ -866,7 +866,7 @@ func TestAdapterRequestErrorMetrics(t *testing.T) { value := m.GetCounter().GetValue() errorsCount += value for _, label := range m.GetLabel() { - if label.GetName() == adapterErrorLabel && label.GetValue() == string(pbsmetrics.AdapterErrorBadInput) { + if label.GetName() == adapterErrorLabel && label.GetValue() == string(metrics.AdapterErrorBadInput) { badInputErrorsCount += value } } @@ -879,8 +879,8 @@ func TestAdapterRequestErrorMetrics(t *testing.T) { func TestAdapterTimeMetric(t *testing.T) { adapterName := "anyName" - performTest := func(m *Metrics, timeInMs float64, adapterErrors map[pbsmetrics.AdapterError]struct{}) { - m.RecordAdapterTime(pbsmetrics.AdapterLabels{ + performTest := func(m *Metrics, timeInMs float64, adapterErrors map[metrics.AdapterError]struct{}) { + m.RecordAdapterTime(metrics.AdapterLabels{ Adapter: openrtb_ext.BidderName(adapterName), AdapterErrors: adapterErrors, }, time.Duration(timeInMs)*time.Millisecond) @@ -895,7 +895,7 @@ func TestAdapterTimeMetric(t *testing.T) { { description: "Success", testCase: func(m *Metrics) { - performTest(m, 500, map[pbsmetrics.AdapterError]struct{}{}) + performTest(m, 500, map[metrics.AdapterError]struct{}{}) }, expectedCount: 1, expectedSum: 0.5, @@ -903,8 +903,8 @@ func TestAdapterTimeMetric(t *testing.T) { { description: "Error", testCase: func(m *Metrics) { - performTest(m, 500, map[pbsmetrics.AdapterError]struct{}{ - pbsmetrics.AdapterErrorTimeout: {}, + performTest(m, 500, map[metrics.AdapterError]struct{}{ + metrics.AdapterErrorTimeout: {}, }) }, expectedCount: 0, @@ -941,9 +941,9 @@ func TestAdapterCookieSyncMetric(t *testing.T) { func TestUserIDSetMetric(t *testing.T) { m := createMetricsForTesting() adapterName := "anyName" - action := pbsmetrics.RequestActionSet + action := metrics.RequestActionSet - m.RecordUserIDSet(pbsmetrics.UserLabels{ + m.RecordUserIDSet(metrics.UserLabels{ Bidder: openrtb_ext.BidderName(adapterName), Action: action, }) @@ -959,9 
+959,9 @@ func TestUserIDSetMetric(t *testing.T) { func TestUserIDSetMetricWhenBidderEmpty(t *testing.T) { m := createMetricsForTesting() - action := pbsmetrics.RequestActionErr + action := metrics.RequestActionErr - m.RecordUserIDSet(pbsmetrics.UserLabels{ + m.RecordUserIDSet(metrics.UserLabels{ Bidder: openrtb_ext.BidderName(""), Action: action, }) @@ -978,7 +978,7 @@ func TestAdapterPanicMetric(t *testing.T) { m := createMetricsForTesting() adapterName := "anyName" - m.RecordAdapterPanic(pbsmetrics.AdapterLabels{ + m.RecordAdapterPanic(metrics.AdapterLabels{ Adapter: openrtb_ext.BidderName(adapterName), }) @@ -995,18 +995,18 @@ func TestStoredReqCacheResultMetric(t *testing.T) { hitCount := 42 missCount := 108 - m.RecordStoredReqCacheResult(pbsmetrics.CacheHit, hitCount) - m.RecordStoredReqCacheResult(pbsmetrics.CacheMiss, missCount) + m.RecordStoredReqCacheResult(metrics.CacheHit, hitCount) + m.RecordStoredReqCacheResult(metrics.CacheMiss, missCount) assertCounterVecValue(t, "", "storedRequestCacheResult:hit", m.storedRequestCacheResult, float64(hitCount), prometheus.Labels{ - cacheResultLabel: string(pbsmetrics.CacheHit), + cacheResultLabel: string(metrics.CacheHit), }) assertCounterVecValue(t, "", "storedRequestCacheResult:miss", m.storedRequestCacheResult, float64(missCount), prometheus.Labels{ - cacheResultLabel: string(pbsmetrics.CacheMiss), + cacheResultLabel: string(metrics.CacheMiss), }) } @@ -1015,18 +1015,18 @@ func TestStoredImpCacheResultMetric(t *testing.T) { hitCount := 41 missCount := 107 - m.RecordStoredImpCacheResult(pbsmetrics.CacheHit, hitCount) - m.RecordStoredImpCacheResult(pbsmetrics.CacheMiss, missCount) + m.RecordStoredImpCacheResult(metrics.CacheHit, hitCount) + m.RecordStoredImpCacheResult(metrics.CacheMiss, missCount) assertCounterVecValue(t, "", "storedImpressionsCacheResult:hit", m.storedImpressionsCacheResult, float64(hitCount), prometheus.Labels{ - cacheResultLabel: string(pbsmetrics.CacheHit), + cacheResultLabel: string(metrics.CacheHit), }) assertCounterVecValue(t, "", "storedImpressionsCacheResult:miss", m.storedImpressionsCacheResult, float64(missCount), prometheus.Labels{ - cacheResultLabel: string(pbsmetrics.CacheMiss), + cacheResultLabel: string(metrics.CacheMiss), }) } @@ -1035,18 +1035,18 @@ func TestAccountCacheResultMetric(t *testing.T) { hitCount := 37 missCount := 92 - m.RecordAccountCacheResult(pbsmetrics.CacheHit, hitCount) - m.RecordAccountCacheResult(pbsmetrics.CacheMiss, missCount) + m.RecordAccountCacheResult(metrics.CacheHit, hitCount) + m.RecordAccountCacheResult(metrics.CacheMiss, missCount) assertCounterVecValue(t, "", "accountCacheResult:hit", m.accountCacheResult, float64(hitCount), prometheus.Labels{ - cacheResultLabel: string(pbsmetrics.CacheHit), + cacheResultLabel: string(metrics.CacheHit), }) assertCounterVecValue(t, "", "accountCacheResult:miss", m.accountCacheResult, float64(missCount), prometheus.Labels{ - cacheResultLabel: string(pbsmetrics.CacheMiss), + cacheResultLabel: string(metrics.CacheMiss), }) } @@ -1080,9 +1080,9 @@ func TestPrebidCacheRequestTimeMetric(t *testing.T) { func TestMetricAccumulationSpotCheck(t *testing.T) { m := createMetricsForTesting() - m.RecordLegacyImps(pbsmetrics.Labels{}, 1) - m.RecordLegacyImps(pbsmetrics.Labels{}, 2) - m.RecordLegacyImps(pbsmetrics.Labels{}, 3) + m.RecordLegacyImps(metrics.Labels{}, 1) + m.RecordLegacyImps(metrics.Labels{}, 2) + m.RecordLegacyImps(metrics.Labels{}, 3) expectedValue := float64(1 + 2 + 3) assertCounterValue(t, "", "impressionsLegacy", m.impressionsLegacy, @@ 
-1090,7 +1090,7 @@ func TestMetricAccumulationSpotCheck(t *testing.T) { } func TestRecordRequestQueueTimeMetric(t *testing.T) { - performTest := func(m *Metrics, requestStatus bool, requestType pbsmetrics.RequestType, timeInSec float64) { + performTest := func(m *Metrics, requestStatus bool, requestType metrics.RequestType, timeInSec float64) { m.RecordRequestQueueTime(requestStatus, requestType, time.Duration(timeInSec*float64(time.Second))) } @@ -1105,7 +1105,7 @@ func TestRecordRequestQueueTimeMetric(t *testing.T) { description: "Success", status: requestSuccessLabel, testCase: func(m *Metrics) { - performTest(m, true, pbsmetrics.ReqTypeVideo, 2) + performTest(m, true, metrics.ReqTypeVideo, 2) }, expectedCount: 1, expectedSum: 2, @@ -1114,7 +1114,7 @@ func TestRecordRequestQueueTimeMetric(t *testing.T) { description: "TimeoutError", status: requestRejectLabel, testCase: func(m *Metrics) { - performTest(m, false, pbsmetrics.ReqTypeVideo, 50) + performTest(m, false, metrics.ReqTypeVideo, 50) }, expectedCount: 1, expectedSum: 50, @@ -1358,45 +1358,45 @@ func TestRecordRequestPrivacy(t *testing.T) { m := createMetricsForTesting() // CCPA - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ CCPAEnforced: true, CCPAProvided: true, }) - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ CCPAEnforced: true, CCPAProvided: false, }) - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ CCPAEnforced: false, CCPAProvided: true, }) // COPPA - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ COPPAEnforced: true, }) // LMT - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ LMTEnforced: true, }) // GDPR - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionErr, + GDPRTCFVersion: metrics.TCFVersionErr, }) - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV1, + GDPRTCFVersion: metrics.TCFVersionV1, }) - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV2, + GDPRTCFVersion: metrics.TCFVersionV2, }) - m.RecordRequestPrivacy(pbsmetrics.PrivacyLabels{ + m.RecordRequestPrivacy(metrics.PrivacyLabels{ GDPREnforced: true, - GDPRTCFVersion: pbsmetrics.TCFVersionV1, + GDPRTCFVersion: metrics.TCFVersionV1, }) assertCounterVecValue(t, "", "privacy_ccpa", m.privacyCCPA, diff --git a/pbsmetrics/prometheus/type_conversion.go b/metrics/prometheus/type_conversion.go similarity index 83% rename from pbsmetrics/prometheus/type_conversion.go rename to metrics/prometheus/type_conversion.go index 496f5076f9c..0e5c80636db 100644 --- a/pbsmetrics/prometheus/type_conversion.go +++ b/metrics/prometheus/type_conversion.go @@ -3,12 +3,12 @@ package prometheusmetrics import ( "strconv" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/openrtb_ext" - "github.com/prebid/prebid-server/pbsmetrics" ) func actionsAsString() []string { - values := pbsmetrics.RequestActions() + values := metrics.RequestActions() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -26,7 +26,7 @@ func adaptersAsString() []string { } func 
adapterErrorsAsString() []string { - values := pbsmetrics.AdapterErrors() + values := metrics.AdapterErrors() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -42,7 +42,7 @@ func boolValuesAsString() []string { } func cookieTypesAsString() []string { - values := pbsmetrics.CookieTypes() + values := metrics.CookieTypes() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -51,7 +51,7 @@ func cookieTypesAsString() []string { } func cacheResultsAsString() []string { - values := pbsmetrics.CacheResults() + values := metrics.CacheResults() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -60,7 +60,7 @@ func cacheResultsAsString() []string { } func requestStatusesAsString() []string { - values := pbsmetrics.RequestStatuses() + values := metrics.RequestStatuses() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -69,7 +69,7 @@ func requestStatusesAsString() []string { } func requestTypesAsString() []string { - values := pbsmetrics.RequestTypes() + values := metrics.RequestTypes() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -78,7 +78,7 @@ func requestTypesAsString() []string { } func storedDataTypesAsString() []string { - values := pbsmetrics.StoredDataTypes() + values := metrics.StoredDataTypes() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -87,7 +87,7 @@ func storedDataTypesAsString() []string { } func storedDataFetchTypesAsString() []string { - values := pbsmetrics.StoredDataFetchTypes() + values := metrics.StoredDataFetchTypes() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -96,7 +96,7 @@ func storedDataFetchTypesAsString() []string { } func storedDataErrorsAsString() []string { - values := pbsmetrics.StoredDataErrors() + values := metrics.StoredDataErrors() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) @@ -105,7 +105,7 @@ func storedDataErrorsAsString() []string { } func tcfVersionsAsString() []string { - values := pbsmetrics.TCFVersions() + values := metrics.TCFVersions() valuesAsString := make([]string, len(values)) for i, v := range values { valuesAsString[i] = string(v) diff --git a/pbs/usersync.go b/pbs/usersync.go index 75ef16776c7..4cac3544804 100644 --- a/pbs/usersync.go +++ b/pbs/usersync.go @@ -13,7 +13,7 @@ import ( "github.com/julienschmidt/httprouter" "github.com/prebid/prebid-server/analytics" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/server/ssl" "github.com/prebid/prebid-server/usersync" ) @@ -40,7 +40,7 @@ type UserSyncDeps struct { ExternalUrl string RecaptchaSecret string HostCookieConfig *config.HostCookie - MetricsEngine pbsmetrics.MetricsEngine + MetricsEngine metrics.MetricsEngine PBSAnalytics analytics.PBSAnalyticsModule } diff --git a/prebid_cache_client/client.go b/prebid_cache_client/client.go index 43dfbc9abde..88c197cd0b8 100644 --- a/prebid_cache_client/client.go +++ b/prebid_cache_client/client.go @@ -13,7 +13,7 @@ import ( "time" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" 
"github.com/buger/jsonparser" "github.com/golang/glog" @@ -51,7 +51,7 @@ type Cacheable struct { Timestamp int64 `json:"timestamp,omitempty"` // this is "/vtrack" specific } -func NewClient(httpClient *http.Client, conf *config.Cache, extCache *config.ExternalCache, metrics pbsmetrics.MetricsEngine) Client { +func NewClient(httpClient *http.Client, conf *config.Cache, extCache *config.ExternalCache, metrics metrics.MetricsEngine) Client { return &clientImpl{ httpClient: httpClient, putUrl: conf.GetBaseURL() + "/cache", @@ -68,7 +68,7 @@ type clientImpl struct { externalCacheScheme string externalCacheHost string externalCachePath string - metrics pbsmetrics.MetricsEngine + metrics metrics.MetricsEngine } func (c *clientImpl) GetExtCacheData() (string, string, string) { diff --git a/prebid_cache_client/client_test.go b/prebid_cache_client/client_test.go index 0bd29699703..1ba30a6faab 100644 --- a/prebid_cache_client/client_test.go +++ b/prebid_cache_client/client_test.go @@ -11,8 +11,8 @@ import ( "testing" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" + "github.com/prebid/prebid-server/metrics" + metricsConf "github.com/prebid/prebid-server/metrics/config" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -25,7 +25,7 @@ func TestEmptyPut(t *testing.T) { server := httptest.NewServer(handler) defer server.Close() - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} client := &clientImpl{ httpClient: server.Client(), @@ -47,7 +47,7 @@ func TestBadResponse(t *testing.T) { server := httptest.NewServer(handler) defer server.Close() - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} metricsMock.On("RecordPrebidCacheRequestTime", true, mock.Anything).Once() client := &clientImpl{ @@ -115,7 +115,7 @@ func TestCancelledContext(t *testing.T) { // Run Tests for _, testCase := range testCases { - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} metricsMock.On("RecordPrebidCacheRequestTime", false, mock.Anything).Once() client := &clientImpl{ @@ -142,7 +142,7 @@ func TestSuccessfulPut(t *testing.T) { server := httptest.NewServer(newHandler(2)) defer server.Close() - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} metricsMock.On("RecordPrebidCacheRequestTime", true, mock.Anything).Once() client := &clientImpl{ diff --git a/router/aspects/request_timeout_handler.go b/router/aspects/request_timeout_handler.go index 23d6cef9faf..39a4341f995 100644 --- a/router/aspects/request_timeout_handler.go +++ b/router/aspects/request_timeout_handler.go @@ -1,15 +1,16 @@ package aspects import ( - "github.com/julienschmidt/httprouter" - "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" "net/http" "strconv" "time" + + "github.com/julienschmidt/httprouter" + "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" ) -func QueuedRequestTimeout(f httprouter.Handle, reqTimeoutHeaders config.RequestTimeoutHeaders, metricsEngine pbsmetrics.MetricsEngine, requestType pbsmetrics.RequestType) httprouter.Handle { +func QueuedRequestTimeout(f httprouter.Handle, reqTimeoutHeaders config.RequestTimeoutHeaders, metricsEngine metrics.MetricsEngine, requestType metrics.RequestType) httprouter.Handle { return func(w http.ResponseWriter, r *http.Request, params 
httprouter.Params) { diff --git a/router/aspects/request_timeout_handler_test.go b/router/aspects/request_timeout_handler_test.go index cdc920c4263..26e546dcd40 100644 --- a/router/aspects/request_timeout_handler_test.go +++ b/router/aspects/request_timeout_handler_test.go @@ -1,15 +1,16 @@ package aspects import ( - "github.com/julienschmidt/httprouter" - "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" "net/http" "net/http/httptest" "strconv" "testing" "time" + "github.com/julienschmidt/httprouter" + "github.com/prebid/prebid-server/config" + "github.com/prebid/prebid-server/metrics" + "github.com/stretchr/testify/assert" ) @@ -75,7 +76,7 @@ func TestAny(t *testing.T) { for _, test := range testCases { reqTimeFloat, _ := strconv.ParseFloat(test.reqTimeInQueue, 64) - result := ExecuteAspectRequest(t, test.reqTimeInQueue, test.reqTimeOut, test.setHeaders, pbsmetrics.ReqTypeVideo, test.requestStatusMetrics, reqTimeFloat) + result := ExecuteAspectRequest(t, test.reqTimeInQueue, test.reqTimeOut, test.setHeaders, metrics.ReqTypeVideo, test.requestStatusMetrics, reqTimeFloat) assert.Equal(t, test.expectedRespCode, result.Code, test.expectedRespCodeMessage) assert.Equal(t, test.expectedRespBody, string(result.Body.Bytes()), test.expectedRespBodyMessage) } @@ -89,7 +90,7 @@ func MockHandler(w http.ResponseWriter, r *http.Request, _ httprouter.Params) { w.Write([]byte("Executed")) } -func ExecuteAspectRequest(t *testing.T, timeInQueue string, reqTimeout string, setHeaders bool, requestType pbsmetrics.RequestType, status bool, requestDuration float64) *httptest.ResponseRecorder { +func ExecuteAspectRequest(t *testing.T, timeInQueue string, reqTimeout string, setHeaders bool, requestType metrics.RequestType, status bool, requestDuration float64) *httptest.ResponseRecorder { rw := httptest.NewRecorder() req, err := http.NewRequest("POST", "/test", nil) if err != nil { @@ -102,7 +103,7 @@ func ExecuteAspectRequest(t *testing.T, timeInQueue string, reqTimeout string, s customHeaders := config.RequestTimeoutHeaders{reqTimeInQueueHeaderName, reqTimeoutHeaderName} - metrics := &pbsmetrics.MetricsEngineMock{} + metrics := &metrics.MetricsEngineMock{} metrics.On("RecordRequestQueueTime", status, requestType, time.Duration(requestDuration*float64(time.Second))).Once() diff --git a/router/router.go b/router/router.go index 1a968ec5a76..c398f528ae4 100644 --- a/router/router.go +++ b/router/router.go @@ -16,7 +16,7 @@ import ( "github.com/prebid/prebid-server/endpoints/events" "github.com/prebid/prebid-server/errortypes" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/adapters" "github.com/prebid/prebid-server/adapters/adform" @@ -39,9 +39,9 @@ import ( "github.com/prebid/prebid-server/endpoints/openrtb2" "github.com/prebid/prebid-server/exchange" "github.com/prebid/prebid-server/gdpr" + metricsConf "github.com/prebid/prebid-server/metrics/config" "github.com/prebid/prebid-server/openrtb_ext" "github.com/prebid/prebid-server/pbs" - metricsConf "github.com/prebid/prebid-server/pbsmetrics/config" pbc "github.com/prebid/prebid-server/prebid_cache_client" "github.com/prebid/prebid-server/router/aspects" "github.com/prebid/prebid-server/server/ssl" @@ -271,7 +271,7 @@ func New(cfg *config.Configuration, rateConvertor *currency.RateConverter) (r *R requestTimeoutHeaders := config.RequestTimeoutHeaders{} if cfg.RequestTimeoutHeaders != requestTimeoutHeaders { - videoEndpoint = 
aspects.QueuedRequestTimeout(videoEndpoint, cfg.RequestTimeoutHeaders, r.MetricsEngine, pbsmetrics.ReqTypeVideo) + videoEndpoint = aspects.QueuedRequestTimeout(videoEndpoint, cfg.RequestTimeoutHeaders, r.MetricsEngine, metrics.ReqTypeVideo) } r.POST("/auction", endpoints.Auction(cfg, syncers, gdprPerms, r.MetricsEngine, dataCache, exchanges)) diff --git a/server/listener.go b/server/listener.go index bd58e67332f..c1f57723da3 100644 --- a/server/listener.go +++ b/server/listener.go @@ -6,19 +6,19 @@ import ( "time" "github.com/golang/glog" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" ) // monitorableListener tracks any opened connections in the metrics. type monitorableListener struct { net.Listener - metrics pbsmetrics.MetricsEngine + metrics metrics.MetricsEngine } // monitorableConnection tracks any closed connections in the metrics. type monitorableConnection struct { net.Conn - metrics pbsmetrics.MetricsEngine + metrics metrics.MetricsEngine } func (l *monitorableConnection) Close() error { diff --git a/server/listener_test.go b/server/listener_test.go index 100a6bba003..f91dbddbc54 100644 --- a/server/listener_test.go +++ b/server/listener_test.go @@ -7,8 +7,8 @@ import ( "time" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" - metrics "github.com/rcrowley/go-metrics" + "github.com/prebid/prebid-server/metrics" + gometrics "github.com/rcrowley/go-metrics" ) func TestNormalConnectionMetrics(t *testing.T) { @@ -24,8 +24,8 @@ func TestCloseErrorMetrics(t *testing.T) { } func doTest(t *testing.T, allowAccept bool, allowClose bool) { - reg := metrics.NewRegistry() - me := pbsmetrics.NewMetrics(reg, nil, config.DisabledMetrics{}) + reg := gometrics.NewRegistry() + me := metrics.NewMetrics(reg, nil, config.DisabledMetrics{}) var listener net.Listener = &mockListener{ listenSuccess: allowAccept, diff --git a/server/prometheus.go b/server/prometheus.go index 67ea4403ebd..4b9f7037d0a 100644 --- a/server/prometheus.go +++ b/server/prometheus.go @@ -8,8 +8,8 @@ import ( "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prebid/prebid-server/config" - metricsconfig "github.com/prebid/prebid-server/pbsmetrics/config" - prometheusMetrics "github.com/prebid/prebid-server/pbsmetrics/prometheus" + metricsconfig "github.com/prebid/prebid-server/metrics/config" + prometheusMetrics "github.com/prebid/prebid-server/metrics/prometheus" ) func newPrometheusServer(cfg *config.Configuration, metrics *metricsconfig.DetailedMetricsEngine) *http.Server { diff --git a/server/server.go b/server/server.go index 4929eafd232..46b7e5ae610 100644 --- a/server/server.go +++ b/server/server.go @@ -14,8 +14,8 @@ import ( "github.com/NYTimes/gziphandler" "github.com/golang/glog" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" - metricsconfig "github.com/prebid/prebid-server/pbsmetrics/config" + "github.com/prebid/prebid-server/metrics" + metricsconfig "github.com/prebid/prebid-server/metrics/config" ) // Listen blocks forever, serving PBS requests on the given port. This will block forever, until the process is shut down. 
@@ -93,7 +93,7 @@ func runServer(server *http.Server, name string, listener net.Listener) { glog.Errorf("%s server quit with error: %v", name, err) } -func newListener(address string, metrics pbsmetrics.MetricsEngine) (net.Listener, error) { +func newListener(address string, metrics metrics.MetricsEngine) (net.Listener, error) { ln, err := net.Listen("tcp", address) if err != nil { return nil, fmt.Errorf("Error listening for TCP connections on %s: %v", address, err) diff --git a/stored_requests/config/config.go b/stored_requests/config/config.go index 1036cf0af96..7f92f2521cd 100644 --- a/stored_requests/config/config.go +++ b/stored_requests/config/config.go @@ -6,7 +6,7 @@ import ( "net/http" "time" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/golang/glog" "github.com/julienschmidt/httprouter" @@ -42,7 +42,7 @@ type dbConnection struct { // // As a side-effect, it will add some endpoints to the router if the config calls for it. // In the future we should look for ways to simplify this so that it's not doing two things. -func CreateStoredRequests(cfg *config.StoredRequests, metricsEngine pbsmetrics.MetricsEngine, client *http.Client, router *httprouter.Router, dbc *dbConnection) (fetcher stored_requests.AllFetcher, shutdown func()) { +func CreateStoredRequests(cfg *config.StoredRequests, metricsEngine metrics.MetricsEngine, client *http.Client, router *httprouter.Router, dbc *dbConnection) (fetcher stored_requests.AllFetcher, shutdown func()) { // Create database connection if given options for one if cfg.Postgres.ConnectionInfo.Database != "" { conn := cfg.Postgres.ConnectionInfo.ConnString() @@ -107,7 +107,7 @@ func CreateStoredRequests(cfg *config.StoredRequests, metricsEngine pbsmetrics.M // // As a side-effect, it will add some endpoints to the router if the config calls for it. // In the future we should look for ways to simplify this so that it's not doing two things. 
-func NewStoredRequests(cfg *config.Configuration, metricsEngine pbsmetrics.MetricsEngine, client *http.Client, router *httprouter.Router) (db *sql.DB, shutdown func(), fetcher stored_requests.Fetcher, ampFetcher stored_requests.Fetcher, accountsFetcher stored_requests.AccountFetcher, categoriesFetcher stored_requests.CategoryFetcher, videoFetcher stored_requests.Fetcher) { +func NewStoredRequests(cfg *config.Configuration, metricsEngine metrics.MetricsEngine, client *http.Client, router *httprouter.Router) (db *sql.DB, shutdown func(), fetcher stored_requests.Fetcher, ampFetcher stored_requests.Fetcher, accountsFetcher stored_requests.AccountFetcher, categoriesFetcher stored_requests.CategoryFetcher, videoFetcher stored_requests.Fetcher) { // TODO: Switch this to be set in config defaults //if cfg.CategoryMapping.CacheEvents.Enabled && cfg.CategoryMapping.CacheEvents.Endpoint == "" { // cfg.CategoryMapping.CacheEvents.Endpoint = "/storedrequest/categorymapping" @@ -190,7 +190,7 @@ func newCache(cfg *config.StoredRequests) stored_requests.Cache { return cache } -func newEventProducers(cfg *config.StoredRequests, client *http.Client, db *sql.DB, metricsEngine pbsmetrics.MetricsEngine, router *httprouter.Router) (eventProducers []events.EventProducer) { +func newEventProducers(cfg *config.StoredRequests, client *http.Client, db *sql.DB, metricsEngine metrics.MetricsEngine, router *httprouter.Router) (eventProducers []events.EventProducer) { if cfg.CacheEvents.Enabled { eventProducers = append(eventProducers, newEventsAPI(router, cfg.CacheEvents.Endpoint)) } diff --git a/stored_requests/config/config_test.go b/stored_requests/config/config_test.go index 712fef32db4..6c8cd612299 100644 --- a/stored_requests/config/config_test.go +++ b/stored_requests/config/config_test.go @@ -14,7 +14,7 @@ import ( sqlmock "github.com/DATA-DOG/go-sqlmock" "github.com/julienschmidt/httprouter" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/stored_requests" "github.com/prebid/prebid-server/stored_requests/backends/empty_fetcher" "github.com/prebid/prebid-server/stored_requests/backends/http_fetcher" @@ -79,7 +79,7 @@ func TestNewHTTPEvents(t *testing.T) { }, } - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} evProducers := newEventProducers(cfg, server1.Client(), nil, metricsMock, nil) assertSliceLength(t, evProducers, 1) @@ -119,7 +119,7 @@ func TestNewInMemoryAccountCache(t *testing.T) { } func TestNewPostgresEventProducers(t *testing.T) { - metricsMock := &pbsmetrics.MetricsEngineMock{} + metricsMock := &metrics.MetricsEngineMock{} metricsMock.Mock.On("RecordStoredDataFetchTime", mock.Anything, mock.Anything).Return() metricsMock.Mock.On("RecordStoredDataError", mock.Anything).Return() diff --git a/stored_requests/events/postgres/database.go b/stored_requests/events/postgres/database.go index 8e1269c6904..e769a55585c 100644 --- a/stored_requests/events/postgres/database.go +++ b/stored_requests/events/postgres/database.go @@ -10,7 +10,7 @@ import ( "github.com/golang/glog" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/stored_requests/events" "github.com/prebid/prebid-server/util/timeutil" ) @@ -19,12 +19,12 @@ func bytesNull() []byte { return []byte{'n', 'u', 'l', 'l'} } -var storedDataTypeMetricMap = 
map[config.DataType]pbsmetrics.StoredDataType{ - config.RequestDataType: pbsmetrics.RequestDataType, - config.CategoryDataType: pbsmetrics.CategoryDataType, - config.VideoDataType: pbsmetrics.VideoDataType, - config.AMPRequestDataType: pbsmetrics.AMPDataType, - config.AccountDataType: pbsmetrics.AccountDataType, +var storedDataTypeMetricMap = map[config.DataType]metrics.StoredDataType{ + config.RequestDataType: metrics.RequestDataType, + config.CategoryDataType: metrics.CategoryDataType, + config.VideoDataType: metrics.VideoDataType, + config.AMPRequestDataType: metrics.AMPDataType, + config.AccountDataType: metrics.AccountDataType, } type PostgresEventProducerConfig struct { @@ -34,7 +34,7 @@ type PostgresEventProducerConfig struct { CacheInitTimeout time.Duration CacheUpdateQuery string CacheUpdateTimeout time.Duration - MetricsEngine pbsmetrics.MetricsEngine + MetricsEngine metrics.MetricsEngine } type PostgresEventProducer struct { @@ -83,14 +83,14 @@ func (e *PostgresEventProducer) fetchAll() (fetchErr error) { startTime := e.time.Now().UTC() rows, err := e.cfg.DB.QueryContext(ctx, e.cfg.CacheInitQuery) elapsedTime := time.Since(startTime) - e.recordFetchTime(elapsedTime, pbsmetrics.FetchAll) + e.recordFetchTime(elapsedTime, metrics.FetchAll) if err != nil { glog.Warningf("Failed to fetch all Stored %s data from the DB: %v", e.cfg.RequestType, err) if _, ok := err.(net.Error); ok { - e.recordError(pbsmetrics.StoredDataErrorNetwork) + e.recordError(metrics.StoredDataErrorNetwork) } else { - e.recordError(pbsmetrics.StoredDataErrorUndefined) + e.recordError(metrics.StoredDataErrorUndefined) } return err } @@ -98,13 +98,13 @@ func (e *PostgresEventProducer) fetchAll() (fetchErr error) { defer func() { if err := rows.Close(); err != nil { glog.Warningf("Failed to close the Stored %s DB connection: %v", e.cfg.RequestType, err) - e.recordError(pbsmetrics.StoredDataErrorUndefined) + e.recordError(metrics.StoredDataErrorUndefined) fetchErr = err } }() if err := e.sendEvents(rows); err != nil { glog.Warningf("Failed to load all Stored %s data from the DB: %v", e.cfg.RequestType, err) - e.recordError(pbsmetrics.StoredDataErrorUndefined) + e.recordError(metrics.StoredDataErrorUndefined) return err } @@ -120,14 +120,14 @@ func (e *PostgresEventProducer) fetchDelta() (fetchErr error) { startTime := e.time.Now().UTC() rows, err := e.cfg.DB.QueryContext(ctx, e.cfg.CacheUpdateQuery, e.lastUpdate) elapsedTime := time.Since(startTime) - e.recordFetchTime(elapsedTime, pbsmetrics.FetchDelta) + e.recordFetchTime(elapsedTime, metrics.FetchDelta) if err != nil { glog.Warningf("Failed to fetch updated Stored %s data from the DB: %v", e.cfg.RequestType, err) if _, ok := err.(net.Error); ok { - e.recordError(pbsmetrics.StoredDataErrorNetwork) + e.recordError(metrics.StoredDataErrorNetwork) } else { - e.recordError(pbsmetrics.StoredDataErrorUndefined) + e.recordError(metrics.StoredDataErrorUndefined) } return err } @@ -135,13 +135,13 @@ func (e *PostgresEventProducer) fetchDelta() (fetchErr error) { defer func() { if err := rows.Close(); err != nil { glog.Warningf("Failed to close the Stored %s DB connection: %v", e.cfg.RequestType, err) - e.recordError(pbsmetrics.StoredDataErrorUndefined) + e.recordError(metrics.StoredDataErrorUndefined) fetchErr = err } }() if err := e.sendEvents(rows); err != nil { glog.Warningf("Failed to load updated Stored %s data from the DB: %v", e.cfg.RequestType, err) - e.recordError(pbsmetrics.StoredDataErrorUndefined) + e.recordError(metrics.StoredDataErrorUndefined) return err } @@ 
-149,17 +149,17 @@ func (e *PostgresEventProducer) fetchDelta() (fetchErr error) { return nil } -func (e *PostgresEventProducer) recordFetchTime(elapsedTime time.Duration, fetchType pbsmetrics.StoredDataFetchType) { +func (e *PostgresEventProducer) recordFetchTime(elapsedTime time.Duration, fetchType metrics.StoredDataFetchType) { e.cfg.MetricsEngine.RecordStoredDataFetchTime( - pbsmetrics.StoredDataLabels{ + metrics.StoredDataLabels{ DataType: storedDataTypeMetricMap[e.cfg.RequestType], DataFetchType: fetchType, }, elapsedTime) } -func (e *PostgresEventProducer) recordError(errorType pbsmetrics.StoredDataError) { +func (e *PostgresEventProducer) recordError(errorType metrics.StoredDataError) { e.cfg.MetricsEngine.RecordStoredDataError( - pbsmetrics.StoredDataLabels{ + metrics.StoredDataLabels{ DataType: storedDataTypeMetricMap[e.cfg.RequestType], Error: errorType, }) diff --git a/stored_requests/events/postgres/database_test.go b/stored_requests/events/postgres/database_test.go index 4471ad41638..15d0fbffbc3 100644 --- a/stored_requests/events/postgres/database_test.go +++ b/stored_requests/events/postgres/database_test.go @@ -8,7 +8,7 @@ import ( "time" "github.com/prebid/prebid-server/config" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/stored_requests/events" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -94,10 +94,10 @@ func TestFetchAllSuccess(t *testing.T) { db, dbMock, _ := sqlmock.New() dbMock.ExpectQuery(fakeQueryRegex()).WillReturnRows(tt.giveMockRows) - metricsMock := &pbsmetrics.MetricsEngineMock{} - metricsMock.Mock.On("RecordStoredDataFetchTime", pbsmetrics.StoredDataLabels{ - DataType: pbsmetrics.RequestDataType, - DataFetchType: pbsmetrics.FetchAll, + metricsMock := &metrics.MetricsEngineMock{} + metricsMock.Mock.On("RecordStoredDataFetchTime", metrics.StoredDataLabels{ + DataType: metrics.RequestDataType, + DataFetchType: metrics.FetchAll, }, mock.Anything).Return() eventProducer := NewPostgresEventProducer(PostgresEventProducerConfig{ @@ -141,14 +141,14 @@ func TestFetchAllErrors(t *testing.T) { giveFakeTime time.Time giveTimeoutMS int giveMockRows *sqlmock.Rows - wantRecordedError pbsmetrics.StoredDataError + wantRecordedError metrics.StoredDataError wantLastUpdate time.Time }{ { description: "fetch all timeout", giveFakeTime: time.Date(2020, time.July, 1, 12, 30, 0, 0, time.UTC), giveMockRows: nil, - wantRecordedError: pbsmetrics.StoredDataErrorNetwork, + wantRecordedError: metrics.StoredDataErrorNetwork, wantLastUpdate: time.Time{}, }, { @@ -156,7 +156,7 @@ func TestFetchAllErrors(t *testing.T) { giveFakeTime: time.Date(2020, time.July, 1, 12, 30, 0, 0, time.UTC), giveTimeoutMS: 100, giveMockRows: nil, - wantRecordedError: pbsmetrics.StoredDataErrorUndefined, + wantRecordedError: metrics.StoredDataErrorUndefined, wantLastUpdate: time.Time{}, }, { @@ -166,7 +166,7 @@ func TestFetchAllErrors(t *testing.T) { giveMockRows: sqlmock.NewRows([]string{"id", "data", "dataType"}). AddRow("stored-req-id", "true", "request"). 
RowError(0, errors.New("Some row error.")), - wantRecordedError: pbsmetrics.StoredDataErrorUndefined, + wantRecordedError: metrics.StoredDataErrorUndefined, wantLastUpdate: time.Time{}, }, } @@ -179,13 +179,13 @@ func TestFetchAllErrors(t *testing.T) { dbMock.ExpectQuery(fakeQueryRegex()).WillReturnRows(tt.giveMockRows) } - metricsMock := &pbsmetrics.MetricsEngineMock{} - metricsMock.Mock.On("RecordStoredDataFetchTime", pbsmetrics.StoredDataLabels{ - DataType: pbsmetrics.RequestDataType, - DataFetchType: pbsmetrics.FetchAll, + metricsMock := &metrics.MetricsEngineMock{} + metricsMock.Mock.On("RecordStoredDataFetchTime", metrics.StoredDataLabels{ + DataType: metrics.RequestDataType, + DataFetchType: metrics.FetchAll, }, mock.Anything).Return() - metricsMock.Mock.On("RecordStoredDataError", pbsmetrics.StoredDataLabels{ - DataType: pbsmetrics.RequestDataType, + metricsMock.Mock.On("RecordStoredDataError", metrics.StoredDataLabels{ + DataType: metrics.RequestDataType, Error: tt.wantRecordedError, }).Return() @@ -307,10 +307,10 @@ func TestFetchDeltaSuccess(t *testing.T) { db, dbMock, _ := sqlmock.New() dbMock.ExpectQuery(fakeQueryRegex()).WillReturnRows(tt.giveMockRows) - metricsMock := &pbsmetrics.MetricsEngineMock{} - metricsMock.Mock.On("RecordStoredDataFetchTime", pbsmetrics.StoredDataLabels{ - DataType: pbsmetrics.RequestDataType, - DataFetchType: pbsmetrics.FetchDelta, + metricsMock := &metrics.MetricsEngineMock{} + metricsMock.Mock.On("RecordStoredDataFetchTime", metrics.StoredDataLabels{ + DataType: metrics.RequestDataType, + DataFetchType: metrics.FetchDelta, }, mock.Anything).Return() eventProducer := NewPostgresEventProducer(PostgresEventProducerConfig{ @@ -356,7 +356,7 @@ func TestFetchDeltaErrors(t *testing.T) { giveTimeoutMS int giveLastUpdate time.Time giveMockRows *sqlmock.Rows - wantRecordedError pbsmetrics.StoredDataError + wantRecordedError metrics.StoredDataError wantLastUpdate time.Time }{ { @@ -364,7 +364,7 @@ func TestFetchDeltaErrors(t *testing.T) { giveFakeTime: time.Date(2020, time.July, 1, 12, 30, 0, 0, time.UTC), giveLastUpdate: time.Date(2020, time.June, 30, 6, 0, 0, 0, time.UTC), giveMockRows: nil, - wantRecordedError: pbsmetrics.StoredDataErrorNetwork, + wantRecordedError: metrics.StoredDataErrorNetwork, wantLastUpdate: time.Date(2020, time.June, 30, 6, 0, 0, 0, time.UTC), }, { @@ -373,7 +373,7 @@ func TestFetchDeltaErrors(t *testing.T) { giveTimeoutMS: 100, giveLastUpdate: time.Date(2020, time.June, 30, 6, 0, 0, 0, time.UTC), giveMockRows: nil, - wantRecordedError: pbsmetrics.StoredDataErrorUndefined, + wantRecordedError: metrics.StoredDataErrorUndefined, wantLastUpdate: time.Date(2020, time.June, 30, 6, 0, 0, 0, time.UTC), }, { @@ -384,7 +384,7 @@ func TestFetchDeltaErrors(t *testing.T) { giveMockRows: sqlmock.NewRows([]string{"id", "data", "dataType"}). AddRow("stored-req-id", "true", "request"). 
RowError(0, errors.New("Some row error.")), - wantRecordedError: pbsmetrics.StoredDataErrorUndefined, + wantRecordedError: metrics.StoredDataErrorUndefined, wantLastUpdate: time.Date(2020, time.June, 30, 6, 0, 0, 0, time.UTC), }, } @@ -397,13 +397,13 @@ func TestFetchDeltaErrors(t *testing.T) { dbMock.ExpectQuery(fakeQueryRegex()).WillReturnRows(tt.giveMockRows) } - metricsMock := &pbsmetrics.MetricsEngineMock{} - metricsMock.Mock.On("RecordStoredDataFetchTime", pbsmetrics.StoredDataLabels{ - DataType: pbsmetrics.RequestDataType, - DataFetchType: pbsmetrics.FetchDelta, + metricsMock := &metrics.MetricsEngineMock{} + metricsMock.Mock.On("RecordStoredDataFetchTime", metrics.StoredDataLabels{ + DataType: metrics.RequestDataType, + DataFetchType: metrics.FetchDelta, }, mock.Anything).Return() - metricsMock.Mock.On("RecordStoredDataError", pbsmetrics.StoredDataLabels{ - DataType: pbsmetrics.RequestDataType, + metricsMock.Mock.On("RecordStoredDataError", metrics.StoredDataLabels{ + DataType: metrics.RequestDataType, Error: tt.wantRecordedError, }).Return() diff --git a/stored_requests/fetcher.go b/stored_requests/fetcher.go index 096f9060b6d..865231ee757 100644 --- a/stored_requests/fetcher.go +++ b/stored_requests/fetcher.go @@ -5,7 +5,7 @@ import ( "encoding/json" "fmt" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" ) // Fetcher knows how to fetch Stored Request data by id. @@ -146,14 +146,14 @@ func (c ComposedCache) Save(ctx context.Context, data map[string]json.RawMessage type fetcherWithCache struct { fetcher AllFetcher cache Cache - metricsEngine pbsmetrics.MetricsEngine + metricsEngine metrics.MetricsEngine } // WithCache returns a Fetcher which uses the given Caches before delegating to the original. // This can be called multiple times to compose Cache layers onto the backing Fetcher, though // it is usually more desirable to first compose caches with Compose, ensuring propagation of updates // and invalidations through all cache layers. 
-func WithCache(fetcher AllFetcher, cache Cache, metricsEngine pbsmetrics.MetricsEngine) AllFetcher { +func WithCache(fetcher AllFetcher, cache Cache, metricsEngine metrics.MetricsEngine) AllFetcher { return &fetcherWithCache{ cache: cache, fetcher: fetcher, @@ -171,11 +171,11 @@ func (f *fetcherWithCache) FetchRequests(ctx context.Context, requestIDs []strin leftoverReqs := findLeftovers(requestIDs, requestData) // Record cache hits for stored requests and stored imps - f.metricsEngine.RecordStoredReqCacheResult(pbsmetrics.CacheHit, len(requestIDs)-len(leftoverReqs)) - f.metricsEngine.RecordStoredImpCacheResult(pbsmetrics.CacheHit, len(impIDs)-len(leftoverImps)) + f.metricsEngine.RecordStoredReqCacheResult(metrics.CacheHit, len(requestIDs)-len(leftoverReqs)) + f.metricsEngine.RecordStoredImpCacheResult(metrics.CacheHit, len(impIDs)-len(leftoverImps)) // Record cache misses for stored requests and stored imps - f.metricsEngine.RecordStoredReqCacheResult(pbsmetrics.CacheMiss, len(leftoverReqs)) - f.metricsEngine.RecordStoredImpCacheResult(pbsmetrics.CacheMiss, len(leftoverImps)) + f.metricsEngine.RecordStoredReqCacheResult(metrics.CacheMiss, len(leftoverReqs)) + f.metricsEngine.RecordStoredImpCacheResult(metrics.CacheMiss, len(leftoverImps)) if len(leftoverReqs) > 0 || len(leftoverImps) > 0 { fetcherReqData, fetcherImpData, fetcherErrs := f.fetcher.FetchRequests(ctx, leftoverReqs, leftoverImps) @@ -195,10 +195,10 @@ func (f *fetcherWithCache) FetchAccount(ctx context.Context, accountID string) ( accountData := f.cache.Accounts.Get(ctx, []string{accountID}) // TODO: add metrics if account, ok := accountData[accountID]; ok { - f.metricsEngine.RecordAccountCacheResult(pbsmetrics.CacheHit, 1) + f.metricsEngine.RecordAccountCacheResult(metrics.CacheHit, 1) return account, errs } else { - f.metricsEngine.RecordAccountCacheResult(pbsmetrics.CacheMiss, 1) + f.metricsEngine.RecordAccountCacheResult(metrics.CacheMiss, 1) } account, errs = f.fetcher.FetchAccount(ctx, accountID) if len(errs) == 0 { diff --git a/stored_requests/fetcher_test.go b/stored_requests/fetcher_test.go index 7fb56124db3..e77bc75c310 100644 --- a/stored_requests/fetcher_test.go +++ b/stored_requests/fetcher_test.go @@ -6,17 +6,17 @@ import ( "errors" "testing" - "github.com/prebid/prebid-server/pbsmetrics" + "github.com/prebid/prebid-server/metrics" "github.com/prebid/prebid-server/stored_requests/caches/nil_cache" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) -func setupFetcherWithCacheDeps() (*mockCache, *mockCache, *mockFetcher, AllFetcher, *pbsmetrics.MetricsEngineMock) { +func setupFetcherWithCacheDeps() (*mockCache, *mockCache, *mockFetcher, AllFetcher, *metrics.MetricsEngineMock) { reqCache := &mockCache{} impCache := &mockCache{} - metricsEngine := &pbsmetrics.MetricsEngineMock{} + metricsEngine := &metrics.MetricsEngineMock{} fetcher := &mockFetcher{} afetcherWithCache := WithCache(fetcher, Cache{reqCache, impCache, &nil_cache.NilCache{}}, metricsEngine) @@ -37,10 +37,10 @@ func TestPerfectCache(t *testing.T) { map[string]json.RawMessage{ "known": json.RawMessage(`{}`), }) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheHit, 1) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheMiss, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheHit, 1) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheMiss, 0) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheHit, 1) + metricsEngine.On("RecordStoredReqCacheResult", 
metrics.CacheMiss, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheHit, 1) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheMiss, 0) reqData, impData, errs := aFetcherWithCache.FetchRequests(ctx, reqIDs, impIDs) @@ -77,10 +77,10 @@ func TestImperfectCache(t *testing.T) { "uncached": json.RawMessage(`false`), }) reqCache.On("Save", ctx, map[string]json.RawMessage{}) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheHit, 0) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheMiss, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheHit, 1) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheMiss, 1) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheHit, 0) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheMiss, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheHit, 1) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheMiss, 1) reqData, impData, errs := aFetcherWithCache.FetchRequests(ctx, nil, impIDs) @@ -116,10 +116,10 @@ func TestMissingData(t *testing.T) { reqCache.On("Save", ctx, map[string]json.RawMessage{}, ) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheHit, 0) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheMiss, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheHit, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheMiss, 1) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheHit, 0) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheMiss, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheHit, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheMiss, 1) reqData, impData, errs := aFetcherWithCache.FetchRequests(ctx, nil, impIDs) @@ -144,10 +144,10 @@ func TestCacheSaves(t *testing.T) { }) reqCache.On("Get", ctx, []string(nil)).Return( map[string]json.RawMessage{}) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheHit, 0) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheMiss, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheHit, 2) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheMiss, 0) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheHit, 0) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheMiss, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheHit, 2) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheMiss, 0) _, impData, errs := aFetcherWithCache.FetchRequests(ctx, nil, []string{"abc", "abc"}) @@ -159,9 +159,9 @@ func TestCacheSaves(t *testing.T) { assert.Len(t, errs, 0, "FetchRequests with duplicate IDs shouldn't return an error") } -func setupAccountFetcherWithCacheDeps() (*mockCache, *mockFetcher, AllFetcher, *pbsmetrics.MetricsEngineMock) { +func setupAccountFetcherWithCacheDeps() (*mockCache, *mockFetcher, AllFetcher, *metrics.MetricsEngineMock) { accCache := &mockCache{} - metricsEngine := &pbsmetrics.MetricsEngineMock{} + metricsEngine := &metrics.MetricsEngineMock{} fetcher := &mockFetcher{} afetcherWithCache := WithCache(fetcher, Cache{&nil_cache.NilCache{}, &nil_cache.NilCache{}, accCache}, metricsEngine) @@ -179,7 +179,7 @@ func TestAccountCacheHit(t *testing.T) { "known": json.RawMessage(`true`), }) - metricsEngine.On("RecordAccountCacheResult", pbsmetrics.CacheHit, 1) + metricsEngine.On("RecordAccountCacheResult", metrics.CacheHit, 1) account, errs := aFetcherWithCache.FetchAccount(ctx, "known") 
accCache.AssertExpectations(t) @@ -201,7 +201,7 @@ func TestAccountCacheMiss(t *testing.T) { accCache.On("Get", ctx, uncachedAccounts).Return(map[string]json.RawMessage{}) accCache.On("Save", ctx, uncachedAccountsData) fetcher.On("FetchAccount", ctx, "uncached").Return(uncachedAccountsData["uncached"], []error{}) - metricsEngine.On("RecordAccountCacheResult", pbsmetrics.CacheMiss, 1) + metricsEngine.On("RecordAccountCacheResult", metrics.CacheMiss, 1) account, errs := aFetcherWithCache.FetchAccount(ctx, "uncached") @@ -222,7 +222,7 @@ func TestComposedCache(t *testing.T) { Requests: ComposedCache{c1, c2, c3, c4}, Imps: impCache, } - metricsEngine := &pbsmetrics.MetricsEngineMock{} + metricsEngine := &metrics.MetricsEngineMock{} fetcher := &mockFetcher{} aFetcherWithCache := WithCache(fetcher, cache, metricsEngine) reqIDs := []string{"1", "2", "3"} @@ -242,10 +242,10 @@ func TestComposedCache(t *testing.T) { "3": json.RawMessage(`{"id": "3"}`), }) impCache.On("Get", ctx, []string{}).Return(map[string]json.RawMessage{}) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheHit, 3) - metricsEngine.On("RecordStoredReqCacheResult", pbsmetrics.CacheMiss, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheHit, 0) - metricsEngine.On("RecordStoredImpCacheResult", pbsmetrics.CacheMiss, 0) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheHit, 3) + metricsEngine.On("RecordStoredReqCacheResult", metrics.CacheMiss, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheHit, 0) + metricsEngine.On("RecordStoredImpCacheResult", metrics.CacheMiss, 0) reqData, impData, errs := aFetcherWithCache.FetchRequests(ctx, reqIDs, impIDs)
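
Aside (illustrative only, not part of the diff): after this rename the pattern at every call site is uniform — import github.com/prebid/prebid-server/metrics instead of .../pbsmetrics and reference the same identifiers under the new package name. A minimal sketch of how the updated tests above wire the renamed mock; the package and function names here are hypothetical, but the mock type, method names, and label constants are taken verbatim from the hunks above.

package example

import (
	"testing"

	"github.com/prebid/prebid-server/metrics" // formerly github.com/prebid/prebid-server/pbsmetrics
	"github.com/stretchr/testify/mock"
)

// newMetricsMock sets expectations the same way the updated fetcher and
// prebid_cache_client tests do after the rename.
func newMetricsMock(t *testing.T) *metrics.MetricsEngineMock {
	m := &metrics.MetricsEngineMock{}
	// cache-result metrics now use the metrics.* constants
	m.On("RecordStoredReqCacheResult", metrics.CacheHit, 1)
	m.On("RecordStoredReqCacheResult", metrics.CacheMiss, 0)
	// prebid cache timing expectation, as in the client tests above
	m.On("RecordPrebidCacheRequestTime", true, mock.Anything).Once()
	return m
}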