From dc1093c100d347b7e0d3ff616aeed8eb50093575 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 2 Sep 2024 20:28:34 +0530 Subject: [PATCH 01/66] publisher: log: add tests --- publisher/log/internal/testproto/gen.go | 2 + publisher/log/internal/testproto/test.pb.go | 161 ++++++++++++++++++++ publisher/log/internal/testproto/test.proto | 11 ++ publisher/log/log.go | 16 +- publisher/log/log_test.go | 110 +++++++++++++ 5 files changed, 297 insertions(+), 3 deletions(-) create mode 100644 publisher/log/internal/testproto/gen.go create mode 100644 publisher/log/internal/testproto/test.pb.go create mode 100644 publisher/log/internal/testproto/test.proto create mode 100644 publisher/log/log_test.go diff --git a/publisher/log/internal/testproto/gen.go b/publisher/log/internal/testproto/gen.go new file mode 100644 index 00000000..605b6b00 --- /dev/null +++ b/publisher/log/internal/testproto/gen.go @@ -0,0 +1,2 @@ +//go:generate protoc --go_out=paths=source_relative:. ./test.proto +package testproto diff --git a/publisher/log/internal/testproto/test.pb.go b/publisher/log/internal/testproto/test.pb.go new file mode 100644 index 00000000..726c78b5 --- /dev/null +++ b/publisher/log/internal/testproto/test.pb.go @@ -0,0 +1,161 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.34.2 +// protoc v3.12.4 +// source: test.proto + +package testproto + +import ( + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +type TestEvent struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` + Count int32 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` + Tags []string `protobuf:"bytes,3,rep,name=tags,proto3" json:"tags,omitempty"` +} + +func (x *TestEvent) Reset() { + *x = TestEvent{} + if protoimpl.UnsafeEnabled { + mi := &file_test_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *TestEvent) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*TestEvent) ProtoMessage() {} + +func (x *TestEvent) ProtoReflect() protoreflect.Message { + mi := &file_test_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use TestEvent.ProtoReflect.Descriptor instead. 
+func (*TestEvent) Descriptor() ([]byte, []int) { + return file_test_proto_rawDescGZIP(), []int{0} +} + +func (x *TestEvent) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *TestEvent) GetCount() int32 { + if x != nil { + return x.Count + } + return 0 +} + +func (x *TestEvent) GetTags() []string { + if x != nil { + return x.Tags + } + return nil +} + +var File_test_proto protoreflect.FileDescriptor + +var file_test_proto_rawDesc = []byte{ + 0x0a, 0x0a, 0x74, 0x65, 0x73, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x72, 0x61, + 0x63, 0x63, 0x6f, 0x6f, 0x6e, 0x2e, 0x74, 0x65, 0x73, 0x74, 0x22, 0x57, 0x0a, 0x09, 0x54, 0x65, + 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, + 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, + 0x61, 0x67, 0x73, 0x42, 0x0d, 0x5a, 0x0b, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_test_proto_rawDescOnce sync.Once + file_test_proto_rawDescData = file_test_proto_rawDesc +) + +func file_test_proto_rawDescGZIP() []byte { + file_test_proto_rawDescOnce.Do(func() { + file_test_proto_rawDescData = protoimpl.X.CompressGZIP(file_test_proto_rawDescData) + }) + return file_test_proto_rawDescData +} + +var file_test_proto_msgTypes = make([]protoimpl.MessageInfo, 1) +var file_test_proto_goTypes = []any{ + (*TestEvent)(nil), // 0: raccoon.test.TestEvent +} +var file_test_proto_depIdxs = []int32{ + 0, // [0:0] is the sub-list for method output_type + 0, // [0:0] is the sub-list for method input_type + 0, // [0:0] is the sub-list for extension type_name + 0, // [0:0] is the sub-list for extension extendee + 0, // [0:0] is the sub-list for field type_name +} + +func init() { file_test_proto_init() } +func file_test_proto_init() { + if File_test_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_test_proto_msgTypes[0].Exporter = func(v any, i int) any { + switch v := v.(*TestEvent); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_test_proto_rawDesc, + NumEnums: 0, + NumMessages: 1, + NumExtensions: 0, + NumServices: 0, + }, + GoTypes: file_test_proto_goTypes, + DependencyIndexes: file_test_proto_depIdxs, + MessageInfos: file_test_proto_msgTypes, + }.Build() + File_test_proto = out.File + file_test_proto_rawDesc = nil + file_test_proto_goTypes = nil + file_test_proto_depIdxs = nil +} diff --git a/publisher/log/internal/testproto/test.proto b/publisher/log/internal/testproto/test.proto new file mode 100644 index 00000000..f0a52432 --- /dev/null +++ b/publisher/log/internal/testproto/test.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; + +package raccoon.test; + +option go_package = "./testproto"; + +message TestEvent { + string description = 1; + int32 count = 2; + repeated string tags = 3; +} \ No newline at end of file diff --git a/publisher/log/log.go b/publisher/log/log.go index 1c068805..5de24caf 100644 --- 
a/publisher/log/log.go +++ b/publisher/log/log.go @@ -3,6 +3,7 @@ package log import ( "cmp" "encoding/json" + "fmt" "github.com/raystack/raccoon/logger" pb "github.com/raystack/raccoon/proto" @@ -10,9 +11,13 @@ import ( "github.com/turtleDev/protoraw" ) +type logEmitter func(string) + // Publisher publishes message to the standard logger // This is intended for development use. -type Publisher struct{} +type Publisher struct { + emit logEmitter +} func (p Publisher) ProduceBulk(events []*pb.Event, connGroup string) error { var errs []error @@ -34,12 +39,13 @@ func (p Publisher) ProduceBulk(events []*pb.Event, connGroup string) error { continue } } - logger.Infof( + line := fmt.Sprintf( "[LogPublisher] kind = %s, event_type = %s, event = %s", kind, typ, event, ) + p.emit(line) } if cmp.Or(errs...) != nil { return &publisher.BulkError{ @@ -58,5 +64,9 @@ func (p Publisher) Close() error { } func New() Publisher { - return Publisher{} + return Publisher{ + emit: func(line string) { + logger.Info(line) + }, + } } diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go new file mode 100644 index 00000000..27c432a9 --- /dev/null +++ b/publisher/log/log_test.go @@ -0,0 +1,110 @@ +package log + +import ( + "fmt" + "testing" + + raccoonv1 "github.com/raystack/raccoon/proto" + "github.com/raystack/raccoon/publisher/log/internal/testproto" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "google.golang.org/protobuf/proto" +) + +type emitterProbe struct { + Messages []string +} + +func (pe *emitterProbe) Emit(value string) { + pe.Messages = append(pe.Messages, value) +} + +func TestLogPublisher(t *testing.T) { + + t.Run("should return an error if payload type is not of protobuf or json", func(t *testing.T) { + payload := []*raccoonv1.Event{ + { + Type: "unknown", + EventBytes: []byte("]{}"), + }, + } + p := Publisher{ + emit: func(string) {}, // noop + } + err := p.ProduceBulk(payload, "") + assert.Error(t, err) + }) + t.Run("should emit json events correctly", func(t *testing.T) { + payload := []*raccoonv1.Event{ + { + Type: "unknown", + EventBytes: []byte(`{"key":"value"}`), + }, + } + em := &emitterProbe{} + p := Publisher{ + emit: em.Emit, + } + err := p.ProduceBulk(payload, "") + assert.NoError(t, err) + assert.Len(t, em.Messages, 1) + + expected := fmt.Sprintf( + "[LogPublisher] kind = %s, event_type = %s, event = %s", + "json", + "unknown", + `{"key":"value"}`, + ) + assert.Equal(t, expected, em.Messages[0]) + }) + t.Run("should emit protobuf events correctly", func(t *testing.T) { + msg := &testproto.TestEvent{ + Description: "test event", + Count: 420, + Tags: []string{"log", "protobuf"}, + } + bytes, err := proto.Marshal(msg) + require.NoError(t, err) + payload := []*raccoonv1.Event{ + { + Type: "unknown", + EventBytes: bytes, + }, + } + em := &emitterProbe{} + p := Publisher{ + emit: em.Emit, + } + err = p.ProduceBulk(payload, "") + assert.NoError(t, err) + assert.Len(t, em.Messages, 1) + + expected := fmt.Sprintf( + "[LogPublisher] kind = %s, event_type = %s, event = %s", + "protobuf", + "unknown", + `1:"test event" 2:420 3:"log" 3:"protobuf"`, + ) + assert.Equal(t, expected, em.Messages[0]) + }) + t.Run("publisher should have the name 'log'", func(t *testing.T) { + assert.Equal(t, "log", Publisher{}.Name()) + }) + t.Run("publisher should Close() without error", func(t *testing.T) { + p := Publisher{ + emit: (&emitterProbe{}).Emit, + } + err := p.ProduceBulk([]*raccoonv1.Event{ + { + EventBytes: []byte("{}"), + Type: "unknown", + }, + }, "") + 
assert.NoError(t, err) + assert.NoError(t, p.Close()) + }) + + t.Run("publisher should initialise without panic", func(t *testing.T) { + _ = New() + }) +} From a0203e15ca5185709463b8748c48758cbf72eb08 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 17:01:53 +0530 Subject: [PATCH 02/66] worker: test for ack function --- worker/mocks.go | 8 ++++++++ worker/worker_test.go | 26 +++++++++++++++++++++++++- 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/worker/mocks.go b/worker/mocks.go index 7e177825..19f265da 100644 --- a/worker/mocks.go +++ b/worker/mocks.go @@ -31,3 +31,11 @@ func (m *mockMetric) Count(bucket string, val int, tags string) { func (m *mockMetric) Timing(bucket string, t int64, tags string) { m.Called(bucket, t, tags) } + +type mockAck struct { + mock.Mock +} + +func (m *mockAck) Ack(err error) { + m.Called(err) +} diff --git a/worker/worker_test.go b/worker/worker_test.go index f791a4a2..9b2bc342 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -44,10 +44,34 @@ func TestWorker(t *testing.T) { kp.On("Name").Return("kafka") bc <- *request bc <- *request - time.Sleep(10 * time.Millisecond) + + worker.FlushWithTimeOut(5 * time.Millisecond) kp.AssertExpectations(t) }) + t.Run("should call ack function", func(t *testing.T) { + kp := mockKafkaPublisher{} + kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() + kp.On("Name").Return("kafka") + defer kp.AssertExpectations(t) + + q := make(chan collector.CollectRequest, 1) + worker := Pool{ + Size: 1, + EventsChannel: q, + producer: &kp, + wg: sync.WaitGroup{}, + } + worker.StartWorkers() + + ackMock := &mockAck{} + ackMock.On("Ack", nil).Return().Once() + defer ackMock.AssertExpectations(t) + r := *request + r.AckFunc = ackMock.Ack + q <- r + worker.FlushWithTimeOut(5 * time.Millisecond) + }) }) t.Run("Flush", func(t *testing.T) { From bad90fda1e19763eb8d5db54cb7761539d80c109 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 17:04:30 +0530 Subject: [PATCH 03/66] worker: tests: remove unused mocks --- worker/mocks.go | 12 ------------ worker/worker_test.go | 8 -------- 2 files changed, 20 deletions(-) diff --git a/worker/mocks.go b/worker/mocks.go index 19f265da..6bf29a71 100644 --- a/worker/mocks.go +++ b/worker/mocks.go @@ -20,18 +20,6 @@ func (m *mockKafkaPublisher) Name() string { return m.Called().String(0) } -type mockMetric struct { - mock.Mock -} - -func (m *mockMetric) Count(bucket string, val int, tags string) { - m.Called(bucket, val, tags) -} - -func (m *mockMetric) Timing(bucket string, t int64, tags string) { - m.Called(bucket, t, tags) -} - type mockAck struct { mock.Mock } diff --git a/worker/worker_test.go b/worker/worker_test.go index 9b2bc342..4ef9241d 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -27,10 +27,6 @@ func TestWorker(t *testing.T) { t.Run("StartWorkers", func(t *testing.T) { t.Run("Should publish messages on bufferChannel to kafka", func(t *testing.T) { kp := mockKafkaPublisher{} - m := &mockMetric{} - m.On("Timing", "processing.latency", mock.Anything, "") - m.On("Count", "kafka_messages_delivered_total", 0, "success=true") - m.On("Count", "kafka_messages_delivered_total", 0, "success=false") bc := make(chan collector.CollectRequest, 2) worker := Pool{ Size: 1, @@ -78,10 +74,6 @@ func TestWorker(t *testing.T) { t.Run("Should block until all messages is processed", func(t *testing.T) { kp := mockKafkaPublisher{} bc := make(chan collector.CollectRequest, 2) - m := &mockMetric{} - m.On("Timing", 
"processing.latency", mock.Anything, "") - m.On("Count", "kafka_messages_delivered_total", 0, "success=false") - m.On("Count", "kafka_messages_delivered_total", 0, "success=true") worker := Pool{ Size: 1, From fb5572401d12c28fbf15ba7c28336929acef93a8 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 17:34:33 +0530 Subject: [PATCH 04/66] worker: add tests for error cases --- worker/worker_test.go | 44 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 43 insertions(+), 1 deletion(-) diff --git a/worker/worker_test.go b/worker/worker_test.go index 4ef9241d..7cbc5985 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -1,6 +1,7 @@ package worker import ( + "fmt" "sync" "testing" "time" @@ -8,6 +9,7 @@ import ( "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/identification" pb "github.com/raystack/raccoon/proto" + "github.com/raystack/raccoon/publisher" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "google.golang.org/protobuf/types/known/timestamppb" @@ -21,6 +23,11 @@ func TestWorker(t *testing.T) { }, SendEventRequest: &pb.SendEventRequest{ SentTime: ×tamppb.Timestamp{}, + Events: []*pb.Event{ + { + Type: "synthetic_event", + }, + }, }, } @@ -45,7 +52,7 @@ func TestWorker(t *testing.T) { kp.AssertExpectations(t) }) - t.Run("should call ack function", func(t *testing.T) { + t.Run("Should call ack function", func(t *testing.T) { kp := mockKafkaPublisher{} kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() kp.On("Name").Return("kafka") @@ -68,6 +75,41 @@ func TestWorker(t *testing.T) { q <- r worker.FlushWithTimeOut(5 * time.Millisecond) }) + t.Run("Should handle publisher error", func(t *testing.T) { + + e := &publisher.BulkError{ + Errors: []error{ + fmt.Errorf("simulated error"), + }, + } + kp := mockKafkaPublisher{} + kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything). + Return(e). + Once() + kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything). + Return(fmt.Errorf("publisher failure")). 
+ Once() + kp.On("Name").Return("kafka") + defer kp.AssertExpectations(t) + + q := make(chan collector.CollectRequest, 2) + worker := Pool{ + Size: 1, + EventsChannel: q, + producer: &kp, + wg: sync.WaitGroup{}, + } + worker.StartWorkers() + + ackMock := &mockAck{} + ackMock.On("Ack", mock.Anything).Return().Twice() + defer ackMock.AssertExpectations(t) + r := *request + r.AckFunc = ackMock.Ack + q <- r + q <- r + worker.FlushWithTimeOut(5 * time.Millisecond) + }) }) t.Run("Flush", func(t *testing.T) { From 873be45a1f31b69b38f1f09fb942c41031ac4552 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 17:44:27 +0530 Subject: [PATCH 05/66] worker: test: use constructor for tests --- worker/worker_test.go | 37 ++++++++++++------------------------- 1 file changed, 12 insertions(+), 25 deletions(-) diff --git a/worker/worker_test.go b/worker/worker_test.go index 7cbc5985..07485434 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -2,7 +2,6 @@ package worker import ( "fmt" - "sync" "testing" "time" @@ -35,12 +34,9 @@ func TestWorker(t *testing.T) { t.Run("Should publish messages on bufferChannel to kafka", func(t *testing.T) { kp := mockKafkaPublisher{} bc := make(chan collector.CollectRequest, 2) - worker := Pool{ - Size: 1, - EventsChannel: bc, - producer: &kp, - wg: sync.WaitGroup{}, - } + worker := CreateWorkerPool( + 1, bc, &kp, + ) worker.StartWorkers() kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Twice() @@ -59,12 +55,9 @@ func TestWorker(t *testing.T) { defer kp.AssertExpectations(t) q := make(chan collector.CollectRequest, 1) - worker := Pool{ - Size: 1, - EventsChannel: q, - producer: &kp, - wg: sync.WaitGroup{}, - } + worker := CreateWorkerPool( + 1, q, &kp, + ) worker.StartWorkers() ackMock := &mockAck{} @@ -93,12 +86,9 @@ func TestWorker(t *testing.T) { defer kp.AssertExpectations(t) q := make(chan collector.CollectRequest, 2) - worker := Pool{ - Size: 1, - EventsChannel: q, - producer: &kp, - wg: sync.WaitGroup{}, - } + worker := CreateWorkerPool( + 1, q, &kp, + ) worker.StartWorkers() ackMock := &mockAck{} @@ -117,12 +107,9 @@ func TestWorker(t *testing.T) { kp := mockKafkaPublisher{} bc := make(chan collector.CollectRequest, 2) - worker := Pool{ - Size: 1, - EventsChannel: bc, - producer: &kp, - wg: sync.WaitGroup{}, - } + worker := CreateWorkerPool( + 1, bc, &kp, + ) worker.StartWorkers() kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Times(3).After(3 * time.Millisecond) kp.On("Name").Return("kafka") From e78a9d6cb8f0f4716e7cbfd7e82e30c4b291447a Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 21:25:59 +0530 Subject: [PATCH 06/66] app: worker: fix flush always timing out --- app/server.go | 14 ++++++++++---- worker/worker_test.go | 28 ++++++++++++++++++++-------- 2 files changed, 30 insertions(+), 12 deletions(-) diff --git a/app/server.go b/app/server.go index bb728d27..faaa39a3 100644 --- a/app/server.go +++ b/app/server.go @@ -64,16 +64,18 @@ func shutDownServer(ctx context.Context, cancel context.CancelFunc, httpServices sig := <-signalChan switch sig { case syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT: - logger.Info(fmt.Sprintf("[App.Server] Received a signal %s", sig)) + logger.Infof("[App.Server] Received a signal %s", sig) httpServices.Shutdown(ctx) logger.Info("Server shutdown all the listeners") + close(bufferChannel) + logger.Info("Flushing workers") timedOut := workerPool.FlushWithTimeOut(workerFlushTimeout) if timedOut { - 
logger.Info(fmt.Sprintf("WorkerPool flush timedout %t", timedOut)) + logger.Infof("WorkerPool flush timed out") } flushInterval := config.Publisher.Kafka.FlushInterval logger.Infof("Closing %q producer\n", pub.Name()) - logger.Info(fmt.Sprintf("Wait %d ms for all messages to be delivered", flushInterval)) + logger.Infof("Wait %d ms for all messages to be delivered", flushInterval) eventsInProducer := 0 err := pub.Close() @@ -101,9 +103,13 @@ func shutDownServer(ctx context.Context, cancel context.CancelFunc, httpServices }, ) logger.Info("Exiting server") + + // NOTE(turtledev): what's the purpose of this? + // everything has exited at this point. The context + // is not saved by any instances. cancel() default: - logger.Info(fmt.Sprintf("[App.Server] Received a unexpected signal %s", sig)) + logger.Infof("[App.Server] Received a unexpected signal %s", sig) } } } diff --git a/worker/worker_test.go b/worker/worker_test.go index 07485434..b5f2b477 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -33,20 +33,24 @@ func TestWorker(t *testing.T) { t.Run("StartWorkers", func(t *testing.T) { t.Run("Should publish messages on bufferChannel to kafka", func(t *testing.T) { kp := mockKafkaPublisher{} + kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Twice() + kp.On("Name").Return("kafka") + defer kp.AssertExpectations(t) + bc := make(chan collector.CollectRequest, 2) worker := CreateWorkerPool( 1, bc, &kp, ) worker.StartWorkers() - kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Twice() - kp.On("Name").Return("kafka") bc <- *request bc <- *request + close(bc) - worker.FlushWithTimeOut(5 * time.Millisecond) - - kp.AssertExpectations(t) + assert.False( + t, + worker.FlushWithTimeOut(time.Second), + ) }) t.Run("Should call ack function", func(t *testing.T) { kp := mockKafkaPublisher{} @@ -66,7 +70,11 @@ func TestWorker(t *testing.T) { r := *request r.AckFunc = ackMock.Ack q <- r - worker.FlushWithTimeOut(5 * time.Millisecond) + close(q) + assert.False( + t, + worker.FlushWithTimeOut(time.Second), + ) }) t.Run("Should handle publisher error", func(t *testing.T) { @@ -98,7 +106,11 @@ func TestWorker(t *testing.T) { r.AckFunc = ackMock.Ack q <- r q <- r - worker.FlushWithTimeOut(5 * time.Millisecond) + close(q) + assert.False( + t, + worker.FlushWithTimeOut(time.Second), + ) }) }) @@ -117,7 +129,7 @@ func TestWorker(t *testing.T) { bc <- *request bc <- *request close(bc) - timedOut := worker.FlushWithTimeOut(1 * time.Second) + timedOut := worker.FlushWithTimeOut(time.Second) assert.False(t, timedOut) assert.Equal(t, 0, len(bc)) kp.AssertExpectations(t) From 3699a3a8d7ffda32920060d92ad9610607562382 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 21:34:01 +0530 Subject: [PATCH 07/66] app: move kafka specific log messages to kafka publisher package --- app/server.go | 4 ++-- publisher/kafka/kafka.go | 3 ++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/app/server.go b/app/server.go index faaa39a3..a2f33f7b 100644 --- a/app/server.go +++ b/app/server.go @@ -73,9 +73,7 @@ func shutDownServer(ctx context.Context, cancel context.CancelFunc, httpServices if timedOut { logger.Infof("WorkerPool flush timed out") } - flushInterval := config.Publisher.Kafka.FlushInterval logger.Infof("Closing %q producer\n", pub.Name()) - logger.Infof("Wait %d ms for all messages to be delivered", flushInterval) eventsInProducer := 0 err := pub.Close() @@ -93,6 +91,8 @@ func shutDownServer(ctx context.Context, cancel 
context.CancelFunc, httpServices Until then we fall back to approximation */ eventsInChannel := len(bufferChannel) * 7 logger.Info(fmt.Sprintf("Outstanding unprocessed events in the channel, data lost ~ (No batches %d * 5 events) = ~%d", len(bufferChannel), eventsInChannel)) + + // NOTE(turtledev): aren't these metrics misleading? metrics.Count( fmt.Sprintf("%s_messages_delivered_total", pub.Name()), int64(eventsInChannel+eventsInProducer), diff --git a/publisher/kafka/kafka.go b/publisher/kafka/kafka.go index 5f27d442..44736242 100644 --- a/publisher/kafka/kafka.go +++ b/publisher/kafka/kafka.go @@ -154,8 +154,9 @@ func (pr *Kafka) ReportStats() { // Close wait for outstanding messages to be delivered within given flush interval timeout. func (pr *Kafka) Close() error { + logger.Infof("Flushing kafka producer (timeout: %d ms)", pr.flushInterval) remaining := pr.kp.Flush(pr.flushInterval) - logger.Info(fmt.Sprintf("Outstanding events still un-flushed : %d", remaining)) + logger.Infof("Outstanding events still un-flushed : %d", remaining) pr.kp.Close() if remaining > 0 { return &publisher.UnflushedEventsError{Count: remaining} From 9f37c5420ecf87e9de1242b49fff81fdf2805e9a Mon Sep 17 00:00:00 2001 From: turtleDev Date: Tue, 3 Sep 2024 23:31:50 +0530 Subject: [PATCH 08/66] wip: worker metrics test --- metrics/metric_test.go | 57 ++------------------------- metrics/metrics.go | 6 +++ metrics/mock.go | 51 ++++++++++++++++++++++++ worker/mocks.go | 10 +++++ worker/timesource.go | 15 +++++++ worker/worker.go | 4 ++ worker/worker_test.go | 88 ++++++++++++++++++++++++++++++++++++------ 7 files changed, 167 insertions(+), 64 deletions(-) create mode 100644 metrics/mock.go create mode 100644 worker/timesource.go diff --git a/metrics/metric_test.go b/metrics/metric_test.go index 8c6ecb1c..9777d9da 100644 --- a/metrics/metric_test.go +++ b/metrics/metric_test.go @@ -7,57 +7,8 @@ import ( "github.com/raystack/raccoon/config" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" ) -type mockMetricInstrument struct { - mock.Mock -} - -func (m *mockMetricInstrument) Increment(metricName string, labels map[string]string) error { - args := m.Called(metricName, labels) - err := args.Get(0) - if err != nil { - return args.Get(0).(error) - } else { - return nil - } -} - -func (m *mockMetricInstrument) Count(metricName string, count int64, labels map[string]string) error { - args := m.Called(metricName, count, labels) - err := args.Get(0) - if err != nil { - return args.Get(0).(error) - } else { - return nil - } -} - -func (m *mockMetricInstrument) Gauge(metricName string, value interface{}, labels map[string]string) error { - args := m.Called(metricName, value, labels) - err := args.Get(0) - if err != nil { - return args.Get(0).(error) - } else { - return nil - } -} - -func (m *mockMetricInstrument) Histogram(metricName string, value int64, labels map[string]string) error { - args := m.Called(metricName, value, labels) - err := args.Get(0) - if err != nil { - return args.Get(0).(error) - } else { - return nil - } -} - -func (m *mockMetricInstrument) Close() { - m.Called() -} - func Test_Prometheus_Setup(t *testing.T) { config.Metric.Prometheus.Enabled = true config.Metric.StatsD.Enabled = false @@ -82,7 +33,7 @@ func Test_Error_On_Both_Enabled(t *testing.T) { } func Test_Count_Calls_Instrument_Count(t *testing.T) { - mockInstrumentImpl := &mockMetricInstrument{} + mockInstrumentImpl := &MockInstrument{} instrument = mockInstrumentImpl metricName := "abcd" countValue := int64(9000) @@ -95,7 
+46,7 @@ func Test_Count_Calls_Instrument_Count(t *testing.T) { } func Test_Gauge_Calls_Instrument_Gauge(t *testing.T) { - mockInstrumentImpl := &mockMetricInstrument{} + mockInstrumentImpl := &MockInstrument{} instrument = mockInstrumentImpl metricName := "abcd" countValue := int64(9000) @@ -108,7 +59,7 @@ func Test_Gauge_Calls_Instrument_Gauge(t *testing.T) { } func Test_Histogram_Calls_Instrument_Histogram(t *testing.T) { - mockInstrumentImpl := &mockMetricInstrument{} + mockInstrumentImpl := &MockInstrument{} instrument = mockInstrumentImpl metricName := "abcd" countValue := int64(9000) @@ -121,7 +72,7 @@ func Test_Histogram_Calls_Instrument_Histogram(t *testing.T) { } func Test_Close_Calls_Instrument_Close(t *testing.T) { - mockInstrumentImpl := &mockMetricInstrument{} + mockInstrumentImpl := &MockInstrument{} instrument = mockInstrumentImpl mockInstrumentImpl.On("Close") Close() diff --git a/metrics/metrics.go b/metrics/metrics.go index aa24eaef..ba652996 100644 --- a/metrics/metrics.go +++ b/metrics/metrics.go @@ -111,6 +111,12 @@ func SetVoid() { instrument = voidInstrument{} } +// Instrument returns the configured MetricInstrument +// should be called once Setup() is done. +func Instrument() MetricInstrument { + return instrument +} + func Close() { if instrument != nil { instrument.Close() diff --git a/metrics/mock.go b/metrics/mock.go new file mode 100644 index 00000000..4aaee33c --- /dev/null +++ b/metrics/mock.go @@ -0,0 +1,51 @@ +package metrics + +import "github.com/stretchr/testify/mock" + +type MockInstrument struct { + mock.Mock +} + +func (m *MockInstrument) Increment(metricName string, labels map[string]string) error { + args := m.Called(metricName, labels) + err := args.Get(0) + if err != nil { + return args.Get(0).(error) + } else { + return nil + } +} + +func (m *MockInstrument) Count(metricName string, count int64, labels map[string]string) error { + args := m.Called(metricName, count, labels) + err := args.Get(0) + if err != nil { + return args.Get(0).(error) + } else { + return nil + } +} + +func (m *MockInstrument) Gauge(metricName string, value interface{}, labels map[string]string) error { + args := m.Called(metricName, value, labels) + err := args.Get(0) + if err != nil { + return args.Get(0).(error) + } else { + return nil + } +} + +func (m *MockInstrument) Histogram(metricName string, value int64, labels map[string]string) error { + args := m.Called(metricName, value, labels) + err := args.Get(0) + if err != nil { + return args.Get(0).(error) + } else { + return nil + } +} + +func (m *MockInstrument) Close() { + m.Called() +} diff --git a/worker/mocks.go b/worker/mocks.go index 6bf29a71..6d0a6de8 100644 --- a/worker/mocks.go +++ b/worker/mocks.go @@ -1,6 +1,8 @@ package worker import ( + "time" + pb "github.com/raystack/raccoon/proto" mock "github.com/stretchr/testify/mock" ) @@ -27,3 +29,11 @@ type mockAck struct { func (m *mockAck) Ack(err error) { m.Called(err) } + +type mockTimeSource struct { + mock.Mock +} + +func (m *mockTimeSource) Now() time.Time { + return m.Called().Get(0).(time.Time) +} diff --git a/worker/timesource.go b/worker/timesource.go new file mode 100644 index 00000000..e97d118f --- /dev/null +++ b/worker/timesource.go @@ -0,0 +1,15 @@ +package worker + +import "time" + +type TimeSource interface { + Now() time.Time +} + +type defaultTimeSource struct{} + +func (defaultTimeSource) Now() time.Time { + return time.Now() +} + +var DefaultTimeSource = defaultTimeSource{} diff --git a/worker/worker.go b/worker/worker.go index 
4bfdc2b1..b4cd3b8b 100644 --- a/worker/worker.go +++ b/worker/worker.go @@ -27,6 +27,8 @@ type Pool struct { EventsChannel <-chan collector.CollectRequest producer Producer wg sync.WaitGroup + instrument metrics.MetricInstrument + timeSource TimeSource } // CreateWorkerPool create new Pool struct given size and EventsChannel worker. @@ -36,6 +38,8 @@ func CreateWorkerPool(size int, eventsChannel <-chan collector.CollectRequest, p EventsChannel: eventsChannel, producer: producer, wg: sync.WaitGroup{}, + instrument: metrics.Instrument(), + timeSource: DefaultTimeSource, } } diff --git a/worker/worker_test.go b/worker/worker_test.go index b5f2b477..e2e0620d 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -7,6 +7,7 @@ import ( "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/identification" + "github.com/raystack/raccoon/metrics" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" "github.com/stretchr/testify/assert" @@ -67,9 +68,9 @@ func TestWorker(t *testing.T) { ackMock := &mockAck{} ackMock.On("Ack", nil).Return().Once() defer ackMock.AssertExpectations(t) - r := *request - r.AckFunc = ackMock.Ack - q <- r + req := *request + req.AckFunc = ackMock.Ack + q <- req close(q) assert.False( t, @@ -98,20 +99,85 @@ func TestWorker(t *testing.T) { 1, q, &kp, ) worker.StartWorkers() - - ackMock := &mockAck{} - ackMock.On("Ack", mock.Anything).Return().Twice() - defer ackMock.AssertExpectations(t) - r := *request - r.AckFunc = ackMock.Ack - q <- r - q <- r + q <- *request + q <- *request close(q) assert.False( t, worker.FlushWithTimeOut(time.Second), ) }) + t.Run("should publish metrics related to workers", func(t *testing.T) { + kp := mockKafkaPublisher{} + kp.On("ProduceBulk", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once() + kp.On("Name").Return("kafka") + defer kp.AssertExpectations(t) + + eventsChannel := make(chan collector.CollectRequest, 1) + + now := time.Now() + mockTs := &mockTimeSource{} + mockTs.On("Now").Return(now).Once() + mockTs.On("Now").Return(now.Add(2 * time.Millisecond)).Once() + defer mockTs.AssertExpectations(t) + + req := *request + req.TimePushed = now.Add(-time.Millisecond) + + mockInstrument := &metrics.MockInstrument{} + mockInstrument.On( + "Histogram", + "batch_idle_in_channel_milliseconds", + 1, + mock.Anything, + ).Return().Once() + + mockInstrument.On( + "Histogram", + "kafka_producebulk_tt_ms", + 2, + mock.Anything, + ).Return().Once() + + // TODO + mockInstrument.On( + "Histogram", + "event_processing_duration_milliseconds", + mock.Anything, + mock.Anything, + ).Return().Once() + mockInstrument.On( + "Histogram", + "worker_processing_duration_milliseconds", + mock.Anything, + mock.Anything, + ).Return().Once() + mockInstrument.On( + "Histogram", + "server_processing_latency_milliseconds", + mock.Anything, + mock.Anything, + ).Return().Once() + // end TODO + + defer mockInstrument.AssertExpectations(t) + + worker := &Pool{ + Size: 1, + EventsChannel: eventsChannel, + producer: &kp, + instrument: mockInstrument, + timeSource: mockTs, + } + worker.StartWorkers() + + eventsChannel <- req + close(eventsChannel) + assert.False( + t, + worker.FlushWithTimeOut(time.Second), + ) + }) }) t.Run("Flush", func(t *testing.T) { From 1612548436b6812ba52692a8cbcd4a3b8a7c98d0 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 16:40:48 +0530 Subject: [PATCH 09/66] worker: add tests against instrumented metrics --- metrics/metrics.go | 2 +- worker/worker.go | 20 ++++++++++---------- 
worker/worker_test.go | 37 ++++++++++++++++++------------------- 3 files changed, 29 insertions(+), 30 deletions(-) diff --git a/metrics/metrics.go b/metrics/metrics.go index ba652996..0ae09c12 100644 --- a/metrics/metrics.go +++ b/metrics/metrics.go @@ -7,7 +7,7 @@ import ( "github.com/raystack/raccoon/logger" ) -var instrument MetricInstrument +var instrument MetricInstrument = voidInstrument{} type MetricInstrument interface { Increment(metricName string, labels map[string]string) error diff --git a/worker/worker.go b/worker/worker.go index b4cd3b8b..83f92eb6 100644 --- a/worker/worker.go +++ b/worker/worker.go @@ -46,18 +46,18 @@ func CreateWorkerPool(size int, eventsChannel <-chan collector.CollectRequest, p func (w *Pool) worker(name string) { logger.Info("Running worker: " + name) for request := range w.EventsChannel { - metrics.Histogram( + batchReadTime := w.timeSource.Now() + w.instrument.Histogram( "batch_idle_in_channel_milliseconds", - time.Since(request.TimePushed).Milliseconds(), + batchReadTime.Sub(request.TimePushed).Milliseconds(), map[string]string{"worker": name}) - batchReadTime := time.Now() //@TODO - Should add integration tests to prove that the worker receives the same message that it produced, on the delivery channel it created err := w.producer.ProduceBulk(request.GetEvents(), request.ConnectionIdentifier.Group) - produceTime := time.Since(batchReadTime) - metrics.Histogram( + produceTime := w.timeSource.Now().Sub(batchReadTime) + w.instrument.Histogram( fmt.Sprintf("%s_producebulk_tt_ms", w.producer.Name()), produceTime.Milliseconds(), map[string]string{}, @@ -85,17 +85,17 @@ func (w *Pool) worker(name string) { lenBatch := int64(len(request.GetEvents())) logger.Debug(fmt.Sprintf("Success sending messages, %v", lenBatch-int64(totalErr))) if lenBatch > 0 { - eventTimingMs := time.Since(request.GetSentTime().AsTime()).Milliseconds() / lenBatch - metrics.Histogram( + eventTimingMs := w.timeSource.Now().Sub(request.GetSentTime().AsTime()).Milliseconds() / lenBatch + w.instrument.Histogram( "event_processing_duration_milliseconds", eventTimingMs, map[string]string{"conn_group": request.ConnectionIdentifier.Group}) - now := time.Now() - metrics.Histogram( + now := w.timeSource.Now() + w.instrument.Histogram( "worker_processing_duration_milliseconds", (now.Sub(batchReadTime).Milliseconds())/lenBatch, map[string]string{"worker": name}) - metrics.Histogram( + w.instrument.Histogram( "server_processing_latency_milliseconds", (now.Sub(request.TimeConsumed)).Milliseconds()/lenBatch, map[string]string{"conn_group": request.ConnectionIdentifier.Group}) diff --git a/worker/worker_test.go b/worker/worker_test.go index e2e0620d..3e287533 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -117,48 +117,48 @@ func TestWorker(t *testing.T) { now := time.Now() mockTs := &mockTimeSource{} - mockTs.On("Now").Return(now).Once() - mockTs.On("Now").Return(now.Add(2 * time.Millisecond)).Once() + mockTs.On("Now").Return(now).Once() // batchReadTime + mockTs.On("Now").Return(now.Add(2 * time.Millisecond)).Once() // produceTime + mockTs.On("Now").Return(now.Add(3 * time.Millisecond)).Once() // eventTimingMs + mockTs.On("Now").Return(now.Add(4 * time.Millisecond)).Once() // (worker_server)_processing_* defer mockTs.AssertExpectations(t) req := *request - req.TimePushed = now.Add(-time.Millisecond) + req.SentTime = timestamppb.New(now.Add(-3 * time.Millisecond)) // time when client sends the request + req.TimeConsumed = now.Add(-2 * time.Millisecond) // time when service handlers 
send request to collector + req.TimePushed = now.Add(-time.Millisecond) // time when collector sends request to channel mockInstrument := &metrics.MockInstrument{} mockInstrument.On( "Histogram", "batch_idle_in_channel_milliseconds", - 1, + int64(1), mock.Anything, - ).Return().Once() - + ).Return(nil).Once() mockInstrument.On( "Histogram", "kafka_producebulk_tt_ms", - 2, + int64(2), mock.Anything, - ).Return().Once() - - // TODO + ).Return(nil).Once() mockInstrument.On( "Histogram", "event_processing_duration_milliseconds", + int64(6), mock.Anything, - mock.Anything, - ).Return().Once() + ).Return(nil).Once() mockInstrument.On( "Histogram", "worker_processing_duration_milliseconds", + int64(4), mock.Anything, - mock.Anything, - ).Return().Once() + ).Return(nil).Once() mockInstrument.On( "Histogram", "server_processing_latency_milliseconds", + int64(6), mock.Anything, - mock.Anything, - ).Return().Once() - // end TODO + ).Return(nil).Once() defer mockInstrument.AssertExpectations(t) @@ -174,8 +174,7 @@ func TestWorker(t *testing.T) { eventsChannel <- req close(eventsChannel) assert.False( - t, - worker.FlushWithTimeOut(time.Second), + t, worker.FlushWithTimeOut(time.Second), ) }) }) From cf724d771c861fff6a1134ed8cf348f0493e6609 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 16:44:19 +0530 Subject: [PATCH 10/66] worker: rename timeSource to Clock --- worker/clock.go | 15 +++++++++++++++ worker/mocks.go | 4 ++-- worker/timesource.go | 15 --------------- worker/worker.go | 12 ++++++------ worker/worker_test.go | 14 +++++++------- 5 files changed, 30 insertions(+), 30 deletions(-) create mode 100644 worker/clock.go delete mode 100644 worker/timesource.go diff --git a/worker/clock.go b/worker/clock.go new file mode 100644 index 00000000..dfc63e48 --- /dev/null +++ b/worker/clock.go @@ -0,0 +1,15 @@ +package worker + +import "time" + +type Clock interface { + Now() time.Time +} + +type defaultClock struct{} + +func (defaultClock) Now() time.Time { + return time.Now() +} + +var DefaultClock = defaultClock{} diff --git a/worker/mocks.go b/worker/mocks.go index 6d0a6de8..d6e54d81 100644 --- a/worker/mocks.go +++ b/worker/mocks.go @@ -30,10 +30,10 @@ func (m *mockAck) Ack(err error) { m.Called(err) } -type mockTimeSource struct { +type mockClock struct { mock.Mock } -func (m *mockTimeSource) Now() time.Time { +func (m *mockClock) Now() time.Time { return m.Called().Get(0).(time.Time) } diff --git a/worker/timesource.go b/worker/timesource.go deleted file mode 100644 index e97d118f..00000000 --- a/worker/timesource.go +++ /dev/null @@ -1,15 +0,0 @@ -package worker - -import "time" - -type TimeSource interface { - Now() time.Time -} - -type defaultTimeSource struct{} - -func (defaultTimeSource) Now() time.Time { - return time.Now() -} - -var DefaultTimeSource = defaultTimeSource{} diff --git a/worker/worker.go b/worker/worker.go index 83f92eb6..36e567b4 100644 --- a/worker/worker.go +++ b/worker/worker.go @@ -28,7 +28,7 @@ type Pool struct { producer Producer wg sync.WaitGroup instrument metrics.MetricInstrument - timeSource TimeSource + clock Clock } // CreateWorkerPool create new Pool struct given size and EventsChannel worker. 
@@ -39,14 +39,14 @@ func CreateWorkerPool(size int, eventsChannel <-chan collector.CollectRequest, p producer: producer, wg: sync.WaitGroup{}, instrument: metrics.Instrument(), - timeSource: DefaultTimeSource, + clock: DefaultClock, } } func (w *Pool) worker(name string) { logger.Info("Running worker: " + name) for request := range w.EventsChannel { - batchReadTime := w.timeSource.Now() + batchReadTime := w.clock.Now() w.instrument.Histogram( "batch_idle_in_channel_milliseconds", batchReadTime.Sub(request.TimePushed).Milliseconds(), @@ -56,7 +56,7 @@ func (w *Pool) worker(name string) { err := w.producer.ProduceBulk(request.GetEvents(), request.ConnectionIdentifier.Group) - produceTime := w.timeSource.Now().Sub(batchReadTime) + produceTime := w.clock.Now().Sub(batchReadTime) w.instrument.Histogram( fmt.Sprintf("%s_producebulk_tt_ms", w.producer.Name()), produceTime.Milliseconds(), @@ -85,12 +85,12 @@ func (w *Pool) worker(name string) { lenBatch := int64(len(request.GetEvents())) logger.Debug(fmt.Sprintf("Success sending messages, %v", lenBatch-int64(totalErr))) if lenBatch > 0 { - eventTimingMs := w.timeSource.Now().Sub(request.GetSentTime().AsTime()).Milliseconds() / lenBatch + eventTimingMs := w.clock.Now().Sub(request.GetSentTime().AsTime()).Milliseconds() / lenBatch w.instrument.Histogram( "event_processing_duration_milliseconds", eventTimingMs, map[string]string{"conn_group": request.ConnectionIdentifier.Group}) - now := w.timeSource.Now() + now := w.clock.Now() w.instrument.Histogram( "worker_processing_duration_milliseconds", (now.Sub(batchReadTime).Milliseconds())/lenBatch, diff --git a/worker/worker_test.go b/worker/worker_test.go index 3e287533..4d8af0f9 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -116,12 +116,12 @@ func TestWorker(t *testing.T) { eventsChannel := make(chan collector.CollectRequest, 1) now := time.Now() - mockTs := &mockTimeSource{} - mockTs.On("Now").Return(now).Once() // batchReadTime - mockTs.On("Now").Return(now.Add(2 * time.Millisecond)).Once() // produceTime - mockTs.On("Now").Return(now.Add(3 * time.Millisecond)).Once() // eventTimingMs - mockTs.On("Now").Return(now.Add(4 * time.Millisecond)).Once() // (worker_server)_processing_* - defer mockTs.AssertExpectations(t) + clk := &mockClock{} + clk.On("Now").Return(now).Once() // batchReadTime + clk.On("Now").Return(now.Add(2 * time.Millisecond)).Once() // produceTime + clk.On("Now").Return(now.Add(3 * time.Millisecond)).Once() // eventTimingMs + clk.On("Now").Return(now.Add(4 * time.Millisecond)).Once() // (worker_server)_processing_* + defer clk.AssertExpectations(t) req := *request req.SentTime = timestamppb.New(now.Add(-3 * time.Millisecond)) // time when client sends the request @@ -167,7 +167,7 @@ func TestWorker(t *testing.T) { EventsChannel: eventsChannel, producer: &kp, instrument: mockInstrument, - timeSource: mockTs, + clock: clk, } worker.StartWorkers() From 4e53510f411bc3d8a09aa90b47dce5a2d6e6cc49 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 17:04:47 +0530 Subject: [PATCH 11/66] chore: remove client example --- example/example.proto | 7 --- example/main.go | 70 --------------------- example/readme.md | 104 ------------------------------- example/sample.pb.go | 142 ------------------------------------------ 4 files changed, 323 deletions(-) delete mode 100644 example/example.proto delete mode 100644 example/main.go delete mode 100644 example/readme.md delete mode 100644 example/sample.pb.go diff --git a/example/example.proto b/example/example.proto deleted 
file mode 100644 index 2312eb9e..00000000 --- a/example/example.proto +++ /dev/null @@ -1,7 +0,0 @@ -syntax = "proto3"; - -package raystack.raccoon; - -message SampleEvent { - string description = 1; -} diff --git a/example/main.go b/example/main.go deleted file mode 100644 index ace2d210..00000000 --- a/example/main.go +++ /dev/null @@ -1,70 +0,0 @@ -package main - -import ( - "fmt" - "net/http" - "time" - - "github.com/gorilla/websocket" - pb "github.com/raystack/raccoon/proto" - "google.golang.org/protobuf/proto" - "google.golang.org/protobuf/types/known/timestamppb" -) - -var ( - url = "ws://localhost:8080/api/v1/events" - header = http.Header{ - "X-User-ID": []string{"1234"}, - } - pingInterval = 5 * time.Second -) - -func main() { - ws, _, err := websocket.DefaultDialer.Dial(url, header) - if err != nil { - panic("Fail to make websocket connection") - } - // Gorilla websocket has default PingHandler which sends Pong. No need to explicitly heandle it. - go pinger(ws) - - event1 := generateSampleEvent() - event2 := generateSampleEvent() - eventBatch := []*pb.Event{ - event1, - event2, - } - - sentTime := time.Now() - request := &pb.SendEventRequest{ - ReqGuid: "55F648D1-9A73-4F6C-8657-4D26A6C1F168", - SentTime: ×tamppb.Timestamp{ - Seconds: sentTime.Unix(), - Nanos: int32(sentTime.Nanosecond()), - }, - Events: eventBatch, - } - reqBin, _ := proto.Marshal(request) - ws.WriteMessage(websocket.BinaryMessage, reqBin) - - _, response, _ := ws.ReadMessage() - SendEventResponse := &pb.SendEventResponse{} - proto.Unmarshal(response, SendEventResponse) - // Handle the response accordingly - fmt.Printf("%v", SendEventResponse) -} - -func generateSampleEvent() *pb.Event { - sampleEvent := &SampleEvent{Description: "user_click"} - sampleBin, _ := proto.Marshal(sampleEvent) - event := &pb.Event{EventBytes: sampleBin, Type: "some-type"} - return event -} - -func pinger(ws *websocket.Conn) { - ticker := time.Tick(pingInterval) - for { - <-ticker - ws.WriteControl(websocket.PingMessage, []byte("--ping--"), time.Now().Add(pingInterval)) - fmt.Println("ping") - } -} diff --git a/example/readme.md b/example/readme.md deleted file mode 100644 index b58bdc24..00000000 --- a/example/readme.md +++ /dev/null @@ -1,104 +0,0 @@ -There are couple of things the client to handle to start send events to Raccoon. - -- [Establish Websocket Connection](#establish-websocket-connection) -- [Set Ping/Pong Handler](#set-pingpong-handler) -- [Batch Events](#batch-events) -- [Send The Batch](#send-the-batch) -- [Handle The Response](#handle-the-response) - -Below are the explanation of sample client in [main.go](https://github.com/raystack/raccoon/tree/main/docs/example/main.go) - -## Establish Websocket Connection - -You are free to use any websocket client as long as it supports passing header. You can connect to `/api/v1/events` endpoint with uniq id header set. You'll also need to handle retry in case Raccon reject the connection because [max connection is reached](). - -```go -var ( - url = "ws://localhost:8080/api/v1/events" - header = http.Header{ - "X-User-ID": []string{"1234"}, - } -) - -func main() { - ws, _, err := websocket.DefaultDialer.Dial(url, header) - if err != nil { - panic("Fail to make websocket connection") - } -``` - -## Set Ping/Pong Handler - -Raccoon needs to maintain many alive connections. To clean up dead connection, Raccoon will close client that couldn't response to Ping on time. Because of that, the client need to handle Ping if not handled by the websocket client. 
- -```go - // Gorilla websocket has default PingHandler which sends Pong. No need to explicitly heandle it. - go pinger(ws) -``` - -You can also check the liveliness of the server by having Pinger function and close the connection if necessary - -```go -func pinger(ws *websocket.Conn) { - ticker := time.Tick(pingInterval) - for { - <-ticker - ws.WriteControl(websocket.PingMessage, []byte("--ping--"), time.Now().Add(pingInterval)) - fmt.Println("ping") - } -} -``` - -## Batch Events - -When the connection is set, all you need to do is collect the event and send them in batch. - -```go - event1 := generateSampleEvent() - event2 := generateSampleEvent() - eventBatch := []*pb.Event{ - event1, - event2, - } -``` - -Where `generateSampleEvent` is - -```go -func generateSampleEvent() *pb.Event { - sampleEvent := &SampleEvent{Description: "user_click"} - sampleBin, _ := proto.Marshal(sampleEvent) - event := &pb.Event{EventBytes: sampleBin, Type: "some-type"} - return event -} -``` - -## Send The Batch - -Now you have websocket connection and batch of event ready, all you need is send the batch. Don't forget to fill `send_time` field before sending the request. - -```go - sentTime := time.Now() - request := &pb.SendEventRequest{ - ReqGuid: "55F648D1-9A73-4F6C-8657-4D26A6C1F168", - SentTime: ×tamppb.Timestamp{ - Seconds: sentTime.Unix(), - Nanos: int32(sentTime.Nanosecond()), - }, - Events: eventBatch, - } - reqBin, _ := proto.Marshal(request) - ws.WriteMessage(websocket.BinaryMessage, reqBin) -``` - -## Handle The Response - -Raccoon sends SendEventResponse for every batch of events sent from the client. The ReqGuid in the response identifies the batch that the client sent. The response object could be used for client's telemetry in terms of how may batches succeeded, failed etc., The clients can retry based on failures however server side kafka send failures are not sent as failures due to the [acknowledgement design as explained here](https://github.com/raystack/raccoon/blob/main/docs/concepts/architecture.md#acknowledging-events). - -```go - _, response, _ := ws.ReadMessage() - SendEventResponse := &pb.SendEventResponse{} - proto.Unmarshal(response, SendEventResponse) - // Handle the response accordingly - fmt.Printf("%v", SendEventResponse) -``` diff --git a/example/sample.pb.go b/example/sample.pb.go deleted file mode 100644 index 97e79a34..00000000 --- a/example/sample.pb.go +++ /dev/null @@ -1,142 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// versions: -// protoc-gen-go v1.26.0-rc.1 -// protoc v3.14.0 -// source: example.proto - -package main - -import ( - protoreflect "google.golang.org/protobuf/reflect/protoreflect" - protoimpl "google.golang.org/protobuf/runtime/protoimpl" - reflect "reflect" - sync "sync" -) - -const ( - // Verify that this generated code is sufficiently up-to-date. - _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) - // Verify that runtime/protoimpl is sufficiently up-to-date. 
- _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) -) - -type SampleEvent struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Description string `protobuf:"bytes,1,opt,name=description,proto3" json:"description,omitempty"` -} - -func (x *SampleEvent) Reset() { - *x = SampleEvent{} - if protoimpl.UnsafeEnabled { - mi := &file_example_proto_msgTypes[0] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *SampleEvent) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*SampleEvent) ProtoMessage() {} - -func (x *SampleEvent) ProtoReflect() protoreflect.Message { - mi := &file_example_proto_msgTypes[0] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use SampleEvent.ProtoReflect.Descriptor instead. -func (*SampleEvent) Descriptor() ([]byte, []int) { - return file_example_proto_rawDescGZIP(), []int{0} -} - -func (x *SampleEvent) GetDescription() string { - if x != nil { - return x.Description - } - return "" -} - -var File_example_proto protoreflect.FileDescriptor - -var file_example_proto_rawDesc = []byte{ - 0x0a, 0x0d, 0x65, 0x78, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, - 0x0c, 0x6f, 0x64, 0x70, 0x66, 0x2e, 0x72, 0x61, 0x63, 0x63, 0x6f, 0x6f, 0x6e, 0x22, 0x2f, 0x0a, - 0x0b, 0x53, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x0a, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, -} - -var ( - file_example_proto_rawDescOnce sync.Once - file_example_proto_rawDescData = file_example_proto_rawDesc -) - -func file_example_proto_rawDescGZIP() []byte { - file_example_proto_rawDescOnce.Do(func() { - file_example_proto_rawDescData = protoimpl.X.CompressGZIP(file_example_proto_rawDescData) - }) - return file_example_proto_rawDescData -} - -var file_example_proto_msgTypes = make([]protoimpl.MessageInfo, 1) -var file_example_proto_goTypes = []interface{}{ - (*SampleEvent)(nil), // 0: raystack.raccoon.SampleEvent -} -var file_example_proto_depIdxs = []int32{ - 0, // [0:0] is the sub-list for method output_type - 0, // [0:0] is the sub-list for method input_type - 0, // [0:0] is the sub-list for extension type_name - 0, // [0:0] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name -} - -func init() { file_example_proto_init() } -func file_example_proto_init() { - if File_example_proto != nil { - return - } - if !protoimpl.UnsafeEnabled { - file_example_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*SampleEvent); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - } - type x struct{} - out := protoimpl.TypeBuilder{ - File: protoimpl.DescBuilder{ - GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_example_proto_rawDesc, - NumEnums: 0, - NumMessages: 1, - NumExtensions: 0, - NumServices: 0, - }, - GoTypes: file_example_proto_goTypes, - DependencyIndexes: file_example_proto_depIdxs, - MessageInfos: file_example_proto_msgTypes, - }.Build() - File_example_proto = out.File - 
file_example_proto_rawDesc = nil - file_example_proto_goTypes = nil - file_example_proto_depIdxs = nil -} From 5cc9f1aa8ceb09c5e57aaab5b800975caba3cf71 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 17:13:23 +0530 Subject: [PATCH 12/66] worker: use idiomatic null device for tests --- worker/init_test.go | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/worker/init_test.go b/worker/init_test.go index 336231aa..ed469cf5 100644 --- a/worker/init_test.go +++ b/worker/init_test.go @@ -1,18 +1,14 @@ package worker import ( + "io" "os" "testing" "github.com/raystack/raccoon/logger" ) -type void struct{} - -func (v void) Write(_ []byte) (int, error) { - return 0, nil -} func TestMain(t *testing.M) { - logger.SetOutput(void{}) + logger.SetOutput(io.Discard) os.Exit(t.Run()) } From 1bbe539994924a768542da13dadaad0948d9f6d4 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 19:32:25 +0530 Subject: [PATCH 13/66] config: add tests for validation errors --- config/config_test.go | 138 ++++++++++++++++++++++++++++++++++++++++++ config/errors.go | 10 ++- 2 files changed, 145 insertions(+), 3 deletions(-) create mode 100644 config/config_test.go diff --git a/config/config_test.go b/config/config_test.go new file mode 100644 index 00000000..d716561a --- /dev/null +++ b/config/config_test.go @@ -0,0 +1,138 @@ +package config_test + +import ( + "fmt" + "os" + "testing" + + "github.com/MakeNowJust/heredoc" + "github.com/raystack/raccoon/config" + "github.com/stretchr/testify/assert" +) + +func TestLoad(t *testing.T) { + var testCases = []struct { + Desc string + Cfg string + Err error + }{ + { + Desc: "should return an error if websocket.conn.id.header is not specified", + Err: config.ConfigError{ + Env: "SERVER_WEBSOCKET_CONN_ID_HEADER", + Flag: "server.websocket.conn.id.header", + }, + }, + { + Desc: "should return an error if publisher type is pubsub and ProjectID is not specified", + Cfg: heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "pubsub" + `), + Err: config.ConfigError{ + Env: "PUBLISHER_PUBSUB_PROJECT_ID", + Flag: "publisher.pubsub.project.id", + }, + }, + { + Desc: "should return an error if publisher type is pubsub and credentials are not specified", + Cfg: heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "pubsub" + pubsub: + project_id: simulated-project-001 + `), + Err: config.ConfigError{ + Env: "PUBLISHER_PUBSUB_CREDENTIALS", + Flag: "publisher.pubsub.credentials", + }, + }, + { + Desc: "should return an error if publisher type is kinesis and credentials are not specified", + Cfg: heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "kinesis" + `), + Err: config.ConfigError{ + Env: "PUBLISHER_KINESIS_CREDENTIALS", + Flag: "publisher.kinesis.credentials", + }, + }, + { + Desc: "should return an error if publisher type is kafka and bootstrap servers are not specified", + Cfg: heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "kafka" + `), + Err: config.ConfigError{ + Env: "PUBLISHER_KAFKA_CLIENT_BOOTSTRAP_SERVERS", + Flag: "publisher.kafka.client.bootstrap.servers", + }, + }, + { + Desc: "should return an error if an unknown publisher type is specified", + Cfg: heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "non-existent" + `), + Err: fmt.Errorf("unknown publisher: non-existent"), + }, + } + + for _, testCase := range testCases { + 
t.Run(testCase.Desc, func(t *testing.T) { + fd, err := newTempFile() + if err != nil { + t.Errorf("error creating temporary file: %v", err) + return + } + defer fd.Close() + + _, err = fmt.Fprint(fd, testCase.Cfg) + if err != nil { + t.Errorf("error writing test config: %v", err) + return + } + + err = config.Load(fd.Name()) + assert.Equal(t, err, testCase.Err) + }) + } +} + +type tempFile struct { + *os.File +} + +func (f tempFile) Close() error { + f.File.Close() + return os.Remove(f.File.Name()) +} + +func newTempFile() (tempFile, error) { + fd, err := os.CreateTemp("", "raccoon-test-*") + if err != nil { + return tempFile{}, err + } + return tempFile{fd}, nil +} diff --git a/config/errors.go b/config/errors.go index 53d63bf8..ea90de4e 100644 --- a/config/errors.go +++ b/config/errors.go @@ -60,9 +60,13 @@ func errCfgRequired(path string) error { if err != nil { return err } - return errRequired(env, cmd) + return ConfigError{env, cmd} } -func errRequired(env, cmd string) error { - return fmt.Errorf("%s (--%s) is required", env, cmd) +type ConfigError struct { + Env, Flag string +} + +func (e ConfigError) Error() string { + return fmt.Sprintf("%s (--%s) is required", e.Env, e.Flag) } From 03b8f6b9f13283527d04df22ceefc76491c66217 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 22:57:49 +0530 Subject: [PATCH 14/66] config: add test for valid configuration --- config/config.go | 3 +++ config/config_test.go | 14 ++++++++++++++ 2 files changed, 17 insertions(+) diff --git a/config/config.go b/config/config.go index ca801ee7..6760f7d7 100644 --- a/config/config.go +++ b/config/config.go @@ -33,6 +33,9 @@ func prepare() { // add default CORS headers corsHeaders := []string{"Content-Type"} + + // TODO(turtledev): evaluate the need for this block.
+ // It may be a candiate for removal provisionalHeaders := []string{ "SERVER_WEBSOCKET_CONN_GROUP_HEADER", "SERVER_WEBSOCKET_CONN_ID_HEADER", diff --git a/config/config_test.go b/config/config_test.go index d716561a..1df00ee7 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -97,6 +97,20 @@ func TestLoad(t *testing.T) { `), Err: fmt.Errorf("unknown publisher: non-existent"), }, + { + Desc: "Should succeed when a valid config is specified", + Cfg: heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "kafka" + kafka: + client: + bootstrap_servers: localhost:8082 + `), + }, } for _, testCase := range testCases { From 5f4c22b42264c2fca1dcd98c8a833b18b252e781 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 23:11:24 +0530 Subject: [PATCH 15/66] config: add tests for internal prepare() --- config/config_test.go | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/config/config_test.go b/config/config_test.go index 1df00ee7..8b52dc0c 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -134,6 +134,40 @@ func TestLoad(t *testing.T) { } } +func TestPrepare(t *testing.T) { + t.Run("should use ${GOOGLE_APPLICATION_CREDENTIALS} as value of pubsub credentials if latter is unspecified", func(t *testing.T) { + fd, err := newTempFile() + if err != nil { + t.Errorf("error creating temporary file: %v", err) + return + } + defer fd.Close() + + cfg := heredoc.Doc(` + server: + websocket: + conn: + id_header: "X-User-ID" + publisher: + type: "pubsub" + pubsub: + project_id: simulated-project-001 + `) + _, err = fmt.Fprint(fd, cfg) + if err != nil { + t.Errorf("error writing test config: %v", err) + return + } + + creds := "/path/to/credentials" + os.Setenv("GOOGLE_APPLICATION_CREDENTIALS", creds) + defer os.Unsetenv("GOOGLE_APPLICATION_CREDENTIALS") + + assert.Nil(t, config.Load(fd.Name())) + assert.Equal(t, config.Publisher.PubSub.CredentialsFile, creds) + }) +} + type tempFile struct { *os.File } From 8c4f37daa4cd9b7a3efb54bc42ba5a0f4d9caa52 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 23:19:14 +0530 Subject: [PATCH 16/66] config: add tests for Walk() --- config/walk_test.go | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 config/walk_test.go diff --git a/config/walk_test.go b/config/walk_test.go new file mode 100644 index 00000000..bd28c994 --- /dev/null +++ b/config/walk_test.go @@ -0,0 +1,29 @@ +package config_test + +import ( + "testing" + + "github.com/raystack/raccoon/config" +) + +func TestWalk(t *testing.T) { + var cfgSet = make(map[string]bool) + for _, cfg := range config.Walk() { + cfgSet[cfg.Meta.Tag.Get("cmdx")] = true + } + + var samples = []string{ + "publisher.type", + "server.websocket.conn.id.header", + "publisher.kafka.client.bootstrap.servers", + "publisher.pubsub.project.id", + "publisher.pubsub.credentials", + "publisher.kinesis.credentials", + } + + for _, cfg := range samples { + if !cfgSet[cfg] { + t.Errorf("expected Walk() to return config with cmdx tag %s, but it didn't", cfg) + } + } +} From 5d8a779eeb9b10f918c903175ed225b2fe1a4e6a Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 23:24:13 +0530 Subject: [PATCH 17/66] config: kafka: add tests for ToConfigMap() --- config/publisher_test.go | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 config/publisher_test.go diff --git a/config/publisher_test.go b/config/publisher_test.go new file mode 100644 index 
00000000..0847ebc4 --- /dev/null +++ b/config/publisher_test.go @@ -0,0 +1,23 @@ +package config + +import ( + "testing" + + "github.com/confluentinc/confluent-kafka-go/kafka" + "github.com/stretchr/testify/assert" +) + +func TestKafkaPublisher(t *testing.T) { + p := publisherKafka{ + ClientConfig: kafkaClientConfig{ + BootstrapServers: "localhost:8082", + Acks: "1", + }, + } + + cm := p.ToKafkaConfigMap() + expected := &kafka.ConfigMap{} + expected.SetKey("bootstrap.servers", "localhost:8082") + expected.SetKey("acks", "1") + assert.Equal(t, cm, expected) +} From ad765fdf7f22ac2d5fe69993564c1b8ca35ee7ae Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 23:27:58 +0530 Subject: [PATCH 18/66] config: kafka: add more tests for config map serializer --- config/publisher_test.go | 28 +++++++++++++++++----------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/config/publisher_test.go b/config/publisher_test.go index 0847ebc4..7be96b32 100644 --- a/config/publisher_test.go +++ b/config/publisher_test.go @@ -8,16 +8,22 @@ import ( ) func TestKafkaPublisher(t *testing.T) { - p := publisherKafka{ - ClientConfig: kafkaClientConfig{ - BootstrapServers: "localhost:8082", - Acks: "1", - }, - } + t.Run("should convert client config to equivalent config map", func(t *testing.T) { + p := publisherKafka{ + ClientConfig: kafkaClientConfig{ + BootstrapServers: "localhost:8082", + Acks: "1", + }, + } - cm := p.ToKafkaConfigMap() - expected := &kafka.ConfigMap{} - expected.SetKey("bootstrap.servers", "localhost:8082") - expected.SetKey("acks", "1") - assert.Equal(t, cm, expected) + cm := p.ToKafkaConfigMap() + expected := &kafka.ConfigMap{} + expected.SetKey("bootstrap.servers", "localhost:8082") + expected.SetKey("acks", "1") + assert.Equal(t, cm, expected) + }) + + t.Run("should only add configs to config map that have non-zero values", func(t *testing.T) { + assert.Equal(t, &kafka.ConfigMap{}, publisherKafka{}.ToKafkaConfigMap()) + }) } From 1ddad34e318e419a24f1cb542483c96d8bdc1307 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Wed, 4 Sep 2024 23:45:42 +0530 Subject: [PATCH 19/66] config: add tests for internal helper functions --- config/errors.go | 10 +++++----- config/errors_test.go | 20 ++++++++++++++++++++ 2 files changed, 25 insertions(+), 5 deletions(-) create mode 100644 config/errors_test.go diff --git a/config/errors.go b/config/errors.go index ea90de4e..c711c37a 100644 --- a/config/errors.go +++ b/config/errors.go @@ -34,19 +34,19 @@ func cfgMetadata(path string) (string, string, error) { } if !found { - return "", "", fmt.Errorf("%s is missing field %s", parent, strings.Join(hist, ".")) + return "", "", fmt.Errorf("config is missing field %s", strings.Join(hist, ".")) } - envPartial := strings.ToUpper(field.Tag.Get("mapstructure")) + envPartial := field.Tag.Get("mapstructure") if strings.TrimSpace(envPartial) == "" { - return "", "", fmt.Errorf("%s.%s is missing mapstructure tag or is empty", parent, strings.Join(hist, ".")) + return "", "", fmt.Errorf("config.%s is missing mapstructure tag or is empty", strings.Join(hist, ".")) } - env = append(env, envPartial) + env = append(env, strings.ToUpper(envPartial)) if len(fields) == 1 { cmdxTag := field.Tag.Get("cmdx") if strings.TrimSpace(cmdxTag) == "" { - return "", "", fmt.Errorf("%s.%s is missing cmdx tag or is empty", parent, strings.Join(hist, ".")) + return "", "", fmt.Errorf("config.%s is missing cmdx tag or is empty", strings.Join(hist, ".")) } cmd = cmdxTag } diff --git a/config/errors_test.go 
b/config/errors_test.go new file mode 100644 index 00000000..13f423aa --- /dev/null +++ b/config/errors_test.go @@ -0,0 +1,20 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestCfgMetadata(t *testing.T) { + t.Run("should return an error for a non-existent field", func(t *testing.T) { + _, _, err := cfgMetadata("unknown.field") + assert.Error(t, err) + }) + t.Run("should return an error if the terminal field is missing cmdx tag", func(t *testing.T) { + // this is an intermediate field, so it shouldn't contain cmdx tag + _, _, err := cfgMetadata("Publisher.Kafka") + assert.Error(t, err) + }) + +} From 5132317be867d6e2ccb20b3bbaf27f8043328b14 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 15:57:55 +0530 Subject: [PATCH 20/66] refactor: extract Clock to a stand alone package --- clock/clock.go | 21 +++++++++++++++++++++ clock/mock.go | 16 ++++++++++++++++ worker/clock.go | 15 --------------- worker/mocks.go | 10 ---------- worker/worker.go | 5 +++-- worker/worker_test.go | 3 ++- 6 files changed, 42 insertions(+), 28 deletions(-) create mode 100644 clock/clock.go create mode 100644 clock/mock.go delete mode 100644 worker/clock.go diff --git a/clock/clock.go b/clock/clock.go new file mode 100644 index 00000000..b71ea129 --- /dev/null +++ b/clock/clock.go @@ -0,0 +1,21 @@ +package clock + +import ( + "time" +) + +// Clock represents a time source +// It can be used as a way to abstract away the implicit +// dependency on time.Now() from code. +type Clock interface { + Now() time.Time +} + +type defaultClock struct{} + +func (defaultClock) Now() time.Time { + return time.Now() +} + +// Default clock. Uses time.Now() internally. +var Default = defaultClock{} diff --git a/clock/mock.go b/clock/mock.go new file mode 100644 index 00000000..8c492b6c --- /dev/null +++ b/clock/mock.go @@ -0,0 +1,16 @@ +package clock + +import ( + "time" + + "github.com/stretchr/testify/mock" +) + +// Mock clock for testing +type Mock struct { + mock.Mock +} + +func (m *Mock) Now() time.Time { + return m.Called().Get(0).(time.Time) +} diff --git a/worker/clock.go b/worker/clock.go deleted file mode 100644 index dfc63e48..00000000 --- a/worker/clock.go +++ /dev/null @@ -1,15 +0,0 @@ -package worker - -import "time" - -type Clock interface { - Now() time.Time -} - -type defaultClock struct{} - -func (defaultClock) Now() time.Time { - return time.Now() -} - -var DefaultClock = defaultClock{} diff --git a/worker/mocks.go b/worker/mocks.go index d6e54d81..6bf29a71 100644 --- a/worker/mocks.go +++ b/worker/mocks.go @@ -1,8 +1,6 @@ package worker import ( - "time" - pb "github.com/raystack/raccoon/proto" mock "github.com/stretchr/testify/mock" ) @@ -29,11 +27,3 @@ type mockAck struct { func (m *mockAck) Ack(err error) { m.Called(err) } - -type mockClock struct { - mock.Mock -} - -func (m *mockClock) Now() time.Time { - return m.Called().Get(0).(time.Time) -} diff --git a/worker/worker.go b/worker/worker.go index 36e567b4..8df63283 100644 --- a/worker/worker.go +++ b/worker/worker.go @@ -5,6 +5,7 @@ import ( "sync" "time" + "github.com/raystack/raccoon/clock" "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/logger" "github.com/raystack/raccoon/metrics" @@ -28,7 +29,7 @@ type Pool struct { producer Producer wg sync.WaitGroup instrument metrics.MetricInstrument - clock Clock + clock clock.Clock } // CreateWorkerPool create new Pool struct given size and EventsChannel worker. 
@@ -39,7 +40,7 @@ func CreateWorkerPool(size int, eventsChannel <-chan collector.CollectRequest, p producer: producer, wg: sync.WaitGroup{}, instrument: metrics.Instrument(), - clock: DefaultClock, + clock: clock.Default, } } diff --git a/worker/worker_test.go b/worker/worker_test.go index 4d8af0f9..ef19b032 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -5,6 +5,7 @@ import ( "testing" "time" + "github.com/raystack/raccoon/clock" "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/identification" "github.com/raystack/raccoon/metrics" @@ -116,7 +117,7 @@ func TestWorker(t *testing.T) { eventsChannel := make(chan collector.CollectRequest, 1) now := time.Now() - clk := &mockClock{} + clk := &clock.Mock{} clk.On("Now").Return(now).Once() // batchReadTime clk.On("Now").Return(now.Add(2 * time.Millisecond)).Once() // produceTime clk.On("Now").Return(now.Add(3 * time.Millisecond)).Once() // eventTimingMs From e779dd0bbaf84fef2384ccaa3c19a027900dfd20 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 16:07:27 +0530 Subject: [PATCH 21/66] collector: add tests for TimePushed mutation --- collector/service.go | 11 +++++++---- collector/service_test.go | 37 ++++++++++++++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 5 deletions(-) diff --git a/collector/service.go b/collector/service.go index 0fed828c..0562a3c9 100644 --- a/collector/service.go +++ b/collector/service.go @@ -2,21 +2,24 @@ package collector import ( "context" - "time" + + "github.com/raystack/raccoon/clock" ) type ChannelCollector struct { - ch chan CollectRequest + ch chan CollectRequest + clock clock.Clock } func NewChannelCollector(c chan CollectRequest) Collector { return &ChannelCollector{ - ch: c, + ch: c, + clock: clock.Default, } } func (c *ChannelCollector) Collect(ctx context.Context, req *CollectRequest) error { - req.TimePushed = time.Now() + req.TimePushed = c.clock.Now() c.ch <- *req return nil } diff --git a/collector/service_test.go b/collector/service_test.go index 8b3f8d4c..f7e89991 100644 --- a/collector/service_test.go +++ b/collector/service_test.go @@ -1,8 +1,13 @@ package collector import ( + "context" "reflect" "testing" + "time" + + "github.com/raystack/raccoon/clock" + "github.com/stretchr/testify/assert" ) func TestNewChannelCollector(t *testing.T) { @@ -22,7 +27,8 @@ func TestNewChannelCollector(t *testing.T) { c: c, }, want: &ChannelCollector{ - ch: c, + ch: c, + clock: clock.Default, }, }, } @@ -34,3 +40,32 @@ func TestNewChannelCollector(t *testing.T) { }) } } + +func TestCollect(t *testing.T) { + t.Run("It should mutate TimePushed to the time the collect request is acknowledged", func(t *testing.T) { + now := time.Now() + clk := &clock.Mock{} + clk.On("Now").Return(now).Once() + defer clk.AssertExpectations(t) + + ch := make(chan CollectRequest) + defer close(ch) + + collector := &ChannelCollector{ + ch: ch, + clock: clk, + } + + consumer := func(requests chan CollectRequest) { + for range requests { + } + } + go consumer(collector.ch) + + req := &CollectRequest{} + assert.Nil( + t, collector.Collect(context.Background(), req), + ) + assert.Equal(t, req.TimePushed, now) + }) +} From 855968e00a4f06914d019e22ea1c76b1ff9d1131 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 16:15:13 +0530 Subject: [PATCH 22/66] serialization: add tests for SerialiseProto --- serialization/json_test.go | 2 -- serialization/proto_test.go | 22 ++++++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 
serialization/proto_test.go diff --git a/serialization/json_test.go b/serialization/json_test.go index 098747d0..670e79ee 100644 --- a/serialization/json_test.go +++ b/serialization/json_test.go @@ -1,7 +1,6 @@ package serialization import ( - "fmt" "reflect" "testing" @@ -36,7 +35,6 @@ func TestJSONSerializer_Serialize(t *testing.T) { t.Errorf("JSONSerializer.Serialize() error = %v, wantErr %v", err, tt.wantErr) return } - fmt.Println(string(got)) if !reflect.DeepEqual(got, tt.want) { t.Errorf("JSONSerializer.Serialize() = %v, want %v", got, tt.want) } diff --git a/serialization/proto_test.go b/serialization/proto_test.go new file mode 100644 index 00000000..a3522d3e --- /dev/null +++ b/serialization/proto_test.go @@ -0,0 +1,22 @@ +package serialization_test + +import ( + "testing" + + pb "github.com/raystack/raccoon/proto" + "github.com/raystack/raccoon/serialization" + "github.com/stretchr/testify/assert" +) + +func TestSerialiseProto(t *testing.T) { + t.Run("should return an error if argument is a non-protobuf message", func(t *testing.T) { + arg := struct{}{} + _, err := serialization.SerializeProto(arg) + assert.Equal(t, err, serialization.ErrInvalidProtoMessage) + }) + t.Run("should serialize a proto message", func(t *testing.T) { + v := &pb.SendEventRequest{} + _, err := serialization.SerializeProto(v) + assert.Nil(t, err) + }) +} From fbc699c3232e022e5cd51475c088f5b43eab4ce7 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 16:17:51 +0530 Subject: [PATCH 23/66] deserialization: add tests for DeserializeFunc --- deserialization/proto_test.go | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/deserialization/proto_test.go b/deserialization/proto_test.go index 25463ff9..cb188068 100644 --- a/deserialization/proto_test.go +++ b/deserialization/proto_test.go @@ -26,6 +26,15 @@ func TestProtoDeserilizer_Deserialize(t *testing.T) { }, wantErr: false, }, + { + name: "Return error for non-proto message", + d: DeserializeProto, + args: args{ + b: []byte{}, + i: struct{}{}, + }, + wantErr: true, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { From 977904e9252ed4b715cd1d437c12decfc44204b6 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 16:42:09 +0530 Subject: [PATCH 24/66] worker: add test for flush time out --- worker/worker_test.go | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/worker/worker_test.go b/worker/worker_test.go index ef19b032..7607b4e0 100644 --- a/worker/worker_test.go +++ b/worker/worker_test.go @@ -200,5 +200,20 @@ func TestWorker(t *testing.T) { assert.Equal(t, 0, len(bc)) kp.AssertExpectations(t) }) + + t.Run("Should return true if timed-out", func(t *testing.T) { + + kp := mockKafkaPublisher{} + defer kp.AssertExpectations(t) + + bc := make(chan collector.CollectRequest) + defer close(bc) + + worker := CreateWorkerPool( + 1, bc, &kp, + ) + worker.StartWorkers() + assert.True(t, worker.FlushWithTimeOut(time.Millisecond)) + }) }) } From 713f7c2e092a4d1ca6a3deda13bdb7448c325e48 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 19:49:28 +0530 Subject: [PATCH 25/66] config: refactor ack value names --- config/event.go | 4 ++-- services/grpc/handler.go | 4 ++-- services/grpc/handler_test.go | 12 +++--------- services/rest/handler.go | 4 ++-- services/rest/websocket/handler.go | 4 ++-- 5 files changed, 11 insertions(+), 17 deletions(-) diff --git a/config/event.go b/config/event.go index 0aa3b05c..8d08aec7 100644 --- a/config/event.go +++ b/config/event.go @@ -5,8 +5,8 @@ var Event event 
type AckType int const ( - Asynchronous AckType = 0 - Synchronous AckType = 1 + AckTypeAsync AckType = 0 + AckTypeSync AckType = 1 ) type event struct { diff --git a/services/grpc/handler.go b/services/grpc/handler.go index 2a8fecea..6e81ece8 100644 --- a/services/grpc/handler.go +++ b/services/grpc/handler.go @@ -60,7 +60,7 @@ func (h *Handler) SendEvent(ctx context.Context, req *pb.SendEventRequest) (*pb. func (h *Handler) Ack(responseChannel chan *pb.SendEventResponse, reqGuid, connGroup string) collector.AckFunc { switch config.Event.Ack { - case config.Asynchronous: + case config.AckTypeAsync: responseChannel <- &pb.SendEventResponse{ Status: pb.Status_STATUS_SUCCESS, Code: pb.Code_CODE_OK, @@ -70,7 +70,7 @@ func (h *Handler) Ack(responseChannel chan *pb.SendEventResponse, reqGuid, connG }, } return nil - case config.Synchronous: + case config.AckTypeSync: return func(err error) { if err != nil { logger.Errorf("[grpc.Ack] publish message failed for %s: %v", connGroup, err) diff --git a/services/grpc/handler_test.go b/services/grpc/handler_test.go index eaa365a0..f7c28a9a 100644 --- a/services/grpc/handler_test.go +++ b/services/grpc/handler_test.go @@ -2,6 +2,7 @@ package grpc import ( "context" + "io" "reflect" "testing" @@ -15,23 +16,16 @@ import ( "google.golang.org/protobuf/types/known/timestamppb" ) -type void struct{} - -func (v void) Write(_ []byte) (int, error) { - return 0, nil -} - func TestHandler_SendEvent(t *testing.T) { type fields struct { - C collector.Collector - UnimplementedEventServiceServer pb.UnimplementedEventServiceServer + C collector.Collector } type args struct { ctx context.Context req *pb.SendEventRequest } - logger.SetOutput(void{}) + logger.SetOutput(io.Discard) metrics.SetVoid() collector := new(collector.MockCollector) ctx := context.Background() diff --git a/services/rest/handler.go b/services/rest/handler.go index 94c12e4a..575340b3 100644 --- a/services/rest/handler.go +++ b/services/rest/handler.go @@ -141,7 +141,7 @@ func (h *Handler) Ack(rw http.ResponseWriter, resChannel chan struct{}, s serial SendEventResponse: &pb.SendEventResponse{}, } switch config.Event.Ack { - case config.Asynchronous: + case config.AckTypeAsync: rw.WriteHeader(http.StatusOK) _, err := res.SetCode(pb.Code_CODE_OK).SetStatus(pb.Status_STATUS_SUCCESS).SetSentTime(time.Now().Unix()). 
@@ -151,7 +151,7 @@ func (h *Handler) Ack(rw http.ResponseWriter, resChannel chan struct{}, s serial } resChannel <- struct{}{} return nil - case config.Synchronous: + case config.AckTypeSync: return func(err error) { if err != nil { rw.WriteHeader(http.StatusInternalServerError) diff --git a/services/rest/websocket/handler.go b/services/rest/websocket/handler.go index bbdfb4c5..dff4fe23 100644 --- a/services/rest/websocket/handler.go +++ b/services/rest/websocket/handler.go @@ -126,10 +126,10 @@ func (h *Handler) HandlerWSEvents(w http.ResponseWriter, r *http.Request) { func (h *Handler) Ack(conn connection.Conn, resChannel chan AckInfo, s serialization.SerializeFunc, messageType int, reqGuid string, timeConsumed time.Time) collector.AckFunc { switch config.Event.Ack { - case config.Asynchronous: + case config.AckTypeAsync: writeSuccessResponse(conn, s, messageType, reqGuid) return nil - case config.Synchronous: + case config.AckTypeSync: return func(err error) { if config.Server.Batch.DedupEnabled { if err != nil { From fa3d4c58eb66c8720d0950ee2cd9bfeb2a970788 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 20:48:53 +0530 Subject: [PATCH 26/66] tests: integration e2e tests results in coverage data --- .github/workflows/test.yaml | 4 +++- Dockerfile | 2 +- Makefile | 5 +++++ docker-compose.yml | 3 +++ 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 7f26b199..46ad0dc5 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -21,7 +21,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test ./... -v -coverprofile=coverage.out + run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' @@ -29,6 +29,8 @@ jobs: GRPC_SERVER_ADDR: 'localhost:8081' PUBSUB_EMULATOR_HOST: 'localhost:8085' LOCALSTACK_HOST: 'http://localhost:4566' + - name: Merge coverage data + run: go tool covdata -i=raccoon-coverage -o coverage.out - name: Upload coverage data uses: shogo82148/actions-goveralls@v1 with: diff --git a/Dockerfile b/Dockerfile index aab37cfc..d7afb9f7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -8,7 +8,7 @@ RUN PROTOC_ZIP=protoc-3.17.3-linux-x86_64.zip && \ unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \ rm -f $PROTOC_ZIP COPY . . -RUN make build +RUN make build-cov FROM debian:bookworm-slim diff --git a/Makefile b/Makefile index bb29175a..c7e3b160 100644 --- a/Makefile +++ b/Makefile @@ -46,6 +46,11 @@ build: ## Build the raccoon binary go build ${BUILD_FLAGS} ${LD_FLAGS} ${NAME} @echo "Build complete" +build-cov: ## Build raccoon with coverage instrumentation + @echo "Building raccoon version ${VERSION}..." + go build -cover ${BUILD_FLAGS} ${LD_FLAGS} ${NAME} + @echo "Build complete" + install: @echo "Installing Raccoon to ${GOBIN}..." 
go install ${BUILD_FLAGS} ${LD_FLAGS} ${NAME} diff --git a/docker-compose.yml b/docker-compose.yml index 323b1222..edfcaa2f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -94,11 +94,14 @@ services: METRIC_STATSD_ADDRESS: "telegraf:8125" METRIC_STATSD_FLUSH_PERIOD_MS: 100 LOG_LEVEL: "info" + GOCOVERDIR: /app/raccoon-coverage ports: - "8080:8080" - "8081:8081" networks: - cs-network + volumes: + - ./raccoon-coverage/:/app/raccoon-coverage # telegraf: # image: telegraf # volumes: From 527b475c8c3740ad0aadd33b6f1ccdfe085b6919 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 21:09:54 +0530 Subject: [PATCH 27/66] ci: test: fix coverage merge failing --- .github/workflows/test.yaml | 6 ++++-- docker-compose.yml | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 46ad0dc5..e1e7aa65 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -21,7 +21,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test ./... -v -cover -test.gocoverdir=/tmp/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' @@ -29,8 +29,10 @@ jobs: GRPC_SERVER_ADDR: 'localhost:8081' PUBSUB_EMULATOR_HOST: 'localhost:8085' LOCALSTACK_HOST: 'http://localhost:4566' + - name: Stop raccoon + run: docker compose down - name: Merge coverage data - run: go tool covdata -i=raccoon-coverage -o coverage.out + run: go tool covdata -i=/tmp/raccoon-coverage -o coverage.out - name: Upload coverage data uses: shogo82148/actions-goveralls@v1 with: diff --git a/docker-compose.yml b/docker-compose.yml index edfcaa2f..e722502c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -101,7 +101,7 @@ services: networks: - cs-network volumes: - - ./raccoon-coverage/:/app/raccoon-coverage + - /tmp/raccoon-coverage/:/app/raccoon-coverage # telegraf: # image: telegraf # volumes: From 84fabf395f559e8ee713b809998f465e8406f697 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 21:20:12 +0530 Subject: [PATCH 28/66] ci: prospective fix for coverdata #1 --- .github/workflows/test.yaml | 6 ++++-- docker-compose.yml | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index e1e7aa65..8b182e83 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -20,8 +20,10 @@ jobs: uses: actions/checkout@v2 - name: Start raccoon run: make docker-run + - name: Create coverage data directory + run: mkdir raccoon-coverage - name: Run tests - run: go test ./... -v -cover -test.gocoverdir=/tmp/raccoon-coverage/ + run: go test ./... 
-v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' @@ -32,7 +34,7 @@ jobs: - name: Stop raccoon run: docker compose down - name: Merge coverage data - run: go tool covdata -i=/tmp/raccoon-coverage -o coverage.out + run: go tool covdata -i=raccoon-coverage -o coverage.out - name: Upload coverage data uses: shogo82148/actions-goveralls@v1 with: diff --git a/docker-compose.yml b/docker-compose.yml index e722502c..edfcaa2f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -101,7 +101,7 @@ services: networks: - cs-network volumes: - - /tmp/raccoon-coverage/:/app/raccoon-coverage + - ./raccoon-coverage/:/app/raccoon-coverage # telegraf: # image: telegraf # volumes: From 1d47e976ac523d8ceb13b32cd1b72fc3cdbf7550 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 21:24:33 +0530 Subject: [PATCH 29/66] ci: prospective fix for coverdata #2 --- .github/workflows/test.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 8b182e83..3e03b11b 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -18,10 +18,10 @@ jobs: go-version: "1.22.4" - name: Checkout code uses: actions/checkout@v2 - - name: Start raccoon - run: make docker-run - name: Create coverage data directory run: mkdir raccoon-coverage + - name: Start raccoon + run: make docker-run - name: Run tests run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: From af606d474659bbfcdc4d556132232afe6d359ec2 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 21:30:40 +0530 Subject: [PATCH 30/66] ci: prospective fix for coverdata #3 --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 3e03b11b..1735f74d 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -34,7 +34,7 @@ jobs: - name: Stop raccoon run: docker compose down - name: Merge coverage data - run: go tool covdata -i=raccoon-coverage -o coverage.out + run: go tool covdata textfmt -i=raccoon-coverage -o coverage.out - name: Upload coverage data uses: shogo82148/actions-goveralls@v1 with: From 810e404e280ec53737a0b2dcac855ca092de053b Mon Sep 17 00:00:00 2001 From: turtleDev Date: Thu, 5 Sep 2024 21:43:51 +0530 Subject: [PATCH 31/66] ci: upload test coverage as artifact --- .github/workflows/test.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 1735f74d..b8a639d7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -35,10 +35,15 @@ jobs: run: docker compose down - name: Merge coverage data run: go tool covdata textfmt -i=raccoon-coverage -o coverage.out - - name: Upload coverage data + - name: Upload coverage data to coveralls uses: shogo82148/actions-goveralls@v1 with: path-to-profile: coverage.out + - name: Upload coverage data as workflow artifact + uses: actions/upload-artifact@v4 + with: + name: coverage + path: coverage.out smoke-test: runs-on: ubuntu-latest steps: From b6535f60cbcbd09e75cbf51ae92a3aefc728e066 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 17:20:54 +0530 Subject: [PATCH 32/66] ci: refactor Docker build for release and testing --- .goreleaser.yaml | 1 - Dockerfile | 17 ++--------------- Dockerfile.release | 4 ---- Dockerfile.test | 17 +++++++++++++++++ docker-compose.yml | 1 + 5 
files changed, 20 insertions(+), 20 deletions(-) delete mode 100644 Dockerfile.release create mode 100644 Dockerfile.test diff --git a/.goreleaser.yaml b/.goreleaser.yaml index fda23aff..8fc9cf1c 100644 --- a/.goreleaser.yaml +++ b/.goreleaser.yaml @@ -66,7 +66,6 @@ archives: dockers: - id: dockerhub - dockerfile: Dockerfile.release image_templates: - "docker.io/raystack/{{.ProjectName}}:latest" - "docker.io/raystack/{{.ProjectName}}:{{ .Version }}" diff --git a/Dockerfile b/Dockerfile index d7afb9f7..b80fdc1c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,17 +1,4 @@ -FROM golang:1.22.4 - -WORKDIR /app -RUN apt-get update && apt-get install unzip --no-install-recommends --assume-yes -RUN PROTOC_ZIP=protoc-3.17.3-linux-x86_64.zip && \ - curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.17.3/$PROTOC_ZIP && \ - unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ - unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \ - rm -f $PROTOC_ZIP -COPY . . -RUN make build-cov - - FROM debian:bookworm-slim WORKDIR /app -COPY --from=0 /app/raccoon ./raccoon -ENTRYPOINT [ "/app/raccoon" ] +COPY raccoon . +ENTRYPOINT [ "/app/raccoon" ] diff --git a/Dockerfile.release b/Dockerfile.release deleted file mode 100644 index b80fdc1c..00000000 --- a/Dockerfile.release +++ /dev/null @@ -1,4 +0,0 @@ -FROM debian:bookworm-slim -WORKDIR /app -COPY raccoon . -ENTRYPOINT [ "/app/raccoon" ] diff --git a/Dockerfile.test b/Dockerfile.test new file mode 100644 index 00000000..d7afb9f7 --- /dev/null +++ b/Dockerfile.test @@ -0,0 +1,17 @@ +FROM golang:1.22.4 + +WORKDIR /app +RUN apt-get update && apt-get install unzip --no-install-recommends --assume-yes +RUN PROTOC_ZIP=protoc-3.17.3-linux-x86_64.zip && \ + curl -OL https://github.com/protocolbuffers/protobuf/releases/download/v3.17.3/$PROTOC_ZIP && \ + unzip -o $PROTOC_ZIP -d /usr/local bin/protoc && \ + unzip -o $PROTOC_ZIP -d /usr/local 'include/*' && \ + rm -f $PROTOC_ZIP +COPY . . +RUN make build-cov + + +FROM debian:bookworm-slim +WORKDIR /app +COPY --from=0 /app/raccoon ./raccoon +ENTRYPOINT [ "/app/raccoon" ] diff --git a/docker-compose.yml b/docker-compose.yml index edfcaa2f..f30c73a3 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -55,6 +55,7 @@ services: cs: build: context: . + dockerfile: Dockerfile.test command: ["server"] hostname: cs container_name: cs From 05a09c426a553beff0d40572702e5f5dcd0df8e8 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 17:46:43 +0530 Subject: [PATCH 33/66] services: grpc: add tests for synchronous event handling --- services/grpc/handler.go | 13 +------ services/grpc/handler_test.go | 71 +++++++++++++++++++++++++++++++---- services/grpc/service.go | 6 ++- 3 files changed, 71 insertions(+), 19 deletions(-) diff --git a/services/grpc/handler.go b/services/grpc/handler.go index 6e81ece8..a3ddcead 100644 --- a/services/grpc/handler.go +++ b/services/grpc/handler.go @@ -17,6 +17,7 @@ import ( type Handler struct { C collector.Collector pb.UnimplementedEventServiceServer + ackType config.AckType } func (h *Handler) SendEvent(ctx context.Context, req *pb.SendEventRequest) (*pb.SendEventResponse, error) { @@ -59,17 +60,7 @@ func (h *Handler) SendEvent(ctx context.Context, req *pb.SendEventRequest) (*pb. 
} func (h *Handler) Ack(responseChannel chan *pb.SendEventResponse, reqGuid, connGroup string) collector.AckFunc { - switch config.Event.Ack { - case config.AckTypeAsync: - responseChannel <- &pb.SendEventResponse{ - Status: pb.Status_STATUS_SUCCESS, - Code: pb.Code_CODE_OK, - SentTime: time.Now().Unix(), - Data: map[string]string{ - "req_guid": reqGuid, - }, - } - return nil + switch h.ackType { case config.AckTypeSync: return func(err error) { if err != nil { diff --git a/services/grpc/handler_test.go b/services/grpc/handler_test.go index f7c28a9a..135a04dc 100644 --- a/services/grpc/handler_test.go +++ b/services/grpc/handler_test.go @@ -2,6 +2,7 @@ package grpc import ( "context" + "fmt" "io" "reflect" "testing" @@ -17,8 +18,14 @@ import ( ) func TestHandler_SendEvent(t *testing.T) { + // todo(turtledev): refactor this test + // it has the following issues: + // 1. collector is shared across all tests cases + // 2. test case specific parameters are kept in shared scope + type fields struct { - C collector.Collector + C collector.Collector + ack config.AckType } type args struct { ctx context.Context @@ -27,7 +34,6 @@ func TestHandler_SendEvent(t *testing.T) { logger.SetOutput(io.Discard) metrics.SetVoid() - collector := new(collector.MockCollector) ctx := context.Background() meta := metadata.MD{} meta.Set(config.Server.Websocket.Conn.GroupHeader, "group") @@ -38,25 +44,36 @@ func TestHandler_SendEvent(t *testing.T) { SentTime: sentTime, Events: []*pb.Event{}, } + mockCollector := new(collector.MockCollector) contextWithIDGroup := metadata.NewIncomingContext(ctx, meta) - collector.On("Collect", contextWithIDGroup, mock.Anything).Return(nil) + mockCollector.On("Collect", contextWithIDGroup, mock.Anything).Return(nil).Once() + + mockCollector.On("Collect", contextWithIDGroup, mock.Anything).Return(nil).Once().Run(func(args mock.Arguments) { + args.Get(1).(*collector.CollectRequest).AckFunc(nil) + }) + + mockCollector.On("Collect", contextWithIDGroup, mock.Anything).Return(nil).Once().Run(func(args mock.Arguments) { + args.Get(1).(*collector.CollectRequest).AckFunc(fmt.Errorf("simulated error")) + }) metaWithoutGroup := metadata.MD{} metaWithoutGroup.Set(config.Server.Websocket.Conn.IDHeader, "1235") contextWithoutGroup := metadata.NewIncomingContext(ctx, metaWithoutGroup) - collector.On("Collect", contextWithoutGroup, mock.Anything).Return(nil) + mockCollector.On("Collect", contextWithoutGroup, mock.Anything).Return(nil).Once() tests := []struct { name string fields fields args args + ack config.AckType want *pb.SendEventResponse wantErr bool }{ { name: "Sending normal event", fields: fields{ - C: collector, + C: mockCollector, + ack: config.AckTypeAsync, }, args: args{ ctx: contextWithIDGroup, @@ -71,10 +88,49 @@ func TestHandler_SendEvent(t *testing.T) { }, }, }, + { + name: "Sending normal event with synchronous ack", + fields: fields{ + C: mockCollector, + ack: config.AckTypeSync, + }, + args: args{ + ctx: contextWithIDGroup, + req: req, + }, + want: &pb.SendEventResponse{ + Status: pb.Status_STATUS_SUCCESS, + Code: pb.Code_CODE_OK, + SentTime: sentTime.Seconds, + Data: map[string]string{ + "req_guid": req.ReqGuid, + }, + }, + }, + { + name: "Sending normal event with synchronous ack and collector error", + fields: fields{ + C: mockCollector, + ack: config.AckTypeSync, + }, + args: args{ + ctx: contextWithIDGroup, + req: req, + }, + want: &pb.SendEventResponse{ + Status: pb.Status_STATUS_ERROR, + Code: pb.Code_CODE_INTERNAL_ERROR, + SentTime: sentTime.Seconds, + Data: 
map[string]string{ + "req_guid": req.ReqGuid, + }, + }, + }, { name: "Sending without group", fields: fields{ - C: collector, + C: mockCollector, + ack: config.AckTypeAsync, }, args: args{ ctx: contextWithoutGroup, @@ -93,7 +149,8 @@ func TestHandler_SendEvent(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { h := &Handler{ - C: tt.fields.C, + C: tt.fields.C, + ackType: tt.fields.ack, } got, err := h.SendEvent(tt.args.ctx, tt.args.req) if (err != nil) != tt.wantErr { diff --git a/services/grpc/service.go b/services/grpc/service.go index b873243a..d425ca5f 100644 --- a/services/grpc/service.go +++ b/services/grpc/service.go @@ -18,7 +18,11 @@ type Service struct { func NewGRPCService(c collector.Collector) *Service { server := grpc.NewServer() - pb.RegisterEventServiceServer(server, &Handler{C: c}) + handler := &Handler{ + C: c, + ackType: config.Event.Ack, + } + pb.RegisterEventServiceServer(server, handler) return &Service{ s: server, Collector: c, From c3d9a03f51938c8c6ee7d3fd3192f8c51802d6ff Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 17:49:42 +0530 Subject: [PATCH 34/66] services: grpc: add tests to cover metric instrumentation --- services/grpc/handler_test.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/services/grpc/handler_test.go b/services/grpc/handler_test.go index 135a04dc..25bff036 100644 --- a/services/grpc/handler_test.go +++ b/services/grpc/handler_test.go @@ -42,7 +42,11 @@ func TestHandler_SendEvent(t *testing.T) { req := &pb.SendEventRequest{ ReqGuid: "abcd", SentTime: sentTime, - Events: []*pb.Event{}, + Events: []*pb.Event{ + { + Type: "unknown", + }, + }, } mockCollector := new(collector.MockCollector) contextWithIDGroup := metadata.NewIncomingContext(ctx, meta) From 76c45913a51ec224c556f224f1f7f0ea43773045 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 18:22:10 +0530 Subject: [PATCH 35/66] services: rest: remove deadcode For an incoming request, req.Body is never nil https://pkg.go.dev/net/http#Request --- services/rest/handler.go | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/services/rest/handler.go b/services/rest/handler.go index 575340b3..a6c147ff 100644 --- a/services/rest/handler.go +++ b/services/rest/handler.go @@ -80,18 +80,6 @@ func (h *Handler) RESTAPIHandler(rw http.ResponseWriter, r *http.Request) { Group: group, } - if r.Body == nil { - metrics.Increment("batches_read_total", map[string]string{"status": "failed", "reason": "emptybody", "conn_group": identifier.Group}) - logger.Errorf("[rest.GetRESTAPIHandler] %s no body", identifier) - rw.WriteHeader(http.StatusBadRequest) - _, err := res.SetCode(pb.Code_CODE_BAD_REQUEST).SetStatus(pb.Status_STATUS_ERROR).SetReason("no body present"). 
- SetSentTime(time.Now().Unix()).Write(rw, s) - if err != nil { - logger.Errorf("[rest.GetRESTAPIHandler] %s error sending response: %v", identifier, err) - } - return - } - defer io.Copy(io.Discard, r.Body) defer r.Body.Close() From f3c6d4e8b386f4500990a0819d62740fae2bb59f Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 18:37:14 +0530 Subject: [PATCH 36/66] services: rest: add tests for error reading request body --- services/rest/handler_test.go | 51 +++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 services/rest/handler_test.go diff --git a/services/rest/handler_test.go b/services/rest/handler_test.go new file mode 100644 index 00000000..8c63163c --- /dev/null +++ b/services/rest/handler_test.go @@ -0,0 +1,51 @@ +package rest + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/http/httptest" + "os" + "testing" + "testing/iotest" + + "github.com/raystack/raccoon/logger" + "github.com/stretchr/testify/assert" + + pb "github.com/raystack/raccoon/proto" +) + +func TestMain(m *testing.M) { + logger.SetOutput(io.Discard) + os.Exit(m.Run()) +} + +type apiResponse struct { + Status pb.Status `json:"status"` + Code pb.Code `json:"code"` + Reason string `json:"reason"` +} + +func TestHandler(t *testing.T) { + + t.Run("should return an error if reading request body fails", func(t *testing.T) { + h := NewHandler(nil) + + e := fmt.Errorf("simulated error") + rr := httptest.NewRequest("POST", "/api/v1/events", iotest.ErrReader(e)) + rr.Header.Set("Content-Type", "application/json") + + rw := httptest.NewRecorder() + + h.RESTAPIHandler(rw, rr) + + assert.Equal(t, rw.Code, http.StatusInternalServerError) + + res := &apiResponse{} + assert.Nil(t, json.NewDecoder(rw.Body).Decode(res)) + assert.Equal(t, res.Code, pb.Code_CODE_INTERNAL_ERROR) + assert.Equal(t, res.Status, pb.Status_STATUS_ERROR) + assert.Equal(t, res.Reason, "deserialization failure") + }) +} From c63c73faa805c6b8d6aeb773a5339c7c13b9bb1c Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 18:40:45 +0530 Subject: [PATCH 37/66] services: rest: add tests for malformed request body --- services/rest/handler_test.go | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/services/rest/handler_test.go b/services/rest/handler_test.go index 8c63163c..bbfac68b 100644 --- a/services/rest/handler_test.go +++ b/services/rest/handler_test.go @@ -1,6 +1,7 @@ package rest import ( + "bytes" "encoding/json" "fmt" "io" @@ -48,4 +49,25 @@ func TestHandler(t *testing.T) { assert.Equal(t, res.Status, pb.Status_STATUS_ERROR) assert.Equal(t, res.Reason, "deserialization failure") }) + + t.Run("should return an error if request body is malformed", func(t *testing.T) { + h := NewHandler(nil) + + payload := "}{}" + rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) + rr.Header.Set("Content-Type", "application/json") + + rw := httptest.NewRecorder() + + h.RESTAPIHandler(rw, rr) + + assert.Equal(t, rw.Code, http.StatusBadRequest) + + res := &apiResponse{} + assert.Nil(t, json.NewDecoder(rw.Body).Decode(res)) + assert.Equal(t, res.Code, pb.Code_CODE_BAD_REQUEST) + assert.Equal(t, res.Status, pb.Status_STATUS_ERROR) + assert.Equal(t, res.Reason, "deserialization failure") + + }) } From e9815a3767ccdd132d142b0bae92a08760494c26 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 18:43:46 +0530 Subject: [PATCH 38/66] services: rest: refactor ack handling --- services/rest/handler.go | 14 +++----------- 1 file changed, 3 insertions(+), 
11 deletions(-) diff --git a/services/rest/handler.go b/services/rest/handler.go index a6c147ff..46d7a1a9 100644 --- a/services/rest/handler.go +++ b/services/rest/handler.go @@ -28,6 +28,7 @@ type serDe struct { type Handler struct { serDeMap map[string]*serDe collector collector.Collector + ackType config.AckType } func NewHandler(collector collector.Collector) *Handler { @@ -44,6 +45,7 @@ func NewHandler(collector collector.Collector) *Handler { return &Handler{ serDeMap: serDeMap, collector: collector, + ackType: config.Event.Ack, } } @@ -128,17 +130,7 @@ func (h *Handler) Ack(rw http.ResponseWriter, resChannel chan struct{}, s serial res := &Response{ SendEventResponse: &pb.SendEventResponse{}, } - switch config.Event.Ack { - case config.AckTypeAsync: - - rw.WriteHeader(http.StatusOK) - _, err := res.SetCode(pb.Code_CODE_OK).SetStatus(pb.Status_STATUS_SUCCESS).SetSentTime(time.Now().Unix()). - SetDataMap(map[string]string{"req_guid": reqGuid}).Write(rw, s) - if err != nil { - logger.Errorf("[RESTAPIHandler.Ack] %s error sending error response: %v", connGroup, err) - } - resChannel <- struct{}{} - return nil + switch h.ackType { case config.AckTypeSync: return func(err error) { if err != nil { From 56b7e22abbff53cfede5282a81ada17c64dae2c2 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 19:28:28 +0530 Subject: [PATCH 39/66] services: rest: add tests for different error paths --- services/rest/handler_test.go | 173 +++++++++++++++++++++++++++------- 1 file changed, 139 insertions(+), 34 deletions(-) diff --git a/services/rest/handler_test.go b/services/rest/handler_test.go index bbfac68b..eafa9e0e 100644 --- a/services/rest/handler_test.go +++ b/services/rest/handler_test.go @@ -11,8 +11,11 @@ import ( "testing" "testing/iotest" + "github.com/raystack/raccoon/collector" + "github.com/raystack/raccoon/config" "github.com/raystack/raccoon/logger" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" pb "github.com/raystack/raccoon/proto" ) @@ -30,44 +33,146 @@ type apiResponse struct { func TestHandler(t *testing.T) { - t.Run("should return an error if reading request body fails", func(t *testing.T) { - h := NewHandler(nil) - - e := fmt.Errorf("simulated error") - rr := httptest.NewRequest("POST", "/api/v1/events", iotest.ErrReader(e)) - rr.Header.Set("Content-Type", "application/json") - - rw := httptest.NewRecorder() - - h.RESTAPIHandler(rw, rr) - - assert.Equal(t, rw.Code, http.StatusInternalServerError) - - res := &apiResponse{} - assert.Nil(t, json.NewDecoder(rw.Body).Decode(res)) - assert.Equal(t, res.Code, pb.Code_CODE_INTERNAL_ERROR) - assert.Equal(t, res.Status, pb.Status_STATUS_ERROR) - assert.Equal(t, res.Reason, "deserialization failure") - }) - - t.Run("should return an error if request body is malformed", func(t *testing.T) { - h := NewHandler(nil) - - payload := "}{}" - rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) - rr.Header.Set("Content-Type", "application/json") + var testCases = []struct { + Desc string + Req func() *http.Request + Collector func() collector.Collector + Response *apiResponse + Status int + AckType config.AckType + }{ + { + Desc: "should return an error if reading request body fails", + Req: func() *http.Request { + e := fmt.Errorf("simulated error") + rr := httptest.NewRequest("POST", "/api/v1/events", iotest.ErrReader(e)) + rr.Header.Set("Content-Type", "application/json") + return rr + }, + Collector: func() collector.Collector { return nil }, + Response: &apiResponse{ + Code: 
pb.Code_CODE_INTERNAL_ERROR, + Status: pb.Status_STATUS_ERROR, + Reason: "deserialization failure", + }, + Status: http.StatusInternalServerError, + }, + { + Desc: "should return an error if request body is malformed", + Req: func() *http.Request { + payload := "}{}" + rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) + rr.Header.Set("Content-Type", "application/json") + return rr + }, + Collector: func() collector.Collector { return nil }, + Response: &apiResponse{ + Code: pb.Code_CODE_BAD_REQUEST, + Status: pb.Status_STATUS_ERROR, + Reason: "deserialization failure", + }, + Status: http.StatusBadRequest, + }, + { + Desc: "should return an error if content-type is unrecognised", + Req: func() *http.Request { + payload := "}{}" + rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) + return rr + }, + Collector: func() collector.Collector { return nil }, + Response: &apiResponse{ + Code: pb.Code_CODE_BAD_REQUEST, + Status: pb.Status_STATUS_ERROR, + Reason: "invalid content type", + }, + Status: http.StatusBadRequest, + }, + { + Desc: "should return an error if collector fails to consume request (ack type = sync)", + Req: func() *http.Request { + payload := "{}" + rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) + rr.Header.Set("Content-Type", "application/json") + return rr + }, + Collector: func() collector.Collector { + mockCollector := &collector.MockCollector{} + mockCollector.On("Collect", mock.Anything, mock.Anything). + Return(nil). + Once(). + Run(func(args mock.Arguments) { + args.Get(1).(*collector.CollectRequest).AckFunc(fmt.Errorf("simulated error")) + }) + return mockCollector + }, + Response: &apiResponse{ + Code: pb.Code_CODE_INTERNAL_ERROR, + Status: pb.Status_STATUS_ERROR, + Reason: "cannot publish events: simulated error", + }, + Status: http.StatusInternalServerError, + AckType: config.AckTypeSync, + }, + { + Desc: "should successfully process event sent (ack type = sync)", + Req: func() *http.Request { + payload := "{}" + rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) + rr.Header.Set("Content-Type", "application/json") + return rr + }, + Collector: func() collector.Collector { + mockCollector := &collector.MockCollector{} + mockCollector.On("Collect", mock.Anything, mock.Anything). + Return(nil). + Once(). + Run(func(args mock.Arguments) { + args.Get(1).(*collector.CollectRequest).AckFunc(nil) + }) + return mockCollector + }, + Response: &apiResponse{ + Code: pb.Code_CODE_OK, + Status: pb.Status_STATUS_SUCCESS, + }, + Status: http.StatusOK, + AckType: config.AckTypeSync, + }, + { + Desc: "should successfully process event sent (ack type = async)", + Req: func() *http.Request { + payload := "{}" + rr := httptest.NewRequest("POST", "/api/v1/events", bytes.NewBufferString(payload)) + rr.Header.Set("Content-Type", "application/json") + return rr + }, + Collector: func() collector.Collector { + mockCollector := &collector.MockCollector{} + mockCollector.On("Collect", mock.Anything, mock.Anything). + Return(nil). 
+ Once() + return mockCollector + }, + Response: &apiResponse{ + Code: pb.Code_CODE_OK, + Status: pb.Status_STATUS_SUCCESS, + }, + Status: http.StatusOK, + }, + } + + for _, testCase := range testCases { rw := httptest.NewRecorder() + h := NewHandler(testCase.Collector()) + h.ackType = testCase.AckType + h.RESTAPIHandler(rw, testCase.Req()) - h.RESTAPIHandler(rw, rr) - - assert.Equal(t, rw.Code, http.StatusBadRequest) + assert.Equal(t, testCase.Status, rw.Code) res := &apiResponse{} assert.Nil(t, json.NewDecoder(rw.Body).Decode(res)) - assert.Equal(t, res.Code, pb.Code_CODE_BAD_REQUEST) - assert.Equal(t, res.Status, pb.Status_STATUS_ERROR) - assert.Equal(t, res.Reason, "deserialization failure") - - }) + assert.Equal(t, testCase.Response, res) + } } From 9a4ad1ba759d0b1878ad18270d97837666879524 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 19:38:55 +0530 Subject: [PATCH 40/66] ci: cache image pulls to speed up tests --- .github/workflows/test.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b8a639d7..289b2b73 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -10,6 +10,10 @@ jobs: steps: - name: Setup Docker uses: docker-practice/actions-setup-docker@master + - name: Cache Docker images. + uses: ScribeMD/docker-cache@0.5.0 + with: + key: docker-${{ runner.os }}-${{ hashFiles('docker-compose.yml') }} - name: Install Protoc uses: arduino/setup-protoc@v1 - name: Setup Go From 39aceac844d16e0397582e83373122d1d9481e1e Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 19:40:26 +0530 Subject: [PATCH 41/66] ci: fix docker image caching --- .github/workflows/test.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 289b2b73..fc25d124 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -12,8 +12,8 @@ jobs: uses: docker-practice/actions-setup-docker@master - name: Cache Docker images. uses: ScribeMD/docker-cache@0.5.0 - with: - key: docker-${{ runner.os }}-${{ hashFiles('docker-compose.yml') }} + with: + key: docker-${{ runner.os }}-${{ hashFiles('docker-compose.yml') }} - name: Install Protoc uses: arduino/setup-protoc@v1 - name: Setup Go From 75d08076fb5fbdd210b064de5ed28bebd043995e Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 20:25:46 +0530 Subject: [PATCH 42/66] ci: ignore proto package during tests --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fc25d124..7d25a369 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -27,7 +27,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test $(go list ./... 
| grep -v "proto") -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' From aaa0571c36978f6f3396810feae3d558867a0695 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 20:29:10 +0530 Subject: [PATCH 43/66] ci: remove docker image caching --- .github/workflows/test.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 7d25a369..fe4e410f 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -10,10 +10,6 @@ jobs: steps: - name: Setup Docker uses: docker-practice/actions-setup-docker@master - - name: Cache Docker images. - uses: ScribeMD/docker-cache@0.5.0 - with: - key: docker-${{ runner.os }}-${{ hashFiles('docker-compose.yml') }} - name: Install Protoc uses: arduino/setup-protoc@v1 - name: Setup Go From 1038f46d0467170f1b0f0a5d0a21f79fbc35675d Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 20:38:02 +0530 Subject: [PATCH 44/66] Revert "ci: ignore proto package during tests" This reverts commit 75d08076fb5fbdd210b064de5ed28bebd043995e. --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fe4e410f..b8a639d7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -23,7 +23,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test $(go list ./... | grep -v "proto") -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' From a39cad6fb76a332ed4132ead3564fc9c54b57109 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 20:44:08 +0530 Subject: [PATCH 45/66] Revert "Revert "ci: ignore proto package during tests"" This reverts commit 1038f46d0467170f1b0f0a5d0a21f79fbc35675d. --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b8a639d7..fe4e410f 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -23,7 +23,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test $(go list ./... | grep -v "proto") -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' From 38b48f58e4e51c56943bf2973366a1d0610fd210 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:17:57 +0530 Subject: [PATCH 46/66] Revert "Revert "Revert "ci: ignore proto package during tests""" This reverts commit a39cad6fb76a332ed4132ead3564fc9c54b57109. --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index fe4e410f..b8a639d7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -23,7 +23,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test $(go list ./... | grep -v "proto") -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test ./... 
-v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' From 7f837bf5e09b183ce3384b4dd986aeee73e9f687 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:33:43 +0530 Subject: [PATCH 47/66] publisher: log: move test event to root proto package --- {publisher/log/internal/testproto => proto}/gen.go | 2 +- {publisher/log/internal/testproto => proto}/test.pb.go | 6 +++--- {publisher/log/internal/testproto => proto}/test.proto | 2 +- publisher/log/log_test.go | 3 +-- 4 files changed, 6 insertions(+), 7 deletions(-) rename {publisher/log/internal/testproto => proto}/gen.go (78%) rename {publisher/log/internal/testproto => proto}/test.pb.go (96%) rename {publisher/log/internal/testproto => proto}/test.proto (80%) diff --git a/publisher/log/internal/testproto/gen.go b/proto/gen.go similarity index 78% rename from publisher/log/internal/testproto/gen.go rename to proto/gen.go index 605b6b00..e75ca087 100644 --- a/publisher/log/internal/testproto/gen.go +++ b/proto/gen.go @@ -1,2 +1,2 @@ //go:generate protoc --go_out=paths=source_relative:. ./test.proto -package testproto +package raccoonv1 diff --git a/publisher/log/internal/testproto/test.pb.go b/proto/test.pb.go similarity index 96% rename from publisher/log/internal/testproto/test.pb.go rename to proto/test.pb.go index 726c78b5..c59ac336 100644 --- a/publisher/log/internal/testproto/test.pb.go +++ b/proto/test.pb.go @@ -4,7 +4,7 @@ // protoc v3.12.4 // source: test.proto -package testproto +package raccoonv1 import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" @@ -93,8 +93,8 @@ var file_test_proto_rawDesc = []byte{ 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, - 0x61, 0x67, 0x73, 0x42, 0x0d, 0x5a, 0x0b, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x67, 0x73, 0x42, 0x0d, 0x5a, 0x0b, 0x2e, 0x2f, 0x72, 0x61, 0x63, 0x63, 0x6f, 0x6f, 0x6e, + 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/publisher/log/internal/testproto/test.proto b/proto/test.proto similarity index 80% rename from publisher/log/internal/testproto/test.proto rename to proto/test.proto index f0a52432..f5a9b9c5 100644 --- a/publisher/log/internal/testproto/test.proto +++ b/proto/test.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package raccoon.test; -option go_package = "./testproto"; +option go_package = "./raccoonv1"; message TestEvent { string description = 1; diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go index 27c432a9..f8cde628 100644 --- a/publisher/log/log_test.go +++ b/publisher/log/log_test.go @@ -5,7 +5,6 @@ import ( "testing" raccoonv1 "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/publisher/log/internal/testproto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" @@ -58,7 +57,7 @@ func TestLogPublisher(t *testing.T) { assert.Equal(t, expected, em.Messages[0]) }) t.Run("should emit protobuf events correctly", func(t *testing.T) { - msg := &testproto.TestEvent{ + msg := &raccoonv1.TestEvent{ Description: "test event", Count: 420, Tags: []string{"log", "protobuf"}, From 0856073b09cc8d3f0665b47dda56114a6c6f2236 Mon Sep 17 
00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:35:48 +0530 Subject: [PATCH 48/66] ci: blacklist proto package during tests --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b8a639d7..e185293c 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -23,7 +23,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test $(go list ./... | grep -v proto) -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' From 5ffb846505b3233f63b38a50abb1eeec5d7b6b91 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:42:44 +0530 Subject: [PATCH 49/66] hack: publisher: log: use single spaced fields --- publisher/log/log_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go index f8cde628..47cb0479 100644 --- a/publisher/log/log_test.go +++ b/publisher/log/log_test.go @@ -82,7 +82,7 @@ func TestLogPublisher(t *testing.T) { "[LogPublisher] kind = %s, event_type = %s, event = %s", "protobuf", "unknown", - `1:"test event" 2:420 3:"log" 3:"protobuf"`, + `1:"test event" 2:420 3:"log" 3:"protobuf"`, ) assert.Equal(t, expected, em.Messages[0]) }) From 76e52ce4ea8123c2431f305d0f8091d995d06f54 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:52:19 +0530 Subject: [PATCH 50/66] Revert "publisher: log: move test event to root proto package" This reverts commit 7f837bf5e09b183ce3384b4dd986aeee73e9f687. --- {proto => publisher/log/internal/testproto}/gen.go | 2 +- {proto => publisher/log/internal/testproto}/test.pb.go | 6 +++--- {proto => publisher/log/internal/testproto}/test.proto | 2 +- publisher/log/log_test.go | 3 ++- 4 files changed, 7 insertions(+), 6 deletions(-) rename {proto => publisher/log/internal/testproto}/gen.go (78%) rename {proto => publisher/log/internal/testproto}/test.pb.go (96%) rename {proto => publisher/log/internal/testproto}/test.proto (80%) diff --git a/proto/gen.go b/publisher/log/internal/testproto/gen.go similarity index 78% rename from proto/gen.go rename to publisher/log/internal/testproto/gen.go index e75ca087..605b6b00 100644 --- a/proto/gen.go +++ b/publisher/log/internal/testproto/gen.go @@ -1,2 +1,2 @@ //go:generate protoc --go_out=paths=source_relative:. 
./test.proto -package raccoonv1 +package testproto diff --git a/proto/test.pb.go b/publisher/log/internal/testproto/test.pb.go similarity index 96% rename from proto/test.pb.go rename to publisher/log/internal/testproto/test.pb.go index c59ac336..726c78b5 100644 --- a/proto/test.pb.go +++ b/publisher/log/internal/testproto/test.pb.go @@ -4,7 +4,7 @@ // protoc v3.12.4 // source: test.proto -package raccoonv1 +package testproto import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" @@ -93,8 +93,8 @@ var file_test_proto_rawDesc = []byte{ 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x61, 0x67, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x74, - 0x61, 0x67, 0x73, 0x42, 0x0d, 0x5a, 0x0b, 0x2e, 0x2f, 0x72, 0x61, 0x63, 0x63, 0x6f, 0x6f, 0x6e, - 0x76, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x61, 0x67, 0x73, 0x42, 0x0d, 0x5a, 0x0b, 0x2e, 0x2f, 0x74, 0x65, 0x73, 0x74, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/proto/test.proto b/publisher/log/internal/testproto/test.proto similarity index 80% rename from proto/test.proto rename to publisher/log/internal/testproto/test.proto index f5a9b9c5..f0a52432 100644 --- a/proto/test.proto +++ b/publisher/log/internal/testproto/test.proto @@ -2,7 +2,7 @@ syntax = "proto3"; package raccoon.test; -option go_package = "./raccoonv1"; +option go_package = "./testproto"; message TestEvent { string description = 1; diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go index 47cb0479..7d1a631f 100644 --- a/publisher/log/log_test.go +++ b/publisher/log/log_test.go @@ -5,6 +5,7 @@ import ( "testing" raccoonv1 "github.com/raystack/raccoon/proto" + "github.com/raystack/raccoon/publisher/log/internal/testproto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "google.golang.org/protobuf/proto" @@ -57,7 +58,7 @@ func TestLogPublisher(t *testing.T) { assert.Equal(t, expected, em.Messages[0]) }) t.Run("should emit protobuf events correctly", func(t *testing.T) { - msg := &raccoonv1.TestEvent{ + msg := &testproto.TestEvent{ Description: "test event", Count: 420, Tags: []string{"log", "protobuf"}, From 98ac6ddd490f57f81cfa1e864c657988e794b802 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:52:35 +0530 Subject: [PATCH 51/66] Revert "ci: blacklist proto package during tests" This reverts commit 0856073b09cc8d3f0665b47dda56114a6c6f2236. --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index e185293c..b8a639d7 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -23,7 +23,7 @@ jobs: - name: Start raccoon run: make docker-run - name: Run tests - run: go test $(go list ./... | grep -v proto) -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ + run: go test ./... -v -cover -test.gocoverdir=$PWD/raccoon-coverage/ env: INTEGTEST_BOOTSTRAP_SERVER: 'localhost:9094' INTEGTEST_HOST: 'localhost:8080' From 67797d90a8ffc8ef9c8f731c9a299f4770b1d6f1 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:52:49 +0530 Subject: [PATCH 52/66] Revert "hack: publisher: log: use single spaced fields" This reverts commit 5ffb846505b3233f63b38a50abb1eeec5d7b6b91. 
--- publisher/log/log_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go index 7d1a631f..27c432a9 100644 --- a/publisher/log/log_test.go +++ b/publisher/log/log_test.go @@ -83,7 +83,7 @@ func TestLogPublisher(t *testing.T) { "[LogPublisher] kind = %s, event_type = %s, event = %s", "protobuf", "unknown", - `1:"test event" 2:420 3:"log" 3:"protobuf"`, + `1:"test event" 2:420 3:"log" 3:"protobuf"`, ) assert.Equal(t, expected, em.Messages[0]) }) From 986bf944ee36e17976786d083b0ac50d09753693 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 21:54:14 +0530 Subject: [PATCH 53/66] Revert "Revert "hack: publisher: log: use single spaced fields"" This reverts commit 67797d90a8ffc8ef9c8f731c9a299f4770b1d6f1. --- publisher/log/log_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go index 27c432a9..7d1a631f 100644 --- a/publisher/log/log_test.go +++ b/publisher/log/log_test.go @@ -83,7 +83,7 @@ func TestLogPublisher(t *testing.T) { "[LogPublisher] kind = %s, event_type = %s, event = %s", "protobuf", "unknown", - `1:"test event" 2:420 3:"log" 3:"protobuf"`, + `1:"test event" 2:420 3:"log" 3:"protobuf"`, ) assert.Equal(t, expected, em.Messages[0]) }) From 9672b1345f0dfc2115f0a8b486137496ceeed7d2 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 22:01:35 +0530 Subject: [PATCH 54/66] ci: ignore proto folders when computing coverage --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b8a639d7..e4251a58 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -34,7 +34,7 @@ jobs: - name: Stop raccoon run: docker compose down - name: Merge coverage data - run: go tool covdata textfmt -i=raccoon-coverage -o coverage.out + run: go tool covdata textfmt -i=raccoon-coverage -pkg $(go list ./... | grep -v proto) -o coverage.out - name: Upload coverage data to coveralls uses: shogo82148/actions-goveralls@v1 with: From a727bd72fe60ec8e2f908c0fb50ba4c84dd2f0d7 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 22:07:48 +0530 Subject: [PATCH 55/66] hack: publisher: log: use double spaced fields --- publisher/log/log_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/publisher/log/log_test.go b/publisher/log/log_test.go index 7d1a631f..27c432a9 100644 --- a/publisher/log/log_test.go +++ b/publisher/log/log_test.go @@ -83,7 +83,7 @@ func TestLogPublisher(t *testing.T) { "[LogPublisher] kind = %s, event_type = %s, event = %s", "protobuf", "unknown", - `1:"test event" 2:420 3:"log" 3:"protobuf"`, + `1:"test event" 2:420 3:"log" 3:"protobuf"`, ) assert.Equal(t, expected, em.Messages[0]) }) From f01cc07cdc2152896df01fb854778f49b11df9c3 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 22:13:48 +0530 Subject: [PATCH 56/66] ci: fix coverage data merge --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index e4251a58..9f4f46fc 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -34,7 +34,7 @@ jobs: - name: Stop raccoon run: docker compose down - name: Merge coverage data - run: go tool covdata textfmt -i=raccoon-coverage -pkg $(go list ./... | grep -v proto) -o coverage.out + run: go tool covdata textfmt -i=raccoon-coverage -pkg "$(go list ./... 
| grep -v proto)" -o coverage.out - name: Upload coverage data to coveralls uses: shogo82148/actions-goveralls@v1 with: From 9ec931c74ce8b88e13344a05dee72e005ba6898e Mon Sep 17 00:00:00 2001 From: turtleDev Date: Fri, 6 Sep 2024 22:21:25 +0530 Subject: [PATCH 57/66] ci: fix cover merge resulting in empty coverage file --- .github/workflows/test.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 9f4f46fc..d4107feb 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -34,7 +34,7 @@ jobs: - name: Stop raccoon run: docker compose down - name: Merge coverage data - run: go tool covdata textfmt -i=raccoon-coverage -pkg "$(go list ./... | grep -v proto)" -o coverage.out + run: go tool covdata textfmt -i=raccoon-coverage -pkg "$(go list ./... | grep -v proto | paste -sd ',')" -o coverage.out - name: Upload coverage data to coveralls uses: shogo82148/actions-goveralls@v1 with: From d459c900737c437dc099bdfce47d0dcdfd75915e Mon Sep 17 00:00:00 2001 From: turtleDev Date: Sat, 7 Sep 2024 14:22:44 +0530 Subject: [PATCH 58/66] publish: kinesis: add tests for stream's existence check --- publisher/kinesis/kinesis.go | 8 +- publisher/kinesis/kinesis_integration_test.go | 293 +++++++++++++++++ publisher/kinesis/kinesis_test.go | 307 ++---------------- publisher/kinesis/mock.go | 27 ++ 4 files changed, 358 insertions(+), 277 deletions(-) create mode 100644 publisher/kinesis/kinesis_integration_test.go create mode 100644 publisher/kinesis/mock.go diff --git a/publisher/kinesis/kinesis.go b/publisher/kinesis/kinesis.go index 7ff1cbfc..dc561899 100644 --- a/publisher/kinesis/kinesis.go +++ b/publisher/kinesis/kinesis.go @@ -20,8 +20,14 @@ import ( var globalCtx = context.Background() +type KinesisClient interface { + PutRecord(context.Context, *kinesis.PutRecordInput, ...func(*kinesis.Options)) (*kinesis.PutRecordOutput, error) + DescribeStreamSummary(context.Context, *kinesis.DescribeStreamSummaryInput, ...func(*kinesis.Options)) (*kinesis.DescribeStreamSummaryOutput, error) + CreateStream(context.Context, *kinesis.CreateStreamInput, ...func(*kinesis.Options)) (*kinesis.CreateStreamOutput, error) +} + type Publisher struct { - client *kinesis.Client + client KinesisClient streamLock sync.RWMutex streams map[string]bool diff --git a/publisher/kinesis/kinesis_integration_test.go b/publisher/kinesis/kinesis_integration_test.go new file mode 100644 index 00000000..2a9b98db --- /dev/null +++ b/publisher/kinesis/kinesis_integration_test.go @@ -0,0 +1,293 @@ +package kinesis_test + +import ( + "context" + "errors" + "fmt" + "io" + "os" + "strings" + "testing" + "time" + + "github.com/aws/aws-sdk-go-v2/aws" + "github.com/aws/aws-sdk-go-v2/config" + kinesis_sdk "github.com/aws/aws-sdk-go-v2/service/kinesis" + "github.com/aws/aws-sdk-go-v2/service/kinesis/types" + "github.com/raystack/raccoon/logger" + pb "github.com/raystack/raccoon/proto" + "github.com/raystack/raccoon/publisher/kinesis" + "github.com/stretchr/testify/require" +) + +const ( + envLocalstackHost = "LOCALSTACK_HOST" +) + +type localstackProvider struct{} + +func (p *localstackProvider) Retrieve(ctx context.Context) (aws.Credentials, error) { + return aws.Credentials{ + AccessKeyID: "test", + SecretAccessKey: "test", + }, nil +} + +func withLocalStack(host string) func(o *kinesis_sdk.Options) { + return func(o *kinesis_sdk.Options) { + o.BaseEndpoint = aws.String(host) + o.Credentials = &localstackProvider{} + } +} + +var ( + testEvent = &pb.Event{ 
+ EventBytes: []byte("EVENT"), + Type: "click", + } +) + +func createStream(client *kinesis_sdk.Client, name string) (string, error) { + _, err := client.CreateStream( + context.Background(), + &kinesis_sdk.CreateStreamInput{ + StreamName: aws.String(name), + StreamModeDetails: &types.StreamModeDetails{ + StreamMode: types.StreamModeOnDemand, + }, + ShardCount: aws.Int32(1), + }, + ) + if err != nil { + return "", err + } + retries := 5 + for range retries { + stream, err := client.DescribeStreamSummary( + context.Background(), + &kinesis_sdk.DescribeStreamSummaryInput{ + StreamName: aws.String(name), + }, + ) + if err != nil { + return "", err + } + if stream.StreamDescriptionSummary.StreamStatus == types.StreamStatusActive { + return *stream.StreamDescriptionSummary.StreamARN, nil + } + time.Sleep(time.Second / 2) + } + return "", fmt.Errorf("timed out waiting for stream to get ready") +} + +func deleteStream(client *kinesis_sdk.Client, name string) error { + _, err := client.DeleteStream(context.Background(), &kinesis_sdk.DeleteStreamInput{ + StreamName: aws.String(name), + }) + if err != nil { + return err + } + + var errNotFound *types.ResourceNotFoundException + for !errors.As(err, &errNotFound) { + _, err = client.DescribeStreamSummary( + context.Background(), + &kinesis_sdk.DescribeStreamSummaryInput{ + StreamName: aws.String(name), + }, + ) + time.Sleep(time.Second / 2) + } + + return nil +} + +func getStreamMode(client *kinesis_sdk.Client, name string) (types.StreamMode, error) { + stream, err := client.DescribeStreamSummary( + context.Background(), + &kinesis_sdk.DescribeStreamSummaryInput{ + StreamName: aws.String(name), + }, + ) + if err != nil { + return "", err + } + return stream.StreamDescriptionSummary.StreamModeDetails.StreamMode, nil +} + +func readStream(client *kinesis_sdk.Client, arn string) ([][]byte, error) { + stream, err := client.DescribeStream( + context.Background(), + &kinesis_sdk.DescribeStreamInput{ + StreamARN: aws.String(arn), + }, + ) + if err != nil { + return nil, err + } + if len(stream.StreamDescription.Shards) == 0 { + return nil, fmt.Errorf("stream %q has no shards", arn) + } + iter, err := client.GetShardIterator( + context.Background(), + &kinesis_sdk.GetShardIteratorInput{ + ShardId: stream.StreamDescription.Shards[0].ShardId, + StreamARN: aws.String(arn), + ShardIteratorType: types.ShardIteratorTypeTrimHorizon, + }, + ) + if err != nil { + return nil, err + } + res, err := client.GetRecords( + context.Background(), + &kinesis_sdk.GetRecordsInput{ + StreamARN: aws.String(arn), + ShardIterator: iter.ShardIterator, + }, + ) + if err != nil { + return nil, err + } + if len(res.Records) == 0 { + return nil, fmt.Errorf("got empty response") + } + rv := [][]byte{} + for _, record := range res.Records { + rv = append(rv, record.Data) + } + return rv, nil +} + +func TestKinesisProducer(t *testing.T) { + localstackHost := os.Getenv(envLocalstackHost) + if strings.TrimSpace(localstackHost) == "" { + t.Errorf("cannot run tests because %s env variable is not set", envLocalstackHost) + return + } + cfg, err := config.LoadDefaultConfig(context.Background()) + require.NoError(t, err, "error loading aws config") + + client := kinesis_sdk.NewFromConfig(cfg, withLocalStack(localstackHost)) + + t.Run("should return an error if stream doesn't exist", func(t *testing.T) { + pub, err := kinesis.New(client) + require.NoError(t, err) + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.Error(t, err) + }) + + t.Run("should return an error if an 
invalid stream mode is specified", func(t *testing.T) { + _, err := kinesis.New( + client, + kinesis.WithStreamMode("INVALID"), + ) + require.Error(t, err) + }) + + t.Run("should publish message to kinesis", func(t *testing.T) { + streamARN, err := createStream(client, testEvent.Type) + require.NoError(t, err) + defer deleteStream(client, testEvent.Type) + + pub, err := kinesis.New(client) + require.NoError(t, err) + pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.NoError(t, err) + events, err := readStream(client, streamARN) + require.NoError(t, err) + require.Len(t, events, 1) + require.Equal(t, events[0], testEvent.EventBytes) + }) + t.Run("stream auto creation", func(t *testing.T) { + t.Run("should create the stream if it doesn't exist and autocreate is set to true", func(t *testing.T) { + pub, err := kinesis.New(client, kinesis.WithStreamAutocreate(true)) + require.NoError(t, err) + + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.NoError(t, err) + deleteStream(client, testEvent.Type) + }) + t.Run("should create the stream with mode = ON_DEMAND (default)", func(t *testing.T) { + pub, err := kinesis.New(client, kinesis.WithStreamAutocreate(true)) + require.NoError(t, err) + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.NoError(t, err) + defer deleteStream(client, testEvent.Type) + + mode, err := getStreamMode(client, testEvent.Type) + require.NoError(t, err) + require.Equal(t, mode, types.StreamModeOnDemand) + }) + t.Run("should create the stream with mode = PROVISIONED", func(t *testing.T) { + pub, err := kinesis.New( + client, + kinesis.WithStreamAutocreate(true), + kinesis.WithStreamMode(types.StreamModeProvisioned), + ) + require.NoError(t, err) + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.NoError(t, err) + defer deleteStream(client, testEvent.Type) + + mode, err := getStreamMode(client, testEvent.Type) + require.NoError(t, err) + require.Equal(t, mode, types.StreamModeProvisioned) + }) + t.Run("should create stream with specified number of shards", func(t *testing.T) { + shards := 5 + pub, err := kinesis.New( + client, + kinesis.WithStreamAutocreate(true), + kinesis.WithShards(uint32(shards)), + ) + require.NoError(t, err) + + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.NoError(t, err) + defer deleteStream(client, testEvent.Type) + + stream, err := client.DescribeStream( + context.Background(), + &kinesis_sdk.DescribeStreamInput{ + StreamName: aws.String(testEvent.Type), + }, + ) + require.NoError(t, err) + require.Equal(t, shards, len(stream.StreamDescription.Shards)) + }) + }) + + t.Run("should publish message according to the stream pattern", func(t *testing.T) { + streamPattern := "pre-%s-post" + destinationStream := "pre-click-post" + _, err := createStream(client, destinationStream) + require.NoError(t, err) + defer deleteStream(client, destinationStream) + pub, err := kinesis.New( + client, + kinesis.WithStreamPattern(streamPattern), + ) + require.NoError(t, err) + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") + require.NoError(t, err) + }) + t.Run("should publish messages to static stream names", func(t *testing.T) { + destinationStream := "static" + _, err := createStream(client, destinationStream) + require.NoError(t, err) + defer deleteStream(client, destinationStream) + pub, err := kinesis.New( + client, + kinesis.WithStreamPattern(destinationStream), + ) + require.NoError(t, err) + err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") 
+ require.NoError(t, err) + }) +} + +func TestMain(m *testing.M) { + logger.SetOutput(io.Discard) + os.Exit(m.Run()) +} diff --git a/publisher/kinesis/kinesis_test.go b/publisher/kinesis/kinesis_test.go index 2a9b98db..a6649dca 100644 --- a/publisher/kinesis/kinesis_test.go +++ b/publisher/kinesis/kinesis_test.go @@ -1,293 +1,48 @@ -package kinesis_test +package kinesis import ( - "context" - "errors" "fmt" - "io" - "os" - "strings" "testing" - "time" "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/config" - kinesis_sdk "github.com/aws/aws-sdk-go-v2/service/kinesis" - "github.com/aws/aws-sdk-go-v2/service/kinesis/types" - "github.com/raystack/raccoon/logger" + "github.com/aws/aws-sdk-go-v2/service/kinesis" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/publisher/kinesis" - "github.com/stretchr/testify/require" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" ) -const ( - envLocalstackHost = "LOCALSTACK_HOST" -) - -type localstackProvider struct{} - -func (p *localstackProvider) Retrieve(ctx context.Context) (aws.Credentials, error) { - return aws.Credentials{ - AccessKeyID: "test", - SecretAccessKey: "test", - }, nil -} - -func withLocalStack(host string) func(o *kinesis_sdk.Options) { - return func(o *kinesis_sdk.Options) { - o.BaseEndpoint = aws.String(host) - o.Credentials = &localstackProvider{} - } -} - -var ( - testEvent = &pb.Event{ - EventBytes: []byte("EVENT"), - Type: "click", - } -) - -func createStream(client *kinesis_sdk.Client, name string) (string, error) { - _, err := client.CreateStream( - context.Background(), - &kinesis_sdk.CreateStreamInput{ - StreamName: aws.String(name), - StreamModeDetails: &types.StreamModeDetails{ - StreamMode: types.StreamModeOnDemand, +func TestKinesisProducer_UnitTest(t *testing.T) { + t.Run("should return an error if stream creation fails", func(t *testing.T) { + events := []*pb.Event{ + { + Type: "unknown", }, - ShardCount: aws.Int32(1), - }, - ) - if err != nil { - return "", err - } - retries := 5 - for range retries { - stream, err := client.DescribeStreamSummary( - context.Background(), - &kinesis_sdk.DescribeStreamSummaryInput{ - StreamName: aws.String(name), - }, - ) - if err != nil { - return "", err } - if stream.StreamDescriptionSummary.StreamStatus == types.StreamStatusActive { - return *stream.StreamDescriptionSummary.StreamARN, nil - } - time.Sleep(time.Second / 2) - } - return "", fmt.Errorf("timed out waiting for stream to get ready") -} - -func deleteStream(client *kinesis_sdk.Client, name string) error { - _, err := client.DeleteStream(context.Background(), &kinesis_sdk.DeleteStreamInput{ - StreamName: aws.String(name), - }) - if err != nil { - return err - } + client := &mockKinesisClient{} - var errNotFound *types.ResourceNotFoundException - for !errors.As(err, &errNotFound) { - _, err = client.DescribeStreamSummary( - context.Background(), - &kinesis_sdk.DescribeStreamSummaryInput{ - StreamName: aws.String(name), + client.On( + "DescribeStreamSummary", + mock.Anything, + &kinesis.DescribeStreamSummaryInput{ + StreamName: aws.String("unknown"), }, + mock.Anything, + ).Return( + &kinesis.DescribeStreamSummaryOutput{}, + fmt.Errorf("simulated error"), + ).Once() + + p, err := New( + nil, // we will override it later + WithStreamAutocreate(true), ) - time.Sleep(time.Second / 2) - } - - return nil -} - -func getStreamMode(client *kinesis_sdk.Client, name string) (types.StreamMode, error) { - stream, err := client.DescribeStreamSummary( - context.Background(), - 
&kinesis_sdk.DescribeStreamSummaryInput{ - StreamName: aws.String(name), - }, - ) - if err != nil { - return "", err - } - return stream.StreamDescriptionSummary.StreamModeDetails.StreamMode, nil -} - -func readStream(client *kinesis_sdk.Client, arn string) ([][]byte, error) { - stream, err := client.DescribeStream( - context.Background(), - &kinesis_sdk.DescribeStreamInput{ - StreamARN: aws.String(arn), - }, - ) - if err != nil { - return nil, err - } - if len(stream.StreamDescription.Shards) == 0 { - return nil, fmt.Errorf("stream %q has no shards", arn) - } - iter, err := client.GetShardIterator( - context.Background(), - &kinesis_sdk.GetShardIteratorInput{ - ShardId: stream.StreamDescription.Shards[0].ShardId, - StreamARN: aws.String(arn), - ShardIteratorType: types.ShardIteratorTypeTrimHorizon, - }, - ) - if err != nil { - return nil, err - } - res, err := client.GetRecords( - context.Background(), - &kinesis_sdk.GetRecordsInput{ - StreamARN: aws.String(arn), - ShardIterator: iter.ShardIterator, - }, - ) - if err != nil { - return nil, err - } - if len(res.Records) == 0 { - return nil, fmt.Errorf("got empty response") - } - rv := [][]byte{} - for _, record := range res.Records { - rv = append(rv, record.Data) - } - return rv, nil -} - -func TestKinesisProducer(t *testing.T) { - localstackHost := os.Getenv(envLocalstackHost) - if strings.TrimSpace(localstackHost) == "" { - t.Errorf("cannot run tests because %s env variable is not set", envLocalstackHost) - return - } - cfg, err := config.LoadDefaultConfig(context.Background()) - require.NoError(t, err, "error loading aws config") - - client := kinesis_sdk.NewFromConfig(cfg, withLocalStack(localstackHost)) - - t.Run("should return an error if stream doesn't exist", func(t *testing.T) { - pub, err := kinesis.New(client) - require.NoError(t, err) - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.Error(t, err) - }) - - t.Run("should return an error if an invalid stream mode is specified", func(t *testing.T) { - _, err := kinesis.New( - client, - kinesis.WithStreamMode("INVALID"), - ) - require.Error(t, err) - }) - - t.Run("should publish message to kinesis", func(t *testing.T) { - streamARN, err := createStream(client, testEvent.Type) - require.NoError(t, err) - defer deleteStream(client, testEvent.Type) - - pub, err := kinesis.New(client) - require.NoError(t, err) - pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - events, err := readStream(client, streamARN) - require.NoError(t, err) - require.Len(t, events, 1) - require.Equal(t, events[0], testEvent.EventBytes) - }) - t.Run("stream auto creation", func(t *testing.T) { - t.Run("should create the stream if it doesn't exist and autocreate is set to true", func(t *testing.T) { - pub, err := kinesis.New(client, kinesis.WithStreamAutocreate(true)) - require.NoError(t, err) - - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - deleteStream(client, testEvent.Type) - }) - t.Run("should create the stream with mode = ON_DEMAND (default)", func(t *testing.T) { - pub, err := kinesis.New(client, kinesis.WithStreamAutocreate(true)) - require.NoError(t, err) - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - defer deleteStream(client, testEvent.Type) - - mode, err := getStreamMode(client, testEvent.Type) - require.NoError(t, err) - require.Equal(t, mode, types.StreamModeOnDemand) - }) - t.Run("should create the stream with mode = PROVISIONED", func(t *testing.T) { - pub, 
err := kinesis.New( - client, - kinesis.WithStreamAutocreate(true), - kinesis.WithStreamMode(types.StreamModeProvisioned), - ) - require.NoError(t, err) - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - defer deleteStream(client, testEvent.Type) - - mode, err := getStreamMode(client, testEvent.Type) - require.NoError(t, err) - require.Equal(t, mode, types.StreamModeProvisioned) - }) - t.Run("should create stream with specified number of shards", func(t *testing.T) { - shards := 5 - pub, err := kinesis.New( - client, - kinesis.WithStreamAutocreate(true), - kinesis.WithShards(uint32(shards)), - ) - require.NoError(t, err) - - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - defer deleteStream(client, testEvent.Type) + if err != nil { + t.Errorf("error constructing client: %v", err) + return + } + p.client = client - stream, err := client.DescribeStream( - context.Background(), - &kinesis_sdk.DescribeStreamInput{ - StreamName: aws.String(testEvent.Type), - }, - ) - require.NoError(t, err) - require.Equal(t, shards, len(stream.StreamDescription.Shards)) - }) + err = p.ProduceBulk(events, "") + assert.NotNil(t, err) }) - - t.Run("should publish message according to the stream pattern", func(t *testing.T) { - streamPattern := "pre-%s-post" - destinationStream := "pre-click-post" - _, err := createStream(client, destinationStream) - require.NoError(t, err) - defer deleteStream(client, destinationStream) - pub, err := kinesis.New( - client, - kinesis.WithStreamPattern(streamPattern), - ) - require.NoError(t, err) - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - }) - t.Run("should publish messages to static stream names", func(t *testing.T) { - destinationStream := "static" - _, err := createStream(client, destinationStream) - require.NoError(t, err) - defer deleteStream(client, destinationStream) - pub, err := kinesis.New( - client, - kinesis.WithStreamPattern(destinationStream), - ) - require.NoError(t, err) - err = pub.ProduceBulk([]*pb.Event{testEvent}, "conn_group") - require.NoError(t, err) - }) -} - -func TestMain(m *testing.M) { - logger.SetOutput(io.Discard) - os.Exit(m.Run()) } diff --git a/publisher/kinesis/mock.go b/publisher/kinesis/mock.go new file mode 100644 index 00000000..e1b6951f --- /dev/null +++ b/publisher/kinesis/mock.go @@ -0,0 +1,27 @@ +package kinesis + +import ( + "context" + + "github.com/aws/aws-sdk-go-v2/service/kinesis" + "github.com/stretchr/testify/mock" +) + +type mockKinesisClient struct { + mock.Mock +} + +func (cli *mockKinesisClient) PutRecord(ctx context.Context, in *kinesis.PutRecordInput, opts ...func(*kinesis.Options)) (*kinesis.PutRecordOutput, error) { + args := cli.Called(ctx, in, opts) + return args.Get(0).(*kinesis.PutRecordOutput), args.Error(1) +} + +func (cli *mockKinesisClient) DescribeStreamSummary(ctx context.Context, in *kinesis.DescribeStreamSummaryInput, opts ...func(*kinesis.Options)) (*kinesis.DescribeStreamSummaryOutput, error) { + args := cli.Called(ctx, in, opts) + return args.Get(0).(*kinesis.DescribeStreamSummaryOutput), args.Error(1) +} + +func (cli *mockKinesisClient) CreateStream(ctx context.Context, in *kinesis.CreateStreamInput, opts ...func(*kinesis.Options)) (*kinesis.CreateStreamOutput, error) { + args := cli.Called(ctx, in, opts) + return args.Get(0).(*kinesis.CreateStreamOutput), args.Error(1) +} From 970bdccec7317e7bbf16be57c967307525c3b4b2 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Sat, 7 Sep 2024 
14:32:40 +0530 Subject: [PATCH 59/66] misc: improve test case description --- publisher/kinesis/kinesis_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/publisher/kinesis/kinesis_test.go b/publisher/kinesis/kinesis_test.go index a6649dca..0b6ca064 100644 --- a/publisher/kinesis/kinesis_test.go +++ b/publisher/kinesis/kinesis_test.go @@ -12,7 +12,7 @@ import ( ) func TestKinesisProducer_UnitTest(t *testing.T) { - t.Run("should return an error if stream creation fails", func(t *testing.T) { + t.Run("should return an error if stream existence check fails", func(t *testing.T) { events := []*pb.Event{ { Type: "unknown", From f78453a0544dc668449fcfc45066048359b466c4 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Sat, 7 Sep 2024 15:02:57 +0530 Subject: [PATCH 60/66] publisher: kinesis: add tests for hitting rate limits and quotas --- publisher/kinesis/kinesis_test.go | 72 ++++++++++++++++++++++++++++--- 1 file changed, 67 insertions(+), 5 deletions(-) diff --git a/publisher/kinesis/kinesis_test.go b/publisher/kinesis/kinesis_test.go index 0b6ca064..8460ed5a 100644 --- a/publisher/kinesis/kinesis_test.go +++ b/publisher/kinesis/kinesis_test.go @@ -6,18 +6,48 @@ import ( "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/kinesis" + "github.com/aws/aws-sdk-go-v2/service/kinesis/types" pb "github.com/raystack/raccoon/proto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" ) func TestKinesisProducer_UnitTest(t *testing.T) { + events := []*pb.Event{ + { + Type: "unknown", + }, + } t.Run("should return an error if stream existence check fails", func(t *testing.T) { - events := []*pb.Event{ - { - Type: "unknown", + client := &mockKinesisClient{} + + client.On( + "DescribeStreamSummary", + mock.Anything, + &kinesis.DescribeStreamSummaryInput{ + StreamName: aws.String("unknown"), }, + mock.Anything, + ).Return( + &kinesis.DescribeStreamSummaryOutput{}, + fmt.Errorf("simulated error"), + ).Once() + defer client.AssertExpectations(t) + + p, err := New( + nil, // we will override it later + WithStreamAutocreate(true), + ) + if err != nil { + t.Errorf("error constructing client: %v", err) + return } + p.client = client + + err = p.ProduceBulk(events, "") + assert.Error(t, err, "error when sending message: simulated error") + }) + t.Run("should return an error if stream creation exceeds resource limit", func(t *testing.T) { client := &mockKinesisClient{} client.On( @@ -29,9 +59,18 @@ func TestKinesisProducer_UnitTest(t *testing.T) { mock.Anything, ).Return( &kinesis.DescribeStreamSummaryOutput{}, - fmt.Errorf("simulated error"), + &types.ResourceNotFoundException{}, ).Once() + client.On("CreateStream", mock.Anything, mock.Anything, mock.Anything). + Return( + &kinesis.CreateStreamOutput{}, + &types.LimitExceededException{ + Message: aws.String("stream limit reached"), + }, + ).Once() + defer client.AssertExpectations(t) + p, err := New( nil, // we will override it later WithStreamAutocreate(true), @@ -43,6 +82,29 @@ func TestKinesisProducer_UnitTest(t *testing.T) { p.client = client err = p.ProduceBulk(events, "") - assert.NotNil(t, err) + assert.Error(t, err, "error when sending messages: LimitExceededException: stream limit reached") + }) + t.Run("should return an error if rate limit is exceeded", func(t *testing.T) { + + client := &mockKinesisClient{} + + client.On("PutRecord", mock.Anything, mock.Anything, mock.Anything). 
+ Return( + &kinesis.PutRecordOutput{}, + &types.ProvisionedThroughputExceededException{ + Message: aws.String("put limit exceeded"), + }, + ).Once() + defer client.AssertExpectations(t) + + p, err := New(nil) + if err != nil { + t.Errorf("error constructing client: %v", err) + return + } + p.client = client + + err = p.ProduceBulk(events, "") + assert.Error(t, err, "error when sending messages: ProvisionedThroughputExceededException: put limit exceeded") }) } From e6fa9a16b295a0d902a4909fdf48d65f69af181f Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 9 Sep 2024 17:18:38 +0530 Subject: [PATCH 61/66] publisher: kinesis: refactor tests --- publisher/kinesis/kinesis.go | 7 +- publisher/kinesis/kinesis_test.go | 161 +++++++++++++++--------------- 2 files changed, 83 insertions(+), 85 deletions(-) diff --git a/publisher/kinesis/kinesis.go b/publisher/kinesis/kinesis.go index dc561899..68f84139 100644 --- a/publisher/kinesis/kinesis.go +++ b/publisher/kinesis/kinesis.go @@ -20,14 +20,15 @@ import ( var globalCtx = context.Background() -type KinesisClient interface { +// Client is an interface to *kinesis.Client +type Client interface { PutRecord(context.Context, *kinesis.PutRecordInput, ...func(*kinesis.Options)) (*kinesis.PutRecordOutput, error) DescribeStreamSummary(context.Context, *kinesis.DescribeStreamSummaryInput, ...func(*kinesis.Options)) (*kinesis.DescribeStreamSummaryOutput, error) CreateStream(context.Context, *kinesis.CreateStreamInput, ...func(*kinesis.Options)) (*kinesis.CreateStreamOutput, error) } type Publisher struct { - client KinesisClient + client Client streamLock sync.RWMutex streams map[string]bool @@ -215,7 +216,7 @@ func WithStreamProbleInterval(interval time.Duration) Opt { } } -func New(client *kinesis.Client, opts ...Opt) (*Publisher, error) { +func New(client Client, opts ...Opt) (*Publisher, error) { p := &Publisher{ client: client, streamPattern: "%s", diff --git a/publisher/kinesis/kinesis_test.go b/publisher/kinesis/kinesis_test.go index 8460ed5a..3398c1b0 100644 --- a/publisher/kinesis/kinesis_test.go +++ b/publisher/kinesis/kinesis_test.go @@ -18,93 +18,90 @@ func TestKinesisProducer_UnitTest(t *testing.T) { Type: "unknown", }, } - t.Run("should return an error if stream existence check fails", func(t *testing.T) { - client := &mockKinesisClient{} - client.On( - "DescribeStreamSummary", - mock.Anything, - &kinesis.DescribeStreamSummaryInput{ - StreamName: aws.String("unknown"), + testCases := []struct { + Desc string + Init func(*mockKinesisClient) + Opts []Opt + ExpectedErr string + }{ + { + Desc: "should return an error if stream existence check fails", + Init: func(client *mockKinesisClient) { + client.On( + "DescribeStreamSummary", + mock.Anything, + &kinesis.DescribeStreamSummaryInput{ + StreamName: aws.String("unknown"), + }, + mock.Anything, + ).Return( + &kinesis.DescribeStreamSummaryOutput{}, + fmt.Errorf("simulated error"), + ).Once() }, - mock.Anything, - ).Return( - &kinesis.DescribeStreamSummaryOutput{}, - fmt.Errorf("simulated error"), - ).Once() - defer client.AssertExpectations(t) - - p, err := New( - nil, // we will override it later - WithStreamAutocreate(true), - ) - if err != nil { - t.Errorf("error constructing client: %v", err) - return - } - p.client = client - - err = p.ProduceBulk(events, "") - assert.Error(t, err, "error when sending message: simulated error") - }) - t.Run("should return an error if stream creation exceeds resource limit", func(t *testing.T) { - client := &mockKinesisClient{} - - client.On( - 
"DescribeStreamSummary", - mock.Anything, - &kinesis.DescribeStreamSummaryInput{ - StreamName: aws.String("unknown"), + Opts: []Opt{ + WithStreamAutocreate(true), }, - mock.Anything, - ).Return( - &kinesis.DescribeStreamSummaryOutput{}, - &types.ResourceNotFoundException{}, - ).Once() - - client.On("CreateStream", mock.Anything, mock.Anything, mock.Anything). - Return( - &kinesis.CreateStreamOutput{}, - &types.LimitExceededException{ - Message: aws.String("stream limit reached"), - }, - ).Once() - defer client.AssertExpectations(t) - - p, err := New( - nil, // we will override it later - WithStreamAutocreate(true), - ) - if err != nil { - t.Errorf("error constructing client: %v", err) - return - } - p.client = client - - err = p.ProduceBulk(events, "") - assert.Error(t, err, "error when sending messages: LimitExceededException: stream limit reached") - }) - t.Run("should return an error if rate limit is exceeded", func(t *testing.T) { + ExpectedErr: "error when sending message: simulated error", + }, + { + Desc: "should return an error if stream creation exceeds resource limit", + Init: func(client *mockKinesisClient) { + client.On( + "DescribeStreamSummary", + mock.Anything, + &kinesis.DescribeStreamSummaryInput{ + StreamName: aws.String("unknown"), + }, + mock.Anything, + ).Return( + &kinesis.DescribeStreamSummaryOutput{}, + &types.ResourceNotFoundException{}, + ).Once() - client := &mockKinesisClient{} + client.On("CreateStream", mock.Anything, mock.Anything, mock.Anything). + Return( + &kinesis.CreateStreamOutput{}, + &types.LimitExceededException{ + Message: aws.String("stream limit reached"), + }, + ).Once() + }, + Opts: []Opt{ + WithStreamAutocreate(true), + }, + ExpectedErr: "error when sending messages: LimitExceededException: stream limit reached", + }, + { + Desc: "should return an error if rate limit is exceeded", + Init: func(client *mockKinesisClient) { + client.On("PutRecord", mock.Anything, mock.Anything, mock.Anything). + Return( + &kinesis.PutRecordOutput{}, + &types.ProvisionedThroughputExceededException{ + Message: aws.String("put limit exceeded"), + }, + ).Once() + }, + ExpectedErr: "error when sending messages: ProvisionedThroughputExceededException: put limit exceeded", + }, + } + for _, testCase := range testCases { + t.Run(testCase.Desc, func(t *testing.T) { + client := &mockKinesisClient{} + testCase.Init(client) + defer client.AssertExpectations(t) - client.On("PutRecord", mock.Anything, mock.Anything, mock.Anything). - Return( - &kinesis.PutRecordOutput{}, - &types.ProvisionedThroughputExceededException{ - Message: aws.String("put limit exceeded"), - }, - ).Once() - defer client.AssertExpectations(t) + p, err := New(client, testCase.Opts...) 
+ if err != nil { + t.Errorf("error constructing client: %v", err) + return + } - p, err := New(nil) - if err != nil { - t.Errorf("error constructing client: %v", err) - return - } - p.client = client + err = p.ProduceBulk(events, "") + assert.Error(t, err, testCase.ExpectedErr) + }) - err = p.ProduceBulk(events, "") - assert.Error(t, err, "error when sending messages: ProvisionedThroughputExceededException: put limit exceeded") - }) + } } From c6a037a4c5ebf3fd7d48e16f37c8b6483321b9e9 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 9 Sep 2024 20:59:51 +0530 Subject: [PATCH 62/66] hygiene: reorganise package structure --- app/proc.go | 2 +- app/server.go | 10 +++++----- cmd/server.go | 6 +++--- {collector => core/collector}/collector.go | 2 +- {collector => core/collector}/mock.go | 0 {collector => core/collector}/service.go | 2 +- {collector => core/collector}/service_test.go | 2 +- .../deserialization}/deserializer.go | 0 {deserialization => core/deserialization}/json.go | 0 .../deserialization}/json_test.go | 0 {deserialization => core/deserialization}/proto.go | 0 .../deserialization}/proto_test.go | 0 .../identification}/identifier.go | 0 {serialization => core/serialization}/json.go | 0 {serialization => core/serialization}/json_test.go | 0 {serialization => core/serialization}/proto.go | 0 {serialization => core/serialization}/proto_test.go | 2 +- {serialization => core/serialization}/serializer.go | 0 {worker => core/worker}/init_test.go | 2 +- {worker => core/worker}/mocks.go | 0 {worker => core/worker}/worker.go | 8 ++++---- {worker => core/worker}/worker_test.go | 8 ++++---- dashboards/grafana.json => grafana.json | 0 {clock => pkg/clock}/clock.go | 0 {clock => pkg/clock}/mock.go | 0 {logger => pkg/logger}/logger.go | 0 {metrics => pkg/metrics}/metric_test.go | 0 {metrics => pkg/metrics}/metrics.go | 2 +- {metrics => pkg/metrics}/mock.go | 0 {metrics => pkg/metrics}/prometheus.go | 2 +- {metrics => pkg/metrics}/prometheus_test.go | 0 {metrics => pkg/metrics}/statsd.go | 2 +- {middleware => pkg/middleware}/cors.go | 0 {middleware => pkg/middleware}/util.go | 0 publisher/kafka/kafka.go | 4 ++-- publisher/kafka/kafka_test.go | 2 +- publisher/kinesis/kinesis.go | 2 +- publisher/kinesis/kinesis_integration_test.go | 2 +- publisher/log/log.go | 2 +- publisher/pubsub/pubsub.go | 2 +- publisher/pubsub/pubsub_test.go | 2 +- {services => server}/grpc/handler.go | 8 ++++---- {services => server}/grpc/handler_test.go | 6 +++--- {services => server}/grpc/service.go | 2 +- {services => server}/pprof/service.go | 0 {services => server}/rest/handler.go | 12 ++++++------ {services => server}/rest/handler_test.go | 4 ++-- {services => server}/rest/response.go | 2 +- {services => server}/rest/response_test.go | 2 +- {services => server}/rest/service.go | 10 +++++----- {services => server}/rest/websocket/ack.go | 6 +++--- .../rest/websocket/connection/conn.go | 6 +++--- .../rest/websocket/connection/table.go | 2 +- .../rest/websocket/connection/table_test.go | 2 +- .../rest/websocket/connection/upgrader.go | 6 +++--- .../rest/websocket/connection/upgrader_test.go | 4 ++-- {services => server}/rest/websocket/handler.go | 12 ++++++------ {services => server}/rest/websocket/handler_test.go | 8 ++++---- {services => server}/rest/websocket/pinger.go | 8 ++++---- {services => server}/services.go | 12 ++++++------ .../integration}/integration_test.go | 0 61 files changed, 89 insertions(+), 89 deletions(-) rename {collector => core/collector}/collector.go (88%) rename {collector => 
core/collector}/mock.go (100%) rename {collector => core/collector}/service.go (90%) rename {collector => core/collector}/service_test.go (96%) rename {deserialization => core/deserialization}/deserializer.go (100%) rename {deserialization => core/deserialization}/json.go (100%) rename {deserialization => core/deserialization}/json_test.go (100%) rename {deserialization => core/deserialization}/proto.go (100%) rename {deserialization => core/deserialization}/proto_test.go (100%) rename {identification => core/identification}/identifier.go (100%) rename {serialization => core/serialization}/json.go (100%) rename {serialization => core/serialization}/json_test.go (100%) rename {serialization => core/serialization}/proto.go (100%) rename {serialization => core/serialization}/proto_test.go (91%) rename {serialization => core/serialization}/serializer.go (100%) rename {worker => core/worker}/init_test.go (75%) rename {worker => core/worker}/mocks.go (100%) rename {worker => core/worker}/worker.go (95%) rename {worker => core/worker}/worker_test.go (96%) rename dashboards/grafana.json => grafana.json (100%) rename {clock => pkg/clock}/clock.go (100%) rename {clock => pkg/clock}/mock.go (100%) rename {logger => pkg/logger}/logger.go (100%) rename {metrics => pkg/metrics}/metric_test.go (100%) rename {metrics => pkg/metrics}/metrics.go (98%) rename {metrics => pkg/metrics}/mock.go (100%) rename {metrics => pkg/metrics}/prometheus.go (99%) rename {metrics => pkg/metrics}/prometheus_test.go (100%) rename {metrics => pkg/metrics}/statsd.go (97%) rename {middleware => pkg/middleware}/cors.go (100%) rename {middleware => pkg/middleware}/util.go (100%) rename {services => server}/grpc/handler.go (93%) rename {services => server}/grpc/handler_test.go (96%) rename {services => server}/grpc/service.go (94%) rename {services => server}/pprof/service.go (100%) rename {services => server}/rest/handler.go (95%) rename {services => server}/rest/handler_test.go (98%) rename {services => server}/rest/response.go (93%) rename {services => server}/rest/response_test.go (99%) rename {services => server}/rest/service.go (89%) rename {services => server}/rest/websocket/ack.go (84%) rename {services => server}/rest/websocket/connection/conn.go (89%) rename {services => server}/rest/websocket/connection/table.go (97%) rename {services => server}/rest/websocket/connection/table_test.go (99%) rename {services => server}/rest/websocket/connection/upgrader.go (97%) rename {services => server}/rest/websocket/connection/upgrader_test.go (98%) rename {services => server}/rest/websocket/handler.go (95%) rename {services => server}/rest/websocket/handler_test.go (96%) rename {services => server}/rest/websocket/pinger.go (81%) rename {services => server}/services.go (81%) rename {integration => test/integration}/integration_test.go (100%) diff --git a/app/proc.go b/app/proc.go index 9427df1d..e34724b4 100644 --- a/app/proc.go +++ b/app/proc.go @@ -4,7 +4,7 @@ import ( "context" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" ) // Run the server diff --git a/app/server.go b/app/server.go index a2f33f7b..54d188bb 100644 --- a/app/server.go +++ b/app/server.go @@ -10,17 +10,17 @@ import ( "syscall" "time" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/core/worker" + 
"github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" "github.com/raystack/raccoon/publisher" "github.com/raystack/raccoon/publisher/kafka" "github.com/raystack/raccoon/publisher/kinesis" logpub "github.com/raystack/raccoon/publisher/log" "github.com/raystack/raccoon/publisher/pubsub" - "github.com/raystack/raccoon/services" - "github.com/raystack/raccoon/worker" + services "github.com/raystack/raccoon/server" pubsubsdk "cloud.google.com/go/pubsub" awsconfig "github.com/aws/aws-sdk-go-v2/config" diff --git a/cmd/server.go b/cmd/server.go index 6d6e6456..c64eab69 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -7,9 +7,9 @@ import ( "github.com/raystack/raccoon/app" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" - "github.com/raystack/raccoon/middleware" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" + "github.com/raystack/raccoon/pkg/middleware" "github.com/spf13/cobra" "github.com/spf13/pflag" ) diff --git a/collector/collector.go b/core/collector/collector.go similarity index 88% rename from collector/collector.go rename to core/collector/collector.go index 041c3992..552e86b1 100644 --- a/collector/collector.go +++ b/core/collector/collector.go @@ -4,7 +4,7 @@ import ( "context" "time" - "github.com/raystack/raccoon/identification" + "github.com/raystack/raccoon/core/identification" pb "github.com/raystack/raccoon/proto" ) diff --git a/collector/mock.go b/core/collector/mock.go similarity index 100% rename from collector/mock.go rename to core/collector/mock.go diff --git a/collector/service.go b/core/collector/service.go similarity index 90% rename from collector/service.go rename to core/collector/service.go index 0562a3c9..8e0e5fba 100644 --- a/collector/service.go +++ b/core/collector/service.go @@ -3,7 +3,7 @@ package collector import ( "context" - "github.com/raystack/raccoon/clock" + "github.com/raystack/raccoon/pkg/clock" ) type ChannelCollector struct { diff --git a/collector/service_test.go b/core/collector/service_test.go similarity index 96% rename from collector/service_test.go rename to core/collector/service_test.go index f7e89991..7d783640 100644 --- a/collector/service_test.go +++ b/core/collector/service_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/raystack/raccoon/clock" + "github.com/raystack/raccoon/pkg/clock" "github.com/stretchr/testify/assert" ) diff --git a/deserialization/deserializer.go b/core/deserialization/deserializer.go similarity index 100% rename from deserialization/deserializer.go rename to core/deserialization/deserializer.go diff --git a/deserialization/json.go b/core/deserialization/json.go similarity index 100% rename from deserialization/json.go rename to core/deserialization/json.go diff --git a/deserialization/json_test.go b/core/deserialization/json_test.go similarity index 100% rename from deserialization/json_test.go rename to core/deserialization/json_test.go diff --git a/deserialization/proto.go b/core/deserialization/proto.go similarity index 100% rename from deserialization/proto.go rename to core/deserialization/proto.go diff --git a/deserialization/proto_test.go b/core/deserialization/proto_test.go similarity index 100% rename from deserialization/proto_test.go rename to core/deserialization/proto_test.go diff --git a/identification/identifier.go b/core/identification/identifier.go similarity index 100% rename from identification/identifier.go rename to 
core/identification/identifier.go diff --git a/serialization/json.go b/core/serialization/json.go similarity index 100% rename from serialization/json.go rename to core/serialization/json.go diff --git a/serialization/json_test.go b/core/serialization/json_test.go similarity index 100% rename from serialization/json_test.go rename to core/serialization/json_test.go diff --git a/serialization/proto.go b/core/serialization/proto.go similarity index 100% rename from serialization/proto.go rename to core/serialization/proto.go diff --git a/serialization/proto_test.go b/core/serialization/proto_test.go similarity index 91% rename from serialization/proto_test.go rename to core/serialization/proto_test.go index a3522d3e..197fa632 100644 --- a/serialization/proto_test.go +++ b/core/serialization/proto_test.go @@ -3,8 +3,8 @@ package serialization_test import ( "testing" + "github.com/raystack/raccoon/core/serialization" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/serialization" "github.com/stretchr/testify/assert" ) diff --git a/serialization/serializer.go b/core/serialization/serializer.go similarity index 100% rename from serialization/serializer.go rename to core/serialization/serializer.go diff --git a/worker/init_test.go b/core/worker/init_test.go similarity index 75% rename from worker/init_test.go rename to core/worker/init_test.go index ed469cf5..8a0bc5cc 100644 --- a/worker/init_test.go +++ b/core/worker/init_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" ) func TestMain(t *testing.M) { diff --git a/worker/mocks.go b/core/worker/mocks.go similarity index 100% rename from worker/mocks.go rename to core/worker/mocks.go diff --git a/worker/worker.go b/core/worker/worker.go similarity index 95% rename from worker/worker.go rename to core/worker/worker.go index 8df63283..643b34b4 100644 --- a/worker/worker.go +++ b/core/worker/worker.go @@ -5,10 +5,10 @@ import ( "sync" "time" - "github.com/raystack/raccoon/clock" - "github.com/raystack/raccoon/collector" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/pkg/clock" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" ) diff --git a/worker/worker_test.go b/core/worker/worker_test.go similarity index 96% rename from worker/worker_test.go rename to core/worker/worker_test.go index 7607b4e0..51892ce8 100644 --- a/worker/worker_test.go +++ b/core/worker/worker_test.go @@ -5,10 +5,10 @@ import ( "testing" "time" - "github.com/raystack/raccoon/clock" - "github.com/raystack/raccoon/collector" - "github.com/raystack/raccoon/identification" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/core/identification" + "github.com/raystack/raccoon/pkg/clock" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" "github.com/stretchr/testify/assert" diff --git a/dashboards/grafana.json b/grafana.json similarity index 100% rename from dashboards/grafana.json rename to grafana.json diff --git a/clock/clock.go b/pkg/clock/clock.go similarity index 100% rename from clock/clock.go rename to pkg/clock/clock.go diff --git a/clock/mock.go b/pkg/clock/mock.go similarity index 100% rename from clock/mock.go rename 
to pkg/clock/mock.go diff --git a/logger/logger.go b/pkg/logger/logger.go similarity index 100% rename from logger/logger.go rename to pkg/logger/logger.go diff --git a/metrics/metric_test.go b/pkg/metrics/metric_test.go similarity index 100% rename from metrics/metric_test.go rename to pkg/metrics/metric_test.go diff --git a/metrics/metrics.go b/pkg/metrics/metrics.go similarity index 98% rename from metrics/metrics.go rename to pkg/metrics/metrics.go index 0ae09c12..bdd851b2 100644 --- a/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -4,7 +4,7 @@ import ( "errors" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" ) var instrument MetricInstrument = voidInstrument{} diff --git a/metrics/mock.go b/pkg/metrics/mock.go similarity index 100% rename from metrics/mock.go rename to pkg/metrics/mock.go diff --git a/metrics/prometheus.go b/pkg/metrics/prometheus.go similarity index 99% rename from metrics/prometheus.go rename to pkg/metrics/prometheus.go index 9f98730b..b46830f4 100644 --- a/metrics/prometheus.go +++ b/pkg/metrics/prometheus.go @@ -10,7 +10,7 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" "github.com/spf13/cast" ) diff --git a/metrics/prometheus_test.go b/pkg/metrics/prometheus_test.go similarity index 100% rename from metrics/prometheus_test.go rename to pkg/metrics/prometheus_test.go diff --git a/metrics/statsd.go b/pkg/metrics/statsd.go similarity index 97% rename from metrics/statsd.go rename to pkg/metrics/statsd.go index 6c3b23e8..612d8dbe 100644 --- a/metrics/statsd.go +++ b/pkg/metrics/statsd.go @@ -6,7 +6,7 @@ import ( "time" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" client "gopkg.in/alexcesaro/statsd.v2" ) diff --git a/middleware/cors.go b/pkg/middleware/cors.go similarity index 100% rename from middleware/cors.go rename to pkg/middleware/cors.go diff --git a/middleware/util.go b/pkg/middleware/util.go similarity index 100% rename from middleware/util.go rename to pkg/middleware/util.go diff --git a/publisher/kafka/kafka.go b/publisher/kafka/kafka.go index 44736242..b447c155 100644 --- a/publisher/kafka/kafka.go +++ b/publisher/kafka/kafka.go @@ -9,8 +9,8 @@ import ( "github.com/confluentinc/confluent-kafka-go/kafka" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" ) diff --git a/publisher/kafka/kafka_test.go b/publisher/kafka/kafka_test.go index ad5425db..10f5d00a 100644 --- a/publisher/kafka/kafka_test.go +++ b/publisher/kafka/kafka_test.go @@ -7,7 +7,7 @@ import ( "testing" "github.com/confluentinc/confluent-kafka-go/kafka" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" "github.com/stretchr/testify/assert" diff --git a/publisher/kinesis/kinesis.go b/publisher/kinesis/kinesis.go index 68f84139..9aae00e4 100644 --- a/publisher/kinesis/kinesis.go +++ b/publisher/kinesis/kinesis.go @@ -13,7 +13,7 @@ import ( "github.com/aws/aws-sdk-go-v2/aws" "github.com/aws/aws-sdk-go-v2/service/kinesis" 
"github.com/aws/aws-sdk-go-v2/service/kinesis/types" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" ) diff --git a/publisher/kinesis/kinesis_integration_test.go b/publisher/kinesis/kinesis_integration_test.go index 2a9b98db..7d4d7d3d 100644 --- a/publisher/kinesis/kinesis_integration_test.go +++ b/publisher/kinesis/kinesis_integration_test.go @@ -14,7 +14,7 @@ import ( "github.com/aws/aws-sdk-go-v2/config" kinesis_sdk "github.com/aws/aws-sdk-go-v2/service/kinesis" "github.com/aws/aws-sdk-go-v2/service/kinesis/types" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher/kinesis" "github.com/stretchr/testify/require" diff --git a/publisher/log/log.go b/publisher/log/log.go index 5de24caf..e6e52f4a 100644 --- a/publisher/log/log.go +++ b/publisher/log/log.go @@ -5,7 +5,7 @@ import ( "encoding/json" "fmt" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" "github.com/turtleDev/protoraw" diff --git a/publisher/pubsub/pubsub.go b/publisher/pubsub/pubsub.go index 1ef0a543..d7dac130 100644 --- a/publisher/pubsub/pubsub.go +++ b/publisher/pubsub/pubsub.go @@ -11,7 +11,7 @@ import ( "cloud.google.com/go/pubsub" "github.com/googleapis/gax-go/v2/apierror" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher" "google.golang.org/grpc/codes" diff --git a/publisher/pubsub/pubsub_test.go b/publisher/pubsub/pubsub_test.go index 13a62b51..94c46934 100644 --- a/publisher/pubsub/pubsub_test.go +++ b/publisher/pubsub/pubsub_test.go @@ -9,7 +9,7 @@ import ( "time" pubsubsdk "cloud.google.com/go/pubsub" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/pkg/logger" raccoonv1 "github.com/raystack/raccoon/proto" "github.com/raystack/raccoon/publisher/pubsub" "github.com/stretchr/testify/assert" diff --git a/services/grpc/handler.go b/server/grpc/handler.go similarity index 93% rename from services/grpc/handler.go rename to server/grpc/handler.go index a3ddcead..b9943d69 100644 --- a/services/grpc/handler.go +++ b/server/grpc/handler.go @@ -5,11 +5,11 @@ import ( "errors" "time" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/identification" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/core/identification" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "google.golang.org/grpc/metadata" ) diff --git a/services/grpc/handler_test.go b/server/grpc/handler_test.go similarity index 96% rename from services/grpc/handler_test.go rename to server/grpc/handler_test.go index 25bff036..e81bcdb5 100644 --- a/services/grpc/handler_test.go +++ b/server/grpc/handler_test.go @@ -7,10 +7,10 @@ import ( "reflect" "testing" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb 
"github.com/raystack/raccoon/proto" "github.com/stretchr/testify/mock" "google.golang.org/grpc/metadata" diff --git a/services/grpc/service.go b/server/grpc/service.go similarity index 94% rename from services/grpc/service.go rename to server/grpc/service.go index d425ca5f..9c07c2cf 100644 --- a/services/grpc/service.go +++ b/server/grpc/service.go @@ -5,8 +5,8 @@ import ( "fmt" "net" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" + "github.com/raystack/raccoon/core/collector" pb "github.com/raystack/raccoon/proto" "google.golang.org/grpc" ) diff --git a/services/pprof/service.go b/server/pprof/service.go similarity index 100% rename from services/pprof/service.go rename to server/pprof/service.go diff --git a/services/rest/handler.go b/server/rest/handler.go similarity index 95% rename from services/rest/handler.go rename to server/rest/handler.go index 46d7a1a9..f5f94926 100644 --- a/services/rest/handler.go +++ b/server/rest/handler.go @@ -6,14 +6,14 @@ import ( "net/http" "time" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/deserialization" - "github.com/raystack/raccoon/identification" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/core/deserialization" + "github.com/raystack/raccoon/core/identification" + "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/serialization" ) const ( diff --git a/services/rest/handler_test.go b/server/rest/handler_test.go similarity index 98% rename from services/rest/handler_test.go rename to server/rest/handler_test.go index eafa9e0e..3356fdbe 100644 --- a/services/rest/handler_test.go +++ b/server/rest/handler_test.go @@ -11,9 +11,9 @@ import ( "testing" "testing/iotest" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/logger" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/pkg/logger" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" diff --git a/services/rest/response.go b/server/rest/response.go similarity index 93% rename from services/rest/response.go rename to server/rest/response.go index 5ebfedac..ef98eb6b 100644 --- a/services/rest/response.go +++ b/server/rest/response.go @@ -3,8 +3,8 @@ package rest import ( "io" + "github.com/raystack/raccoon/core/serialization" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/serialization" ) type Response struct { diff --git a/services/rest/response_test.go b/server/rest/response_test.go similarity index 99% rename from services/rest/response_test.go rename to server/rest/response_test.go index ff785619..ce1b11b1 100644 --- a/services/rest/response_test.go +++ b/server/rest/response_test.go @@ -7,8 +7,8 @@ import ( "testing" "time" + "github.com/raystack/raccoon/core/serialization" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/serialization" ) func TestResponse_SetCode(t *testing.T) { diff --git a/services/rest/service.go b/server/rest/service.go similarity index 89% rename from services/rest/service.go rename to server/rest/service.go index 61df1a63..569ae76a 100644 --- a/services/rest/service.go +++ b/server/rest/service.go @@ -6,12 +6,12 @@ import ( "time" "github.com/gorilla/mux" - 
"github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/metrics" - "github.com/raystack/raccoon/middleware" - "github.com/raystack/raccoon/services/rest/websocket" - "github.com/raystack/raccoon/services/rest/websocket/connection" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/pkg/metrics" + "github.com/raystack/raccoon/pkg/middleware" + "github.com/raystack/raccoon/server/rest/websocket" + "github.com/raystack/raccoon/server/rest/websocket/connection" ) type Service struct { diff --git a/services/rest/websocket/ack.go b/server/rest/websocket/ack.go similarity index 84% rename from services/rest/websocket/ack.go rename to server/rest/websocket/ack.go index 8767c3c7..81d6579d 100644 --- a/services/rest/websocket/ack.go +++ b/server/rest/websocket/ack.go @@ -3,9 +3,9 @@ package websocket import ( "time" - "github.com/raystack/raccoon/metrics" - "github.com/raystack/raccoon/serialization" - "github.com/raystack/raccoon/services/rest/websocket/connection" + "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/pkg/metrics" + "github.com/raystack/raccoon/server/rest/websocket/connection" ) var AckChan = make(chan AckInfo) diff --git a/services/rest/websocket/connection/conn.go b/server/rest/websocket/connection/conn.go similarity index 89% rename from services/rest/websocket/connection/conn.go rename to server/rest/websocket/connection/conn.go index 4e46fda7..55d8f8b9 100644 --- a/services/rest/websocket/connection/conn.go +++ b/server/rest/websocket/connection/conn.go @@ -4,9 +4,9 @@ import ( "time" "github.com/gorilla/websocket" - "github.com/raystack/raccoon/identification" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/identification" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" ) type Conn struct { diff --git a/services/rest/websocket/connection/table.go b/server/rest/websocket/connection/table.go similarity index 97% rename from services/rest/websocket/connection/table.go rename to server/rest/websocket/connection/table.go index 3ed9c892..037c1e42 100644 --- a/services/rest/websocket/connection/table.go +++ b/server/rest/websocket/connection/table.go @@ -4,7 +4,7 @@ import ( "errors" "sync" - "github.com/raystack/raccoon/identification" + "github.com/raystack/raccoon/core/identification" ) var ( diff --git a/services/rest/websocket/connection/table_test.go b/server/rest/websocket/connection/table_test.go similarity index 99% rename from services/rest/websocket/connection/table_test.go rename to server/rest/websocket/connection/table_test.go index 109aa45e..adf103ad 100644 --- a/services/rest/websocket/connection/table_test.go +++ b/server/rest/websocket/connection/table_test.go @@ -4,7 +4,7 @@ import ( "fmt" "testing" - "github.com/raystack/raccoon/identification" + "github.com/raystack/raccoon/core/identification" "github.com/stretchr/testify/assert" ) diff --git a/services/rest/websocket/connection/upgrader.go b/server/rest/websocket/connection/upgrader.go similarity index 97% rename from services/rest/websocket/connection/upgrader.go rename to server/rest/websocket/connection/upgrader.go index f22182f2..d243032d 100644 --- a/services/rest/websocket/connection/upgrader.go +++ b/server/rest/websocket/connection/upgrader.go @@ -7,9 +7,9 @@ import ( "time" "github.com/gorilla/websocket" - "github.com/raystack/raccoon/identification" - "github.com/raystack/raccoon/logger" - 
"github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/identification" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" "google.golang.org/protobuf/proto" ) diff --git a/services/rest/websocket/connection/upgrader_test.go b/server/rest/websocket/connection/upgrader_test.go similarity index 98% rename from services/rest/websocket/connection/upgrader_test.go rename to server/rest/websocket/connection/upgrader_test.go index 3c449356..b4c799a3 100644 --- a/services/rest/websocket/connection/upgrader_test.go +++ b/server/rest/websocket/connection/upgrader_test.go @@ -11,8 +11,8 @@ import ( "github.com/gorilla/mux" "github.com/gorilla/websocket" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" "github.com/stretchr/testify/assert" ) diff --git a/services/rest/websocket/handler.go b/server/rest/websocket/handler.go similarity index 95% rename from services/rest/websocket/handler.go rename to server/rest/websocket/handler.go index dff4fe23..7a2c4a69 100644 --- a/services/rest/websocket/handler.go +++ b/server/rest/websocket/handler.go @@ -6,14 +6,14 @@ import ( "time" "github.com/gorilla/websocket" - "github.com/raystack/raccoon/collector" "github.com/raystack/raccoon/config" - "github.com/raystack/raccoon/deserialization" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/core/deserialization" + "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/serialization" - "github.com/raystack/raccoon/services/rest/websocket/connection" + "github.com/raystack/raccoon/server/rest/websocket/connection" ) type serDe struct { diff --git a/services/rest/websocket/handler_test.go b/server/rest/websocket/handler_test.go similarity index 96% rename from services/rest/websocket/handler_test.go rename to server/rest/websocket/handler_test.go index b2baf957..fda49bc1 100644 --- a/services/rest/websocket/handler_test.go +++ b/server/rest/websocket/handler_test.go @@ -10,11 +10,11 @@ import ( "github.com/gorilla/mux" "github.com/gorilla/websocket" - "github.com/raystack/raccoon/collector" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" - "github.com/raystack/raccoon/services/rest/websocket/connection" + "github.com/raystack/raccoon/server/rest/websocket/connection" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" diff --git a/services/rest/websocket/pinger.go b/server/rest/websocket/pinger.go similarity index 81% rename from services/rest/websocket/pinger.go rename to server/rest/websocket/pinger.go index 8e01dd8c..ecea9c1c 100644 --- a/services/rest/websocket/pinger.go +++ b/server/rest/websocket/pinger.go @@ -4,10 +4,10 @@ import ( "fmt" "time" - "github.com/raystack/raccoon/identification" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/metrics" - "github.com/raystack/raccoon/services/rest/websocket/connection" + "github.com/raystack/raccoon/core/identification" + 
"github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/pkg/metrics" + "github.com/raystack/raccoon/server/rest/websocket/connection" ) // Pinger is worker that pings the connected peers based on ping interval. diff --git a/services/services.go b/server/services.go similarity index 81% rename from services/services.go rename to server/services.go index aa60774a..b4b8a2f1 100644 --- a/services/services.go +++ b/server/services.go @@ -1,14 +1,14 @@ -package services +package server import ( "context" "net/http" - "github.com/raystack/raccoon/collector" - "github.com/raystack/raccoon/logger" - "github.com/raystack/raccoon/services/grpc" - "github.com/raystack/raccoon/services/pprof" - "github.com/raystack/raccoon/services/rest" + "github.com/raystack/raccoon/core/collector" + "github.com/raystack/raccoon/pkg/logger" + "github.com/raystack/raccoon/server/grpc" + "github.com/raystack/raccoon/server/pprof" + "github.com/raystack/raccoon/server/rest" ) type bootstrapper interface { diff --git a/integration/integration_test.go b/test/integration/integration_test.go similarity index 100% rename from integration/integration_test.go rename to test/integration/integration_test.go From d5309e5a0a9370f95b4c0be5ddf439fc95185da7 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 9 Sep 2024 21:18:58 +0530 Subject: [PATCH 63/66] refactor: merge serialization and deserialization pkg into serde --- core/deserialization/deserializer.go | 3 -- core/deserialization/json_test.go | 35 ------------------- core/deserialization/proto.go | 19 ---------- core/{deserialization => serde}/json.go | 6 +++- core/{serialization => serde}/json_test.go | 34 +++++++++++++++++- core/{serialization => serde}/proto.go | 10 +++++- core/{deserialization => serde}/proto_test.go | 17 ++++++++- core/serde/serde.go | 5 +++ core/serialization/json.go | 7 ---- core/serialization/proto_test.go | 22 ------------ core/serialization/serializer.go | 3 -- go.mod | 4 +++ server/rest/handler.go | 23 ++++++------ server/rest/response.go | 4 +-- server/rest/response_test.go | 4 +-- server/rest/websocket/ack.go | 4 +-- server/rest/websocket/handler.go | 23 ++++++------ 17 files changed, 100 insertions(+), 123 deletions(-) delete mode 100644 core/deserialization/deserializer.go delete mode 100644 core/deserialization/json_test.go delete mode 100644 core/deserialization/proto.go rename core/{deserialization => serde}/json.go (54%) rename core/{serialization => serde}/json_test.go (55%) rename core/{serialization => serde}/proto.go (62%) rename core/{deserialization => serde}/proto_test.go (61%) create mode 100644 core/serde/serde.go delete mode 100644 core/serialization/json.go delete mode 100644 core/serialization/proto_test.go delete mode 100644 core/serialization/serializer.go diff --git a/core/deserialization/deserializer.go b/core/deserialization/deserializer.go deleted file mode 100644 index de54f468..00000000 --- a/core/deserialization/deserializer.go +++ /dev/null @@ -1,3 +0,0 @@ -package deserialization - -type DeserializeFunc func(b []byte, i interface{}) error diff --git a/core/deserialization/json_test.go b/core/deserialization/json_test.go deleted file mode 100644 index ee3fb854..00000000 --- a/core/deserialization/json_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package deserialization - -import "testing" - -func TestJSONDeserializer_Deserialize(t *testing.T) { - type args struct { - b []byte - i interface{} - } - tests := []struct { - name string - j DeserializeFunc - args args - wantErr bool - }{ - { - name: "Use JSON 
Deserializer", - j: DeserializeJSON, - args: args{ - b: []byte(`{"A": "a"}`), - i: &struct { - A string - }{}, - }, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.j(tt.args.b, tt.args.i); (err != nil) != tt.wantErr { - t.Errorf("JSONDeserializer.Deserialize() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} diff --git a/core/deserialization/proto.go b/core/deserialization/proto.go deleted file mode 100644 index 3e759134..00000000 --- a/core/deserialization/proto.go +++ /dev/null @@ -1,19 +0,0 @@ -package deserialization - -import ( - "errors" - - "google.golang.org/protobuf/proto" -) - -var ErrInvalidProtoMessage = errors.New("invalld proto message") - -// type ProtoDeserilizer struct{} - -func DeserializeProto(b []byte, i interface{}) error { - msg, ok := i.(proto.Message) - if !ok { - return ErrInvalidProtoMessage - } - return proto.Unmarshal(b, msg) -} diff --git a/core/deserialization/json.go b/core/serde/json.go similarity index 54% rename from core/deserialization/json.go rename to core/serde/json.go index f5f25d1e..dcf11d13 100644 --- a/core/deserialization/json.go +++ b/core/serde/json.go @@ -1,7 +1,11 @@ -package deserialization +package serde import "encoding/json" func DeserializeJSON(b []byte, i interface{}) error { return json.Unmarshal(b, i) } + +func SerializeJSON(m interface{}) ([]byte, error) { + return json.Marshal(m) +} diff --git a/core/serialization/json_test.go b/core/serde/json_test.go similarity index 55% rename from core/serialization/json_test.go rename to core/serde/json_test.go index 670e79ee..22d45ceb 100644 --- a/core/serialization/json_test.go +++ b/core/serde/json_test.go @@ -1,4 +1,4 @@ -package serialization +package serde import ( "reflect" @@ -7,6 +7,38 @@ import ( pb "github.com/raystack/raccoon/proto" ) +func TestJSONDeserializer_Deserialize(t *testing.T) { + type args struct { + b []byte + i interface{} + } + tests := []struct { + name string + j DeserializeFunc + args args + wantErr bool + }{ + { + name: "Use JSON Deserializer", + j: DeserializeJSON, + args: args{ + b: []byte(`{"A": "a"}`), + i: &struct { + A string + }{}, + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if err := tt.j(tt.args.b, tt.args.i); (err != nil) != tt.wantErr { + t.Errorf("JSONDeserializer.Deserialize() error = %v, wantErr %v", err, tt.wantErr) + } + }) + } +} + func TestJSONSerializer_Serialize(t *testing.T) { type args struct { m interface{} diff --git a/core/serialization/proto.go b/core/serde/proto.go similarity index 62% rename from core/serialization/proto.go rename to core/serde/proto.go index f97e1723..5d07b1f9 100644 --- a/core/serialization/proto.go +++ b/core/serde/proto.go @@ -1,4 +1,4 @@ -package serialization +package serde import ( "errors" @@ -17,3 +17,11 @@ func SerializeProto(m interface{}) ([]byte, error) { } return proto.Marshal(msg) } + +func DeserializeProto(b []byte, i interface{}) error { + msg, ok := i.(proto.Message) + if !ok { + return ErrInvalidProtoMessage + } + return proto.Unmarshal(b, msg) +} diff --git a/core/deserialization/proto_test.go b/core/serde/proto_test.go similarity index 61% rename from core/deserialization/proto_test.go rename to core/serde/proto_test.go index cb188068..cce91497 100644 --- a/core/deserialization/proto_test.go +++ b/core/serde/proto_test.go @@ -1,8 +1,10 @@ -package deserialization +package serde import ( "testing" + "github.com/alecthomas/assert" + 
"github.com/raystack/raccoon/core/serialization" pb "github.com/raystack/raccoon/proto" ) @@ -44,3 +46,16 @@ func TestProtoDeserilizer_Deserialize(t *testing.T) { }) } } + +func TestSerialiseProto(t *testing.T) { + t.Run("should return an error if argument is a non-protobuf message", func(t *testing.T) { + arg := struct{}{} + _, err := serialization.SerializeProto(arg) + assert.Equal(t, err, serialization.ErrInvalidProtoMessage) + }) + t.Run("should serialize a proto message", func(t *testing.T) { + v := &pb.SendEventRequest{} + _, err := serialization.SerializeProto(v) + assert.Nil(t, err) + }) +} diff --git a/core/serde/serde.go b/core/serde/serde.go new file mode 100644 index 00000000..6079495d --- /dev/null +++ b/core/serde/serde.go @@ -0,0 +1,5 @@ +package serde + +type DeserializeFunc func(b []byte, i interface{}) error + +type SerializeFunc func(m interface{}) ([]byte, error) diff --git a/core/serialization/json.go b/core/serialization/json.go deleted file mode 100644 index 64f1c6e7..00000000 --- a/core/serialization/json.go +++ /dev/null @@ -1,7 +0,0 @@ -package serialization - -import "encoding/json" - -func SerializeJSON(m interface{}) ([]byte, error) { - return json.Marshal(m) -} diff --git a/core/serialization/proto_test.go b/core/serialization/proto_test.go deleted file mode 100644 index 197fa632..00000000 --- a/core/serialization/proto_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package serialization_test - -import ( - "testing" - - "github.com/raystack/raccoon/core/serialization" - pb "github.com/raystack/raccoon/proto" - "github.com/stretchr/testify/assert" -) - -func TestSerialiseProto(t *testing.T) { - t.Run("should return an error if argument is a non-protobuf message", func(t *testing.T) { - arg := struct{}{} - _, err := serialization.SerializeProto(arg) - assert.Equal(t, err, serialization.ErrInvalidProtoMessage) - }) - t.Run("should serialize a proto message", func(t *testing.T) { - v := &pb.SendEventRequest{} - _, err := serialization.SerializeProto(v) - assert.Nil(t, err) - }) -} diff --git a/core/serialization/serializer.go b/core/serialization/serializer.go deleted file mode 100644 index 1cf2458d..00000000 --- a/core/serialization/serializer.go +++ /dev/null @@ -1,3 +0,0 @@ -package serialization - -type SerializeFunc func(m interface{}) ([]byte, error) diff --git a/go.mod b/go.mod index f432aa1f..4bfa3200 100644 --- a/go.mod +++ b/go.mod @@ -5,6 +5,7 @@ go 1.22.4 require ( cloud.google.com/go/pubsub v1.38.0 github.com/MakeNowJust/heredoc v1.0.0 + github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 github.com/aws/aws-sdk-go-v2 v1.27.2 github.com/aws/aws-sdk-go-v2/config v1.27.18 github.com/aws/aws-sdk-go-v2/service/kinesis v1.27.10 @@ -35,6 +36,8 @@ require ( cloud.google.com/go/compute/metadata v0.3.0 // indirect cloud.google.com/go/iam v1.1.7 // indirect github.com/alecthomas/chroma v0.8.2 // indirect + github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 // indirect + github.com/alecthomas/repr v0.0.0-20180818092828-117648cd9897 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.2 // indirect github.com/aws/aws-sdk-go-v2/credentials v1.17.18 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.5 // indirect @@ -92,6 +95,7 @@ require ( github.com/rivo/uniseg v0.2.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/schollz/progressbar/v3 v3.8.5 // indirect + github.com/sergi/go-diff v1.0.0 // indirect github.com/spf13/afero v1.9.5 // indirect github.com/spf13/jwalterweatherman v1.1.0 // 
indirect github.com/spf13/viper v1.16.0 // indirect diff --git a/server/rest/handler.go b/server/rest/handler.go index f5f94926..7b46974e 100644 --- a/server/rest/handler.go +++ b/server/rest/handler.go @@ -8,9 +8,8 @@ import ( "github.com/raystack/raccoon/config" "github.com/raystack/raccoon/core/collector" - "github.com/raystack/raccoon/core/deserialization" "github.com/raystack/raccoon/core/identification" - "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/core/serde" "github.com/raystack/raccoon/pkg/logger" "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" @@ -22,8 +21,8 @@ const ( ) type serDe struct { - serializer serialization.SerializeFunc - deserializer deserialization.DeserializeFunc + serializer serde.SerializeFunc + deserializer serde.DeserializeFunc } type Handler struct { serDeMap map[string]*serDe @@ -34,13 +33,13 @@ type Handler struct { func NewHandler(collector collector.Collector) *Handler { serDeMap := make(map[string]*serDe) serDeMap[ContentJSON] = &serDe{ - serializer: serialization.SerializeJSON, - deserializer: deserialization.DeserializeJSON, + serializer: serde.SerializeJSON, + deserializer: serde.DeserializeJSON, } serDeMap[ContentProto] = &serDe{ - serializer: serialization.SerializeProto, - deserializer: deserialization.DeserializeProto, + serializer: serde.SerializeProto, + deserializer: serde.DeserializeProto, } return &Handler{ serDeMap: serDeMap, @@ -57,20 +56,20 @@ func (h *Handler) RESTAPIHandler(rw http.ResponseWriter, r *http.Request) { SendEventResponse: &pb.SendEventResponse{}, } - serde, ok := h.serDeMap[contentType] + sd, ok := h.serDeMap[contentType] if !ok { metrics.Increment("batches_read_total", map[string]string{"status": "failed", "reason": "unknowncontentype", "conn_group": "NA"}) logger.Errorf("[rest.GetRESTAPIHandler] invalid content type %s", contentType) rw.WriteHeader(http.StatusBadRequest) _, err := res.SetCode(pb.Code_CODE_BAD_REQUEST).SetStatus(pb.Status_STATUS_ERROR).SetReason("invalid content type"). 
- SetSentTime(time.Now().Unix()).Write(rw, serialization.SerializeJSON) + SetSentTime(time.Now().Unix()).Write(rw, serde.SerializeJSON) if err != nil { logger.Errorf("[rest.GetRESTAPIHandler] error sending response: %v", err) } return } - d, s := serde.deserializer, serde.serializer + d, s := sd.deserializer, sd.serializer var group string group = r.Header.Get(config.Server.Websocket.Conn.GroupHeader) @@ -126,7 +125,7 @@ func (h *Handler) RESTAPIHandler(rw http.ResponseWriter, r *http.Request) { <-resChannel } -func (h *Handler) Ack(rw http.ResponseWriter, resChannel chan struct{}, s serialization.SerializeFunc, reqGuid string, connGroup string) collector.AckFunc { +func (h *Handler) Ack(rw http.ResponseWriter, resChannel chan struct{}, s serde.SerializeFunc, reqGuid string, connGroup string) collector.AckFunc { res := &Response{ SendEventResponse: &pb.SendEventResponse{}, } diff --git a/server/rest/response.go b/server/rest/response.go index ef98eb6b..3e43a3de 100644 --- a/server/rest/response.go +++ b/server/rest/response.go @@ -3,7 +3,7 @@ package rest import ( "io" - "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/core/serde" pb "github.com/raystack/raccoon/proto" ) @@ -36,7 +36,7 @@ func (r *Response) SetDataMap(data map[string]string) *Response { return r } -func (r *Response) Write(w io.Writer, s serialization.SerializeFunc) (int, error) { +func (r *Response) Write(w io.Writer, s serde.SerializeFunc) (int, error) { b, err := s(r) if err != nil { return 0, err diff --git a/server/rest/response_test.go b/server/rest/response_test.go index ce1b11b1..a9feed3a 100644 --- a/server/rest/response_test.go +++ b/server/rest/response_test.go @@ -7,7 +7,7 @@ import ( "testing" "time" - "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/core/serde" pb "github.com/raystack/raccoon/proto" ) @@ -233,7 +233,7 @@ func TestResponse_Write(t *testing.T) { SendEventResponse *pb.SendEventResponse } type args struct { - s serialization.SerializeFunc + s serde.SerializeFunc } tests := []struct { name string diff --git a/server/rest/websocket/ack.go b/server/rest/websocket/ack.go index 81d6579d..bd992661 100644 --- a/server/rest/websocket/ack.go +++ b/server/rest/websocket/ack.go @@ -3,7 +3,7 @@ package websocket import ( "time" - "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/core/serde" "github.com/raystack/raccoon/pkg/metrics" "github.com/raystack/raccoon/server/rest/websocket/connection" ) @@ -15,7 +15,7 @@ type AckInfo struct { RequestGuid string Err error Conn connection.Conn - serializer serialization.SerializeFunc + serializer serde.SerializeFunc TimeConsumed time.Time AckTimeConsumed time.Time } diff --git a/server/rest/websocket/handler.go b/server/rest/websocket/handler.go index 7a2c4a69..d50eaa09 100644 --- a/server/rest/websocket/handler.go +++ b/server/rest/websocket/handler.go @@ -8,8 +8,7 @@ import ( "github.com/gorilla/websocket" "github.com/raystack/raccoon/config" "github.com/raystack/raccoon/core/collector" - "github.com/raystack/raccoon/core/deserialization" - "github.com/raystack/raccoon/core/serialization" + "github.com/raystack/raccoon/core/serde" "github.com/raystack/raccoon/pkg/logger" "github.com/raystack/raccoon/pkg/metrics" pb "github.com/raystack/raccoon/proto" @@ -17,8 +16,8 @@ import ( ) type serDe struct { - serializer serialization.SerializeFunc - deserializer deserialization.DeserializeFunc + serializer serde.SerializeFunc + deserializer serde.DeserializeFunc } type Handler struct 
{ upgrader *connection.Upgrader @@ -30,13 +29,13 @@ type Handler struct { func getSerDeMap() map[int]*serDe { serDeMap := make(map[int]*serDe) serDeMap[websocket.BinaryMessage] = &serDe{ - serializer: serialization.SerializeProto, - deserializer: deserialization.DeserializeProto, + serializer: serde.SerializeProto, + deserializer: serde.DeserializeProto, } serDeMap[websocket.TextMessage] = &serDe{ - serializer: serialization.SerializeJSON, - deserializer: deserialization.DeserializeJSON, + serializer: serde.SerializeJSON, + deserializer: serde.DeserializeJSON, } return serDeMap } @@ -124,7 +123,7 @@ func (h *Handler) HandlerWSEvents(w http.ResponseWriter, r *http.Request) { } } -func (h *Handler) Ack(conn connection.Conn, resChannel chan AckInfo, s serialization.SerializeFunc, messageType int, reqGuid string, timeConsumed time.Time) collector.AckFunc { +func (h *Handler) Ack(conn connection.Conn, resChannel chan AckInfo, s serde.SerializeFunc, messageType int, reqGuid string, timeConsumed time.Time) collector.AckFunc { switch config.Event.Ack { case config.AckTypeAsync: writeSuccessResponse(conn, s, messageType, reqGuid) @@ -159,7 +158,7 @@ func (h *Handler) sendEventCounters(events []*pb.Event, group string) { } } -func writeSuccessResponse(conn connection.Conn, serialize serialization.SerializeFunc, messageType int, requestGUID string) { +func writeSuccessResponse(conn connection.Conn, serialize serde.SerializeFunc, messageType int, requestGUID string) { response := &pb.SendEventResponse{ Status: pb.Status_STATUS_SUCCESS, Code: pb.Code_CODE_OK, @@ -173,7 +172,7 @@ func writeSuccessResponse(conn connection.Conn, serialize serialization.Serializ conn.WriteMessage(messageType, success) } -func writeBadRequestResponse(conn connection.Conn, serialize serialization.SerializeFunc, messageType int, reqGuid string, err error) { +func writeBadRequestResponse(conn connection.Conn, serialize serde.SerializeFunc, messageType int, reqGuid string, err error) { response := &pb.SendEventResponse{ Status: pb.Status_STATUS_ERROR, Code: pb.Code_CODE_BAD_REQUEST, @@ -188,7 +187,7 @@ func writeBadRequestResponse(conn connection.Conn, serialize serialization.Seria conn.WriteMessage(messageType, failure) } -func writeFailedResponse(conn connection.Conn, serialize serialization.SerializeFunc, messageType int, reqGuid string, err error) { +func writeFailedResponse(conn connection.Conn, serialize serde.SerializeFunc, messageType int, reqGuid string, err error) { response := &pb.SendEventResponse{ Status: pb.Status_STATUS_ERROR, Code: pb.Code_CODE_INTERNAL_ERROR, From bd14ba45e604a0dbde3f19a8b0dce817b9596c17 Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 9 Sep 2024 21:21:54 +0530 Subject: [PATCH 64/66] docs: fix broken dashboard link --- docs/docs/guides/monitoring.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/guides/monitoring.md b/docs/docs/guides/monitoring.md index 974b87de..f6ef4d24 100644 --- a/docs/docs/guides/monitoring.md +++ b/docs/docs/guides/monitoring.md @@ -157,7 +157,7 @@ Raccoon provides fine-grained metrics that denote latency. That gives clues as t ### Dashboard -There is a pre-built [grafana dashboard](https://github.com/raystack/raccoon/tree/dashboards/raccoon.json) available with support for Prometheus data source. +There is a pre-built [grafana dashboard](https://github.com/raystack/raccoon/blob/main/grafana.json) available with support for Prometheus data source. 
If you're running the statsd + telegraf setup, you can configure telegraf to push metrics to Prometheus. From c32b74dd70dc9fd2f583ec4be4541de424debf5e Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 9 Sep 2024 21:23:46 +0530 Subject: [PATCH 65/66] serde: test: fix broken references --- core/serde/proto_test.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/core/serde/proto_test.go b/core/serde/proto_test.go index cce91497..a03804b4 100644 --- a/core/serde/proto_test.go +++ b/core/serde/proto_test.go @@ -4,7 +4,6 @@ import ( "testing" "github.com/alecthomas/assert" - "github.com/raystack/raccoon/core/serialization" pb "github.com/raystack/raccoon/proto" ) @@ -50,12 +49,12 @@ func TestProtoDeserilizer_Deserialize(t *testing.T) { func TestSerialiseProto(t *testing.T) { t.Run("should return an error if argument is a non-protobuf message", func(t *testing.T) { arg := struct{}{} - _, err := serialization.SerializeProto(arg) - assert.Equal(t, err, serialization.ErrInvalidProtoMessage) + _, err := SerializeProto(arg) + assert.Equal(t, err, ErrInvalidProtoMessage) }) t.Run("should serialize a proto message", func(t *testing.T) { v := &pb.SendEventRequest{} - _, err := serialization.SerializeProto(v) + _, err := SerializeProto(v) assert.Nil(t, err) }) } From 939727d91528ddae012d64c0cdca81c235a56cae Mon Sep 17 00:00:00 2001 From: turtleDev Date: Mon, 9 Sep 2024 21:49:23 +0530 Subject: [PATCH 66/66] refactor: integrate middleware package into server/rest --- cmd/server.go | 2 -- pkg/middleware/cors.go | 31 ------------------------------- pkg/middleware/util.go | 11 ----------- server/rest/service.go | 21 ++++++++++++++++----- 4 files changed, 16 insertions(+), 49 deletions(-) delete mode 100644 pkg/middleware/cors.go delete mode 100644 pkg/middleware/util.go diff --git a/cmd/server.go b/cmd/server.go index c64eab69..ae4e684a 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -9,7 +9,6 @@ import ( "github.com/raystack/raccoon/config" "github.com/raystack/raccoon/pkg/logger" "github.com/raystack/raccoon/pkg/metrics" - "github.com/raystack/raccoon/pkg/middleware" "github.com/spf13/cobra" "github.com/spf13/pflag" ) @@ -24,7 +23,6 @@ func serverCommand() *cobra.Command { if err != nil { return err } - middleware.Load() metrics.Setup() defer metrics.Close() logger.SetLevel(config.Log.Level) diff --git a/pkg/middleware/cors.go b/pkg/middleware/cors.go deleted file mode 100644 index a8c904e4..00000000 --- a/pkg/middleware/cors.go +++ /dev/null @@ -1,31 +0,0 @@ -package middleware - -import ( - "net/http" - - "github.com/gorilla/handlers" - "github.com/raystack/raccoon/config" -) - -var cors func(http.Handler) http.Handler - -func loadCors() { - if config.Server.CORS.Enabled { - opts := []handlers.CORSOption{handlers.AllowedHeaders(config.Server.CORS.AllowedHeaders), - handlers.AllowedMethods(config.Server.CORS.AllowedMethods), - handlers.AllowedOrigins(config.Server.CORS.AllowedOrigin)} - if config.Server.CORS.AllowCredentials { - opts = append(opts, handlers.AllowCredentials()) - } - if config.Server.CORS.MaxAge > 0 { - opts = append(opts, handlers.MaxAge(config.Server.CORS.MaxAge)) - } - cors = handlers.CORS(opts...) 
- } else { - cors = func(h http.Handler) http.Handler { return h } - } -} - -func GetCors() func(http.Handler) http.Handler { - return cors -} diff --git a/pkg/middleware/util.go b/pkg/middleware/util.go deleted file mode 100644 index 2fbf889f..00000000 --- a/pkg/middleware/util.go +++ /dev/null @@ -1,11 +0,0 @@ -package middleware - -var loaded bool - -func Load() { - if loaded { - return - } - loadCors() - loaded = true -} diff --git a/server/rest/service.go b/server/rest/service.go index 569ae76a..6252cf7d 100644 --- a/server/rest/service.go +++ b/server/rest/service.go @@ -5,11 +5,11 @@ import ( "net/http" "time" + "github.com/gorilla/handlers" "github.com/gorilla/mux" "github.com/raystack/raccoon/config" "github.com/raystack/raccoon/core/collector" "github.com/raystack/raccoon/pkg/metrics" - "github.com/raystack/raccoon/pkg/middleware" "github.com/raystack/raccoon/server/rest/websocket" "github.com/raystack/raccoon/server/rest/websocket/connection" ) @@ -38,7 +38,7 @@ func NewRestService(c collector.Collector) *Service { subRouter.HandleFunc("/events", restHandler.RESTAPIHandler).Methods(http.MethodPost).Name("events") server := &http.Server{ - Handler: applyMiddleware(router), + Handler: withCORS(router), Addr: ":" + config.Server.Websocket.AppPort, } return &Service{ @@ -47,9 +47,20 @@ func NewRestService(c collector.Collector) *Service { } } -func applyMiddleware(router http.Handler) http.Handler { - h := middleware.GetCors()(router) - return h +func withCORS(router http.Handler) http.Handler { + if !config.Server.CORS.Enabled { + return router + } + opts := []handlers.CORSOption{handlers.AllowedHeaders(config.Server.CORS.AllowedHeaders), + handlers.AllowedMethods(config.Server.CORS.AllowedMethods), + handlers.AllowedOrigins(config.Server.CORS.AllowedOrigin)} + if config.Server.CORS.AllowCredentials { + opts = append(opts, handlers.AllowCredentials()) + } + if config.Server.CORS.MaxAge > 0 { + opts = append(opts, handlers.MaxAge(config.Server.CORS.MaxAge)) + } + return handlers.CORS(opts...)(router) } func pingHandler(w http.ResponseWriter, r *http.Request) {
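For reference, a minimal sketch of how the merged core/serde package introduced in this series is consumed. It is illustrative only and not part of any patch above: it uses just the identifiers visible in these diffs (serde.SerializeProto, serde.DeserializeProto, serde.ErrInvalidProtoMessage, pb.SendEventRequest); the main wrapper and the printed output are assumed for the example. The REST and websocket handlers do the same pairing per content type / message type through their serDe maps.

package main

import (
	"fmt"

	"github.com/raystack/raccoon/core/serde"
	pb "github.com/raystack/raccoon/proto"
)

func main() {
	// Serialize a request the way the proto content-type / binary websocket path does.
	// Non-proto values would be rejected with serde.ErrInvalidProtoMessage.
	payload, err := serde.SerializeProto(&pb.SendEventRequest{})
	if err != nil {
		panic(err)
	}

	// Deserialize it back with the matching DeserializeFunc.
	var decoded pb.SendEventRequest
	if err := serde.DeserializeProto(payload, &decoded); err != nil {
		panic(err)
	}
	fmt.Printf("round-tripped %d bytes\n", len(payload))
}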