From 890f9a311741dbe62cdc854a695a02e485dc88ce Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Fri, 7 Mar 2025 23:36:26 +0100 Subject: [PATCH 01/26] Use exporter.ExportableEvent instead of Any Signed-off-by: Thomas Poignant --- exporter/data_exporter.go | 6 +++--- exporter/event_store.go | 10 ++++----- exporter/exportable_event.go | 5 +++++ exporter/manager.go | 6 +++--- exporter/tracking_event.go | 39 +++++++++++++++++++++++++++++++++++ testutils/mock/event_store.go | 4 ++-- 6 files changed, 57 insertions(+), 13 deletions(-) create mode 100644 exporter/exportable_event.go create mode 100644 exporter/tracking_event.go diff --git a/exporter/data_exporter.go b/exporter/data_exporter.go index 0a01a3bfd74..50fb3d3f469 100644 --- a/exporter/data_exporter.go +++ b/exporter/data_exporter.go @@ -15,7 +15,7 @@ const ( defaultMaxEventInMemory = int64(100000) ) -type DataExporter[T any] interface { +type DataExporter[T ExportableEvent] interface { // Start is launching the ticker to periodically flush the data Start() // Stop is stopping the ticker @@ -35,7 +35,7 @@ type Config struct { MaxEventInMemory int64 } -type dataExporterImpl[T any] struct { +type dataExporterImpl[T ExportableEvent] struct { ctx context.Context consumerID string eventStore *EventStore[T] @@ -48,7 +48,7 @@ type dataExporterImpl[T any] struct { // NewDataExporter create a new DataExporter with the given exporter and his consumer information to consume the data // from the shared event store. -func NewDataExporter[T any](ctx context.Context, exporter Config, consumerID string, +func NewDataExporter[T ExportableEvent](ctx context.Context, exporter Config, consumerID string, eventStore *EventStore[T], logger *fflog.FFLogger) DataExporter[T] { if ctx == nil { ctx = context.Background() diff --git a/exporter/event_store.go b/exporter/event_store.go index 40f9e7d154e..3f73d06456d 100644 --- a/exporter/event_store.go +++ b/exporter/event_store.go @@ -10,7 +10,7 @@ import ( const minOffset = int64(math.MinInt64) -type eventStoreImpl[T any] struct { +type eventStoreImpl[T ExportableEvent] struct { // events is a list of events to store events []Event[T] // mutex to protect the events and consumers @@ -25,7 +25,7 @@ type eventStoreImpl[T any] struct { cleanQueueInterval time.Duration } -func NewEventStore[T any](cleanQueueInterval time.Duration) EventStore[T] { +func NewEventStore[T ExportableEvent](cleanQueueInterval time.Duration) EventStore[T] { store := &eventStoreImpl[T]{ events: make([]Event[T], 0), mutex: sync.RWMutex{}, @@ -38,7 +38,7 @@ func NewEventStore[T any](cleanQueueInterval time.Duration) EventStore[T] { return store } -type EventList[T any] struct { +type EventList[T ExportableEvent] struct { Events []T InitialOffset int64 NewOffset int64 @@ -46,7 +46,7 @@ type EventList[T any] struct { // EventStore is the interface to store events and consume them. // It is a simple implementation of a queue with offsets. -type EventStore[T any] interface { +type EventStore[T ExportableEvent] interface { // AddConsumer is adding a new consumer to the Event store. // note that you can't add a consumer after the Event store has been started. 
 	AddConsumer(consumerID string)
@@ -68,7 +68,7 @@ type EventStore[T any] interface {
 	Stop()
 }
 
-type Event[T any] struct {
+type Event[T ExportableEvent] struct {
 	Offset int64
 	Data   T
 }
diff --git a/exporter/exportable_event.go b/exporter/exportable_event.go
new file mode 100644
index 00000000000..e9cc2427af1
--- /dev/null
+++ b/exporter/exportable_event.go
@@ -0,0 +1,5 @@
+package exporter
+
+type ExportableEvent interface {
+	// No methods here
+}
diff --git a/exporter/manager.go b/exporter/manager.go
index 85c931dd6a5..dfdb912cd8a 100644
--- a/exporter/manager.go
+++ b/exporter/manager.go
@@ -10,19 +10,19 @@ import (
 
 const DefaultExporterCleanQueueInterval = 1 * time.Minute
 
-type Manager[T any] interface {
+type Manager[T ExportableEvent] interface {
 	AddEvent(event T)
 	Start()
 	Stop()
 }
 
-type managerImpl[T any] struct {
+type managerImpl[T ExportableEvent] struct {
 	logger     *fflog.FFLogger
 	consumers  []DataExporter[T]
 	eventStore *EventStore[T]
}
 
-func NewManager[T any](ctx context.Context, exporters []Config,
+func NewManager[T ExportableEvent](ctx context.Context, exporters []Config,
 	exporterCleanQueueInterval time.Duration, logger *fflog.FFLogger) Manager[T] {
 	if ctx == nil {
 		ctx = context.Background()
 	}
diff --git a/exporter/tracking_event.go b/exporter/tracking_event.go
new file mode 100644
index 00000000000..e4362a9ed15
--- /dev/null
+++ b/exporter/tracking_event.go
@@ -0,0 +1,39 @@
+package exporter
+
+import (
+	"github.com/thomaspoignant/go-feature-flag/ffcontext"
+)
+
+type TrackingEventDetails = map[string]interface{}
+
+// TrackingEvent represents an event that we store in the data storage.
+// nolint:lll
+type TrackingEvent struct {
+	// Kind for a feature event is feature.
+	// A feature event will only be generated if the trackEvents attribute of the flag is set to true.
+	Kind string `json:"kind" example:"feature" parquet:"name=kind, type=BYTE_ARRAY, convertedtype=UTF8"`
+
+	// ContextKind is the kind of context which generated an event. This will only be "anonymousUser" for events generated
+	// on behalf of an anonymous user or the reserved word "user" for events generated on behalf of a non-anonymous user.
+	ContextKind string `json:"contextKind,omitempty" example:"user" parquet:"name=contextKind, type=BYTE_ARRAY, convertedtype=UTF8"`
+
+	// UserKey The key of the user object used in a feature flag evaluation. Details for the user object used in a feature
+	// flag evaluation as reported by the "feature" event are transmitted periodically with a separate index event.
+	UserKey string `json:"userKey" example:"94a25909-20d8-40cc-8500-fee99b569345" parquet:"name=userKey, type=BYTE_ARRAY, convertedtype=UTF8"`
+
+	// CreationDate When the feature flag was requested at Unix epoch time in milliseconds.
+	CreationDate int64 `json:"creationDate" example:"1680246000011" parquet:"name=creationDate, type=INT64"`
+
+	// Key of the feature flag requested.
+	Key string `json:"key" example:"my-feature-flag" parquet:"name=key, type=BYTE_ARRAY, convertedtype=UTF8"`
+
+	// Source indicates where the event was generated.
+	// This is set to SERVER when the event was evaluated in the relay-proxy and PROVIDER_CACHE when it is evaluated from the cache.
+ Source string `json:"source" example:"SERVER" parquet:"name=source, type=BYTE_ARRAY, convertedtype=UTF8"` + + // TODO: + EvaluationContext ffcontext.EvaluationContext `json:"evaluationContext" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` + + // TODO: + TrackingDetails TrackingEventDetails `json:"trackingEventDetails" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` +} diff --git a/testutils/mock/event_store.go b/testutils/mock/event_store.go index a53b823122f..788f8a8f0c4 100644 --- a/testutils/mock/event_store.go +++ b/testutils/mock/event_store.go @@ -9,11 +9,11 @@ import ( const consumerNameError = "error" -type implMockEventStore[T any] struct { +type implMockEventStore[T exporter.ExportableEvent] struct { store []T } -func NewEventStore[T any]() exporter.EventStore[T] { +func NewEventStore[T exporter.ExportableEvent]() exporter.EventStore[T] { store := &implMockEventStore[T]{} return store } From 0024cd57698a34d8b909c4e4b2d7535938913743 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Sat, 8 Mar 2025 00:39:27 +0100 Subject: [PATCH 02/26] feat: Exporter support of TrackingEvents Use generics to be able to support FeatureEvents and TrackingEvents with the exporters. Signed-off-by: Thomas Poignant --- exporter/azureexporter/exporter.go | 2 +- exporter/azureexporter/exporter_test.go | 42 ++++----- exporter/common.go | 13 --- exporter/common_test.go | 12 +-- exporter/data_exporter.go | 16 +++- exporter/data_exporter_test.go | 7 +- exporter/even_store_test.go | 98 +++++++++++---------- exporter/event_store.go | 8 +- exporter/exportable_event.go | 15 +++- exporter/exporter.go | 10 ++- exporter/feature_event.go | 53 +++++++++-- exporter/feature_event_test.go | 9 +- exporter/fileexporter/exporter.go | 41 ++++++--- exporter/fileexporter/exporter_test.go | 87 +++++++++--------- exporter/gcstorageexporter/exporter.go | 4 +- exporter/gcstorageexporter/exporter_test.go | 40 ++++----- exporter/kafkaexporter/exporter.go | 10 +-- exporter/kafkaexporter/exporter_test.go | 40 ++++----- exporter/kinesisexporter/exporter.go | 9 +- exporter/kinesisexporter/exporter_test.go | 65 ++++++++------ exporter/logsexporter/exporter.go | 16 +--- exporter/logsexporter/exporter_test.go | 20 ++--- exporter/pubsubexporter/exporter.go | 4 +- exporter/pubsubexporter/exporter_test.go | 40 ++++----- exporter/s3exporter/exporter.go | 10 +-- exporter/s3exporter/exporter_test.go | 42 ++++----- exporter/s3exporterv2/exporter.go | 8 +- exporter/s3exporterv2/exporter_test.go | 52 +++++------ exporter/sqsexporter/exporter.go | 6 +- exporter/sqsexporter/exporter_test.go | 32 +++---- exporter/tracking_event.go | 14 +++ exporter/webhookexporter/exporter.go | 6 +- exporter/webhookexporter/exporter_test.go | 36 ++++---- testutils/exporter.go | 31 +++++++ 34 files changed, 511 insertions(+), 387 deletions(-) create mode 100644 testutils/exporter.go diff --git a/exporter/azureexporter/exporter.go b/exporter/azureexporter/exporter.go index b409058e3cb..b98e15af891 100644 --- a/exporter/azureexporter/exporter.go +++ b/exporter/azureexporter/exporter.go @@ -52,7 +52,7 @@ func (f *Exporter) initializeAzureClient() (*azblob.Client, error) { return azblob.NewClientWithSharedKeyCredential(url, cred, nil) } -func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(ctx 
context.Context, logger *fflog.FFLogger, featureEvents []exporter.ExportableEvent) error { if f.AccountName == "" { return fmt.Errorf("you should specify an AccountName. %v is invalid", f.AccountName) } diff --git a/exporter/azureexporter/exporter_test.go b/exporter/azureexporter/exporter_test.go index f351eb0d8ea..e881067f145 100644 --- a/exporter/azureexporter/exporter_test.go +++ b/exporter/azureexporter/exporter_test.go @@ -26,7 +26,7 @@ func TestAzureBlobStorage_Export(t *testing.T) { tests := []struct { name string exporter azureexporter.Exporter - events []exporter.FeatureEvent + events []exporter.ExportableEvent wantErr assert.ErrorAssertionFunc wantBlobName string }{ @@ -37,8 +37,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -54,8 +54,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -71,8 +71,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -88,8 +88,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -105,8 +105,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -122,8 +122,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -137,8 +137,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountName: azurite.AccountName, AccountKey: azurite.AccountKey, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -148,8 +148,8 @@ func TestAzureBlobStorage_Export(t 
*testing.T) { { name: "Should error with nil container", exporter: azureexporter.Exporter{}, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -161,8 +161,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { exporter: azureexporter.Exporter{ AccountName: "", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -176,8 +176,8 @@ func TestAzureBlobStorage_Export(t *testing.T) { AccountKey: azurite.AccountKey, Container: containerName, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, diff --git a/exporter/common.go b/exporter/common.go index 152173f2c89..62315fd3f33 100644 --- a/exporter/common.go +++ b/exporter/common.go @@ -2,7 +2,6 @@ package exporter import ( "bytes" - "encoding/json" "os" "strconv" "strings" @@ -44,15 +43,3 @@ func ComputeFilename(template *template.Template, format string) (string, error) }) return buf.String(), err } - -func FormatEventInCSV(csvTemplate *template.Template, event FeatureEvent) ([]byte, error) { - var buf bytes.Buffer - err := csvTemplate.Execute(&buf, event) - return buf.Bytes(), err -} - -func FormatEventInJSON(event FeatureEvent) ([]byte, error) { - b, err := json.Marshal(event) - b = append(b, []byte("\n")...) 
- return b, err -} diff --git a/exporter/common_test.go b/exporter/common_test.go index abbc784d7bd..de623cef729 100644 --- a/exporter/common_test.go +++ b/exporter/common_test.go @@ -141,11 +141,11 @@ func TestFormatEventInCSV(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := exporter.FormatEventInCSV(tt.args.csvTemplate, tt.args.event) - if !tt.wantErr(t, err, fmt.Sprintf("FormatEventInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event)) { + got, err := tt.args.event.FormatInCSV(tt.args.csvTemplate) + if !tt.wantErr(t, err, fmt.Sprintf("FormatInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event)) { return } - assert.Equalf(t, tt.want, string(got), "FormatEventInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event) + assert.Equalf(t, tt.want, string(got), "FormatInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event) }) } } @@ -172,11 +172,11 @@ func TestFormatEventInJSON(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := exporter.FormatEventInJSON(tt.args.event) - if !tt.wantErr(t, err, fmt.Sprintf("FormatEventInJSON(%v)", tt.args.event)) { + got, err := tt.args.event.FormatInJSON() + if !tt.wantErr(t, err, fmt.Sprintf("FormatInJSON(%v)", tt.args.event)) { return } - assert.Equalf(t, tt.want, string(got), "FormatEventInJSON(%v)", tt.args.event) + assert.Equalf(t, tt.want, string(got), "FormatInJSON(%v)", tt.args.event) }) } } diff --git a/exporter/data_exporter.go b/exporter/data_exporter.go index 50fb3d3f469..bcd891d24c8 100644 --- a/exporter/data_exporter.go +++ b/exporter/data_exporter.go @@ -135,14 +135,14 @@ func (d *dataExporterImpl[T]) sendEvents(ctx context.Context, events []T) error return nil } switch exp := d.exporter.Exporter.(type) { - case DeprecatedExporter: + case DeprecatedExporterV1: var legacyLogger *log.Logger if d.logger != nil { legacyLogger = d.logger.GetLogLogger(slog.LevelError) } switch events := any(events).(type) { case []FeatureEvent: - // use dc exporter as a DeprecatedExporter + // use dc exporter as a DeprecatedExporterV1 err := exp.Export(ctx, legacyLogger, events) slog.Warn("You are using an exporter with the old logger."+ "Please update your custom exporter to comply to the new Exporter interface.", @@ -154,7 +154,7 @@ func (d *dataExporterImpl[T]) sendEvents(ctx context.Context, events []T) error return fmt.Errorf("trying to send unknown object to the exporter (deprecated)") } break - case Exporter: + case DeprecatedExporterV2: switch events := any(events).(type) { case []FeatureEvent: err := exp.Export(ctx, d.logger, events) @@ -165,6 +165,16 @@ func (d *dataExporterImpl[T]) sendEvents(ctx context.Context, events []T) error return fmt.Errorf("trying to send unknown object to the exporter") } break + case Exporter: + exportableEvents := make([]ExportableEvent, len(events)) + for i, event := range events { + exportableEvents[i] = ExportableEvent(event) + } + err := exp.Export(ctx, d.logger, exportableEvents) + if err != nil { + return fmt.Errorf("error while exporting data: %w", err) + } + break default: return fmt.Errorf("this is not a valid exporter") } diff --git a/exporter/data_exporter_test.go b/exporter/data_exporter_test.go index 4d67c7c0234..fc9c3253703 100644 --- a/exporter/data_exporter_test.go +++ b/exporter/data_exporter_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/thomaspoignant/go-feature-flag/exporter" + "github.com/thomaspoignant/go-feature-flag/testutils" "github.com/thomaspoignant/go-feature-flag/testutils/mock" 
"github.com/thomaspoignant/go-feature-flag/testutils/slogutil" "github.com/thomaspoignant/go-feature-flag/utils/fflog" @@ -61,9 +62,9 @@ func TestDataExporterFlush_TriggerErrorIfNotKnowType(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - evStore := mock.NewEventStore[string]() + evStore := mock.NewEventStore[testutils.ExportableMockEvent]() for i := 0; i < 100; i++ { - evStore.Add("feature") + evStore.Add(testutils.NewExportableMockEvent("feature")) } logFile, _ := os.CreateTemp("", "") @@ -72,7 +73,7 @@ func TestDataExporterFlush_TriggerErrorIfNotKnowType(t *testing.T) { defer func() { _ = os.Remove(logFile.Name()) }() exporterMock := tt.exporter - exp := exporter.NewDataExporter[string](context.TODO(), exporter.Config{ + exp := exporter.NewDataExporter[testutils.ExportableMockEvent](context.TODO(), exporter.Config{ Exporter: exporterMock, FlushInterval: 0, MaxEventInMemory: 0, diff --git a/exporter/even_store_test.go b/exporter/even_store_test.go index 2be61cb5f38..8836c0a2b31 100644 --- a/exporter/even_store_test.go +++ b/exporter/even_store_test.go @@ -10,32 +10,33 @@ import ( "github.com/stretchr/testify/assert" "github.com/thomaspoignant/go-feature-flag/exporter" + "github.com/thomaspoignant/go-feature-flag/testutils" ) const defaultTestCleanQueueDuration = 100 * time.Millisecond func Test_ConsumerNameInvalid(t *testing.T) { t.Run("GetPendingEventCount: should return an error if the consumer name is invalid", func(t *testing.T) { - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) eventStore.AddConsumer("consumer1") defer eventStore.Stop() _, err := eventStore.GetPendingEventCount("wrong name") assert.NotNil(t, err) }) t.Run("ProcessPendingEvents: should return an error if the consumer name is invalid", func(t *testing.T) { - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) eventStore.AddConsumer("consumer1") defer eventStore.Stop() err := eventStore.ProcessPendingEvents( "wrong name", - func(ctx context.Context, events []string) error { return nil }) + func(ctx context.Context, events []testutils.ExportableMockEvent) error { return nil }) assert.NotNil(t, err) }) } func Test_SingleConsumer(t *testing.T) { consumerName := "consumer1" - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) eventStore.AddConsumer(consumerName) defer eventStore.Stop() got, _ := eventStore.GetPendingEventCount(consumerName) @@ -51,10 +52,11 @@ func Test_SingleConsumer(t *testing.T) { cancel() // stop producing // Consume - err := eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []string) error { - assert.Equal(t, 100, len(events)) - return nil - }) + err := eventStore.ProcessPendingEvents(consumerName, + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 100, len(events)) + return nil + }) assert.Nil(t, err) got, _ = eventStore.GetPendingEventCount(consumerName) assert.Equal(t, int64(0), got) @@ -67,10 +69,11 @@ func Test_SingleConsumer(t *testing.T) { got, _ = eventStore.GetPendingEventCount(consumerName) assert.Equal(t, int64(91), got) - err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events 
[]string) error { - assert.Equal(t, 91, len(events)) - return nil - }) + err = eventStore.ProcessPendingEvents(consumerName, + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 91, len(events)) + return nil + }) assert.Nil(t, err) time.Sleep(120 * time.Millisecond) // to wait until garbage collector remove the events @@ -79,7 +82,7 @@ func Test_SingleConsumer(t *testing.T) { func Test_MultipleConsumersSingleThread(t *testing.T) { consumerNames := []string{"consumer1", "consumer2"} - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) for _, name := range consumerNames { eventStore.AddConsumer(name) } @@ -95,10 +98,11 @@ func Test_MultipleConsumersSingleThread(t *testing.T) { consumer1Size, err := eventStore.GetPendingEventCount(consumerNames[0]) assert.Nil(t, err) assert.Equal(t, int64(1000), consumer1Size) - err = eventStore.ProcessPendingEvents(consumerNames[0], func(ctx context.Context, events []string) error { - assert.Equal(t, 1000, len(events)) - return nil - }) + err = eventStore.ProcessPendingEvents(consumerNames[0], + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 1000, len(events)) + return nil + }) assert.Nil(t, err) // Produce a second time @@ -117,16 +121,18 @@ func Test_MultipleConsumersSingleThread(t *testing.T) { assert.Equal(t, int64(2000), consumer2Size) // Consumer with Consumer1 and Consumer2 - err = eventStore.ProcessPendingEvents(consumerNames[0], func(ctx context.Context, events []string) error { - assert.Equal(t, 1000, len(events)) - return nil - }) + err = eventStore.ProcessPendingEvents(consumerNames[0], + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 1000, len(events)) + return nil + }) assert.Nil(t, err) - err = eventStore.ProcessPendingEvents(consumerNames[1], func(ctx context.Context, events []string) error { - assert.Equal(t, 2000, len(events)) - return nil - }) + err = eventStore.ProcessPendingEvents(consumerNames[1], + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 2000, len(events)) + return nil + }) assert.Nil(t, err) // Check garbage collector @@ -136,7 +142,7 @@ func Test_MultipleConsumersSingleThread(t *testing.T) { func Test_MultipleConsumersMultipleGORoutines(t *testing.T) { consumerNames := []string{"consumer1", "consumer2"} - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) for _, name := range consumerNames { eventStore.AddConsumer(name) } @@ -148,32 +154,34 @@ func Test_MultipleConsumersMultipleGORoutines(t *testing.T) { time.Sleep(50 * time.Millisecond) wg := &sync.WaitGroup{} - consumFunc := func(eventStore exporter.EventStore[string], consumerName string) { + consumeFunc := func(eventStore exporter.EventStore[testutils.ExportableMockEvent], consumerName string) { wg.Add(1) defer wg.Done() - err := eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []string) error { - assert.True(t, len(events) > 0) - return nil - }) + err := eventStore.ProcessPendingEvents(consumerName, + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.True(t, len(events) > 0) + return nil + }) assert.Nil(t, err) time.Sleep(50 * time.Millisecond) // we wait to be sure that the producer has 
produce new events - err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []string) error { - assert.True(t, len(events) > 0) - return nil - }) + err = eventStore.ProcessPendingEvents(consumerName, + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.True(t, len(events) > 0) + return nil + }) assert.Nil(t, err) } - go consumFunc(eventStore, consumerNames[0]) - go consumFunc(eventStore, consumerNames[1]) + go consumeFunc(eventStore, consumerNames[0]) + go consumeFunc(eventStore, consumerNames[1]) wg.Wait() } func Test_ProcessPendingEventInError(t *testing.T) { consumerName := "consumer1" - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) eventStore.AddConsumer(consumerName) defer eventStore.Stop() // start producer @@ -185,7 +193,7 @@ func Test_ProcessPendingEventInError(t *testing.T) { assert.Nil(t, err) // process is in error, so we are not able to update the offset - err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []string) error { + err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { assert.Equal(t, 1000, len(events)) return fmt.Errorf("error") }) @@ -197,13 +205,13 @@ func Test_ProcessPendingEventInError(t *testing.T) { assert.Nil(t, err) // process is not in error anymore - err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []string) error { + err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { assert.Equal(t, 1000, len(events)) return nil }) assert.Nil(t, err) - // we have consume all the items + // we have consumed all the items consumer1Size, err = eventStore.GetPendingEventCount(consumerName) assert.Equal(t, 0, int(consumer1Size)) assert.Nil(t, err) @@ -211,7 +219,7 @@ func Test_ProcessPendingEventInError(t *testing.T) { func Test_WaitForEmptyClean(t *testing.T) { consumerNames := []string{"consumer1"} - eventStore := exporter.NewEventStore[string](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) for _, name := range consumerNames { eventStore.AddConsumer(name) } @@ -220,7 +228,7 @@ func Test_WaitForEmptyClean(t *testing.T) { // start producer ctx := context.Background() startEventProducer(ctx, eventStore, 100, false) - err := eventStore.ProcessPendingEvents(consumerNames[0], func(ctx context.Context, events []string) error { + err := eventStore.ProcessPendingEvents(consumerNames[0], func(ctx context.Context, events []testutils.ExportableMockEvent) error { assert.Equal(t, 100, len(events)) return nil }) @@ -230,7 +238,7 @@ func Test_WaitForEmptyClean(t *testing.T) { assert.Equal(t, int64(0), eventStore.GetTotalEventCount()) } -func startEventProducer(ctx context.Context, eventStore exporter.EventStore[string], produceMax int, randomizeProducingTime bool) { +func startEventProducer(ctx context.Context, eventStore exporter.EventStore[testutils.ExportableMockEvent], produceMax int, randomizeProducingTime bool) { for i := 0; i < produceMax; i++ { select { case <-ctx.Done(): @@ -241,7 +249,7 @@ func startEventProducer(ctx context.Context, eventStore exporter.EventStore[stri randomNumber := rand.Intn(10) + 1 time.Sleep(time.Duration(randomNumber) * time.Millisecond) } - eventStore.Add("Hello") + 
			eventStore.Add(testutils.NewExportableMockEvent("Hello"))
 		}
 	}
 }
diff --git a/exporter/event_store.go b/exporter/event_store.go
index 3f73d06456d..88310d0ee71 100644
--- a/exporter/event_store.go
+++ b/exporter/event_store.go
@@ -12,7 +12,7 @@ const minOffset = int64(math.MinInt64)
 
 type eventStoreImpl[T ExportableEvent] struct {
 	// events is a list of events to store
-	events []Event[T]
+	events []EventStoreItem[T]
 	// mutex to protect the events and consumers
 	mutex sync.RWMutex
 	// consumers is a map of consumers with their name as key
@@ -27,7 +27,7 @@ type eventStoreImpl[T ExportableEvent] struct {
 
 func NewEventStore[T ExportableEvent](cleanQueueInterval time.Duration) EventStore[T] {
 	store := &eventStoreImpl[T]{
-		events:               make([]Event[T], 0),
+		events:               make([]EventStoreItem[T], 0),
 		mutex:                sync.RWMutex{},
 		lastOffset:           minOffset,
 		stopPeriodicCleaning: make(chan struct{}),
@@ -68,7 +68,7 @@ type EventStore[T ExportableEvent] interface {
 	Stop()
 }
 
-type Event[T ExportableEvent] struct {
+type EventStoreItem[T ExportableEvent] struct {
 	Offset int64
 	Data   T
 }
@@ -128,7 +128,7 @@ func (e *eventStoreImpl[T]) Add(data T) {
 	e.mutex.Lock()
 	defer e.mutex.Unlock()
 	e.lastOffset++
-	e.events = append(e.events, Event[T]{Offset: e.lastOffset, Data: data})
+	e.events = append(e.events, EventStoreItem[T]{Offset: e.lastOffset, Data: data})
 }
 
 // fetchPendingEvents is returning all the available item in the Event store for this consumer.
diff --git a/exporter/exportable_event.go b/exporter/exportable_event.go
index e9cc2427af1..c8e2d2bc40d 100644
--- a/exporter/exportable_event.go
+++ b/exporter/exportable_event.go
@@ -1,5 +1,18 @@
 package exporter
 
+import (
+	"text/template"
+)
+
 type ExportableEvent interface {
-	// No methods here
+	// GetUserKey returns the user key of the event.
+	GetUserKey() string
+	// GetKey returns the unique key for the event.
+	GetKey() string
+	// GetCreationDate returns the creationDate of the event.
+	GetCreationDate() int64
+	// FormatInCSV returns the event in CSV format.
+	FormatInCSV(csvTemplate *template.Template) ([]byte, error)
+	// FormatInJSON returns the event in JSON format.
+	FormatInJSON() ([]byte, error)
 }
diff --git a/exporter/exporter.go b/exporter/exporter.go
index 02690e45611..51e371a3cfa 100644
--- a/exporter/exporter.go
+++ b/exporter/exporter.go
@@ -7,19 +7,23 @@ import (
 	"github.com/thomaspoignant/go-feature-flag/utils/fflog"
 )
 
-// DeprecatedExporter is an interface to describe how an exporter looks like.
+// DeprecatedExporterV1 is an interface to describe how an exporter looks like.
 // Deprecated: use Exporter instead.
-type DeprecatedExporter interface {
+type DeprecatedExporterV1 interface {
 	CommonExporter
 	// Export will send the data to the exporter.
 	Export(context.Context, *log.Logger, []FeatureEvent) error
 }
 
+type DeprecatedExporterV2 interface {
+	CommonExporter
+	Export(context.Context, *fflog.FFLogger, []FeatureEvent) error
+}
+
 type Exporter interface {
 	CommonExporter
-	Export(context.Context, *fflog.FFLogger, []FeatureEvent) error
+	Export(context.Context, *fflog.FFLogger, []ExportableEvent) error
+}
 type CommonExporter interface {
 	// IsBulk return false if we should directly send the data as soon as it is produce
 	// and true if we collect the data to send them in bulk.
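
For reference, a minimal sketch of a custom exporter written against the new ExportableEvent-based interface above; the StdoutExporter type and its behaviour are hypothetical and not part of this patch series. Because it only relies on the ExportableEvent methods, the same implementation can receive both FeatureEvent and TrackingEvent values without a type switch.

package customexporter

import (
	"context"
	"fmt"

	"github.com/thomaspoignant/go-feature-flag/exporter"
	"github.com/thomaspoignant/go-feature-flag/utils/fflog"
)

// StdoutExporter is a hypothetical exporter used only to illustrate the new interface.
type StdoutExporter struct{}

// Export receives FeatureEvent and TrackingEvent values through the shared
// ExportableEvent interface and serializes each of them with FormatInJSON.
func (e *StdoutExporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error {
	for _, event := range events {
		line, err := event.FormatInJSON()
		if err != nil {
			return fmt.Errorf("error while formatting event %q: %w", event.GetKey(), err)
		}
		// FeatureEvent.FormatInJSON already appends a newline to the payload.
		fmt.Print(string(line))
	}
	return nil
}

// IsBulk returns true because this exporter accepts events in bulk.
func (e *StdoutExporter) IsBulk() bool {
	return true
}
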
diff --git a/exporter/feature_event.go b/exporter/feature_event.go index 61ad2bcd736..3c7920be3e1 100644 --- a/exporter/feature_event.go +++ b/exporter/feature_event.go @@ -1,7 +1,10 @@ package exporter import ( + "bytes" "encoding/json" + "fmt" + "text/template" "time" "github.com/thomaspoignant/go-feature-flag/ffcontext" @@ -83,15 +86,51 @@ type FeatureEvent struct { Metadata FeatureEventMetadata `json:"metadata,omitempty" parquet:"name=metadata, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` } -// MarshalInterface marshals all interface type fields in FeatureEvent into JSON-encoded string. -func (f *FeatureEvent) MarshalInterface() error { - if f == nil { - return nil +// GetKey returns the key of the event +func (f FeatureEvent) GetKey() string { + return f.Key +} + +// GetUserKey returns the user key of the event +func (f FeatureEvent) GetUserKey() string { + return f.UserKey +} + +// GetCreationDate returns the creationDate of the event. +func (f FeatureEvent) GetCreationDate() int64 { + return f.CreationDate +} + +func (f FeatureEvent) FormatInCSV(csvTemplate *template.Template) ([]byte, error) { + var buf bytes.Buffer + err := csvTemplate.Execute(&buf, struct { + FeatureEvent + FormattedDate string + }{ + FeatureEvent: f, + FormattedDate: time.Unix(f.GetCreationDate(), 0).Format(time.RFC3339), + }) + if err != nil { + return nil, err + } + return buf.Bytes(), nil + +} + +func (f FeatureEvent) FormatInJSON() ([]byte, error) { + b, err := json.Marshal(f) + b = append(b, []byte("\n")...) + return b, err +} + +// ConvertValueForParquet converts the value of the event to a string to be stored in a parquet file. +func (f FeatureEvent) ConvertValueForParquet() (string, error) { + if f.Value == nil { + return "", fmt.Errorf("no value to convert, returning empty string") } b, err := json.Marshal(f.Value) if err != nil { - return err + return "", err } - f.Value = string(b) - return nil + return string(b), nil } diff --git a/exporter/feature_event_test.go b/exporter/feature_event_test.go index 2f80df96a21..4dd320308dd 100644 --- a/exporter/feature_event_test.go +++ b/exporter/feature_event_test.go @@ -99,19 +99,16 @@ func TestFeatureEvent_MarshalInterface(t *testing.T) { }, wantErr: true, }, - { - name: "nil featureEvent", - featureEvent: nil, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if err := tt.featureEvent.MarshalInterface(); (err != nil) != tt.wantErr { + val, err := tt.featureEvent.ConvertValueForParquet() + if (err != nil) != tt.wantErr { t.Errorf("FeatureEvent.MarshalInterface() error = %v, wantErr %v", err, tt.wantErr) return } if tt.want != nil { - assert.Equal(t, tt.want, tt.featureEvent) + assert.Equal(t, tt.want.Value, val) } }) } diff --git a/exporter/fileexporter/exporter.go b/exporter/fileexporter/exporter.go index 232249fd440..e60f2681789 100644 --- a/exporter/fileexporter/exporter.go +++ b/exporter/fileexporter/exporter.go @@ -53,7 +53,7 @@ type Exporter struct { } // Export is saving a collection of events in a file. 
-func (f *Exporter) Export(_ context.Context, _ *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { // Parse the template only once f.initTemplates.Do(func() { f.csvTemplate = exporter.ParseTemplate("csvFormat", f.CsvTemplate, exporter.DefaultCsvTemplate) @@ -86,9 +86,9 @@ func (f *Exporter) Export(_ context.Context, _ *fflog.FFLogger, featureEvents [] } if f.Format == "parquet" { - return f.writeParquet(filePath, featureEvents) + return f.writeParquet(filePath, events) } - return f.writeFile(filePath, featureEvents) + return f.writeFile(filePath, events) } // IsBulk return false if we should directly send the data as soon as it is produce @@ -97,24 +97,24 @@ func (f *Exporter) IsBulk() bool { return true } -func (f *Exporter) writeFile(filePath string, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) writeFile(filePath string, events []exporter.ExportableEvent) error { file, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644) if err != nil { return err } defer file.Close() - for _, event := range featureEvents { + for _, event := range events { var line []byte var err error // Convert the line in the right format switch f.Format { case "csv": - line, err = exporter.FormatEventInCSV(f.csvTemplate, event) + line, err = event.FormatInCSV(f.csvTemplate) case "json": - line, err = exporter.FormatEventInJSON(event) + line, err = event.FormatInJSON() default: - line, err = exporter.FormatEventInJSON(event) + line, err = event.FormatInJSON() } // Handle error and write line into the file @@ -129,7 +129,24 @@ func (f *Exporter) writeFile(filePath string, featureEvents []exporter.FeatureEv return nil } -func (f *Exporter) writeParquet(filePath string, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) writeParquet(filePath string, events []exporter.ExportableEvent) error { + parquetFeatureEvents := make([]exporter.FeatureEvent, 0) + for _, event := range events { + switch ev := any(event).(type) { + case exporter.FeatureEvent: + parquetFeatureEvents = append(parquetFeatureEvents, ev) + break + default: + // do nothing + } + } + + // TODO: create same logic for TrackingEvents + + return f.writeParquetFeatureEvent(filePath, parquetFeatureEvents) +} + +func (f *Exporter) writeParquetFeatureEvent(filePath string, events []exporter.FeatureEvent) error { fw, err := local.NewLocalFileWriter(filePath) if err != nil { return err @@ -146,10 +163,12 @@ func (f *Exporter) writeParquet(filePath string, featureEvents []exporter.Featur pw.CompressionType = ct } - for _, event := range featureEvents { - if err := event.MarshalInterface(); err != nil { + for _, event := range events { + eventValue, err := event.ConvertValueForParquet() + if err != nil { return err } + event.Value = eventValue if err = pw.Write(event); err != nil { return fmt.Errorf("error while writing the export file: %v", err) } diff --git a/exporter/fileexporter/exporter_test.go b/exporter/fileexporter/exporter_test.go index 2b00e74f805..b0f29c9b65c 100644 --- a/exporter/fileexporter/exporter_test.go +++ b/exporter/fileexporter/exporter_test.go @@ -36,7 +36,7 @@ func TestFile_Export(t *testing.T) { } type args struct { logger *fflog.FFLogger - featureEvents []exporter.FeatureEvent + featureEvents []exporter.ExportableEvent } type expected struct { fileNameRegex string @@ -57,12 +57,12 @@ func TestFile_Export(t *testing.T) { wantErr: false, fields: fields{}, args: args{ - 
featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -80,12 +80,12 @@ func TestFile_Export(t *testing.T) { Format: "csv", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -104,12 +104,12 @@ func TestFile_Export(t *testing.T) { ParquetCompressionCodec: parquet.CompressionCodec_SNAPPY.String(), }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", Metadata: map[string]interface{}{"test": "test"}, }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -137,12 +137,12 @@ func TestFile_Export(t *testing.T) { CsvTemplate: "{{ .Kind}};{{ .ContextKind}}\n", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -161,8 +161,8 @@ func TestFile_Export(t *testing.T) { ParquetCompressionCodec: parquet.CompressionCodec_UNCOMPRESSED.String(), }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", @@ -207,12 +207,12 @@ func TestFile_Export(t *testing.T) { Filename: "{{ .Format}}-test-{{ .Timestamp}}", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -230,12 +230,12 @@ func TestFile_Export(t *testing.T) { Format: "xxx", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + 
featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -253,12 +253,12 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "non-existent-dir"), }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -276,12 +276,12 @@ func TestFile_Export(t *testing.T) { Filename: "{{ .InvalidField}}", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -296,12 +296,12 @@ func TestFile_Export(t *testing.T) { CsvTemplate: "{{ .Foo}}", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -316,8 +316,8 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "invalid-permissions-dir"), }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -342,8 +342,8 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "invalid-parent-dir"), }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -368,12 +368,12 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "dir-with-trailing-slash") + "/", }, args: args{ - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 
1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -446,15 +446,16 @@ func TestFile_Export(t *testing.T) { } func TestFile_IsBulk(t *testing.T) { - exporter := fileexporter.Exporter{} - assert.True(t, exporter.IsBulk(), "DeprecatedExporter is a bulk exporter") + e := fileexporter.Exporter{} + assert.True(t, e.IsBulk(), "DeprecatedExporterV1 is a bulk exporter") } func TestExportWithoutOutputDir(t *testing.T) { - featureEvents := []exporter.FeatureEvent{{ - Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", - Variation: "Default", Value: "YO", Default: false, Source: "SERVER", - }} + featureEvents := []exporter.ExportableEvent{ + exporter.FeatureEvent{ + Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", + Variation: "Default", Value: "YO", Default: false, Source: "SERVER", + }} filePrefix := "test-flag-variation-EXAMPLE-" e := fileexporter.Exporter{ diff --git a/exporter/gcstorageexporter/exporter.go b/exporter/gcstorageexporter/exporter.go index 48b38e2a9e1..7ea0243a2aa 100644 --- a/exporter/gcstorageexporter/exporter.go +++ b/exporter/gcstorageexporter/exporter.go @@ -54,7 +54,7 @@ func (f *Exporter) IsBulk() bool { } // Export is saving a collection of events in a file. -func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { // Init google storage client client, err := storage.NewClient(ctx, f.Options...) 
if err != nil { @@ -81,7 +81,7 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv CsvTemplate: f.CsvTemplate, ParquetCompressionCodec: f.ParquetCompressionCodec, } - err = fileExporter.Export(ctx, logger, featureEvents) + err = fileExporter.Export(ctx, logger, events) if err != nil { return err } diff --git a/exporter/gcstorageexporter/exporter_test.go b/exporter/gcstorageexporter/exporter_test.go index c66d07c88bc..53a6b53f369 100644 --- a/exporter/gcstorageexporter/exporter_test.go +++ b/exporter/gcstorageexporter/exporter_test.go @@ -31,7 +31,7 @@ func TestGoogleStorage_Export(t *testing.T) { tests := []struct { name string fields fields - events []exporter.FeatureEvent + events []exporter.ExportableEvent wantErr bool expectedName string }{ @@ -40,8 +40,8 @@ func TestGoogleStorage_Export(t *testing.T) { fields: fields{ Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -54,8 +54,8 @@ func TestGoogleStorage_Export(t *testing.T) { Path: "random/path", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -68,8 +68,8 @@ func TestGoogleStorage_Export(t *testing.T) { Format: "csv", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -83,8 +83,8 @@ func TestGoogleStorage_Export(t *testing.T) { CsvTemplate: "{{ .Kind}};{{ .ContextKind}}\n", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -98,8 +98,8 @@ func TestGoogleStorage_Export(t *testing.T) { Filename: "{{ .Format}}-test-{{ .Timestamp}}", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -112,8 +112,8 @@ func TestGoogleStorage_Export(t *testing.T) { Format: "xxx", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -125,8 +125,8 @@ func TestGoogleStorage_Export(t *testing.T) { fields: fields{ Format: "xxx", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -139,8 +139,8 @@ func TestGoogleStorage_Export(t *testing.T) { Filename: "{{ .InvalidField}}", Bucket: "test", }, - events: []exporter.FeatureEvent{ 
- { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -153,8 +153,8 @@ func TestGoogleStorage_Export(t *testing.T) { Format: "csv", CsvTemplate: "{{ .Foo}}", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -176,7 +176,7 @@ func TestGoogleStorage_Export(t *testing.T) { }, } - // init DeprecatedExporter + // init DeprecatedExporterV1 f := gcstorageexporter.Exporter{ Bucket: tt.fields.Bucket, Options: []option.ClientOption{ diff --git a/exporter/kafkaexporter/exporter.go b/exporter/kafkaexporter/exporter.go index ff84e0f5ab1..2d9ca251568 100644 --- a/exporter/kafkaexporter/exporter.go +++ b/exporter/kafkaexporter/exporter.go @@ -45,7 +45,7 @@ type Exporter struct { // Export will produce a message to the Kafka topic. The message's value will contain the event encoded in the // selected format. Messages are published synchronously and will error immediately on failure. -func (e *Exporter) Export(_ context.Context, _ *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (e *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { if e.sender == nil { err := e.initializeProducer() if err != nil { @@ -53,8 +53,8 @@ func (e *Exporter) Export(_ context.Context, _ *fflog.FFLogger, featureEvents [] } } - messages := make([]*sarama.ProducerMessage, 0, len(featureEvents)) - for _, event := range featureEvents { + messages := make([]*sarama.ProducerMessage, 0, len(events)) + for _, event := range events { data, err := e.formatMessage(event) if err != nil { return fmt.Errorf("format: %w", err) @@ -62,7 +62,7 @@ func (e *Exporter) Export(_ context.Context, _ *fflog.FFLogger, featureEvents [] messages = append(messages, &sarama.ProducerMessage{ Topic: e.Settings.Topic, - Key: sarama.StringEncoder(event.UserKey), + Key: sarama.StringEncoder(event.GetUserKey()), Value: sarama.ByteEncoder(data), }) } @@ -105,7 +105,7 @@ func (e *Exporter) initializeProducer() error { } // formatMessage returns the event encoded in the selected format. Will always use JSON for now. 
-func (e *Exporter) formatMessage(event exporter.FeatureEvent) ([]byte, error) { +func (e *Exporter) formatMessage(event exporter.ExportableEvent) ([]byte, error) { switch e.Format { case formatJSON: fallthrough diff --git a/exporter/kafkaexporter/exporter_test.go b/exporter/kafkaexporter/exporter_test.go index ef361b0d32d..a99c3a7721e 100644 --- a/exporter/kafkaexporter/exporter_test.go +++ b/exporter/kafkaexporter/exporter_test.go @@ -25,19 +25,19 @@ func (s *messageSenderMock) SendMessages(msgs []*sarama.ProducerMessage) error { func TestExporter_IsBulk(t *testing.T) { exp := Exporter{} - assert.False(t, exp.IsBulk(), "DeprecatedExporter is not a bulk exporter") + assert.False(t, exp.IsBulk(), "DeprecatedExporterV1 is not a bulk exporter") } func TestExporter_Export(t *testing.T) { const mockTopic = "mockTopic" tests := []struct { - name string - format string - dialer func(addrs []string, config *sarama.Config) (MessageSender, error) - featureEvents []exporter.FeatureEvent - wantErr bool - settings Settings + name string + format string + dialer func(addrs []string, config *sarama.Config) (MessageSender, error) + events []exporter.ExportableEvent + wantErr bool + settings Settings }{ { name: "should receive an error if dial failed", @@ -65,12 +65,12 @@ func TestExporter_Export(t *testing.T) { name: "should receive an event with a valid feature event", format: "json", wantErr: false, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCDEF", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -87,12 +87,12 @@ func TestExporter_Export(t *testing.T) { name: "should default to JSON format if none provided", format: "", // Should default to JSON and generate a valid message wantErr: false, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCDEF", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -109,12 +109,12 @@ func TestExporter_Export(t *testing.T) { name: "should return an error if the publisher is returning an error", format: "json", wantErr: true, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCDEF", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -144,7 +144,7 @@ func TestExporter_Export(t *testing.T) { } logger := &fflog.FFLogger{LeveledLogger: slog.Default()} - err := exp.Export(context.Background(), logger, tt.featureEvents) + err := exp.Export(context.Background(), logger, tt.events) if tt.wantErr { assert.Error(t, err) return @@ -152,12 +152,12 @@ func TestExporter_Export(t *testing.T) { assert.NoError(t, err) - 
want := make([]*sarama.ProducerMessage, len(tt.featureEvents)) - for index, event := range tt.featureEvents { + want := make([]*sarama.ProducerMessage, len(tt.events)) + for index, event := range tt.events { messageBody, _ := json.Marshal(event) want[index] = &sarama.ProducerMessage{ Topic: mockTopic, - Key: sarama.StringEncoder(event.UserKey), + Key: sarama.StringEncoder(event.GetUserKey()), Value: sarama.ByteEncoder(messageBody), } } diff --git a/exporter/kinesisexporter/exporter.go b/exporter/kinesisexporter/exporter.go index fafbba8ae81..2272e43b872 100644 --- a/exporter/kinesisexporter/exporter.go +++ b/exporter/kinesisexporter/exporter.go @@ -19,9 +19,8 @@ const ( Mb = 1024 * 1024 ) -var DefaultPartitionKey = func(context context.Context, _ exporter.FeatureEvent) string { +var DefaultPartitionKey = func(context context.Context, _ exporter.ExportableEvent) string { context.Value("feature") - return "default" } @@ -64,7 +63,7 @@ type Exporter struct { sender MessageSender } -type PartitionKeyFunc = func(context.Context, exporter.FeatureEvent) string +type PartitionKeyFunc = func(context.Context, exporter.ExportableEvent) string type Settings struct { StreamName *string @@ -145,7 +144,7 @@ func (e *Exporter) initializeProducer(ctx context.Context) error { return initErr } -func (e *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (e *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.ExportableEvent) error { err := e.initializeProducer(ctx) if err != nil { return fmt.Errorf("writer: %w", err) @@ -216,7 +215,7 @@ func (e *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv } // formatMessage returns the event encoded in the selected format. Will always use JSON for now. 
-func (e *Exporter) formatMessage(event exporter.FeatureEvent) ([]byte, error) { +func (e *Exporter) formatMessage(event exporter.ExportableEvent) ([]byte, error) { switch e.Format { case formatJSON: fallthrough diff --git a/exporter/kinesisexporter/exporter_test.go b/exporter/kinesisexporter/exporter_test.go index 2d0ac5dd964..6d6d0ad1a13 100644 --- a/exporter/kinesisexporter/exporter_test.go +++ b/exporter/kinesisexporter/exporter_test.go @@ -17,7 +17,7 @@ import ( func TestExporter_IsBulk(t *testing.T) { exp := Exporter{} - assert.False(t, exp.IsBulk(), "DeprecatedExporter is not a bulk exporter") + assert.False(t, exp.IsBulk(), "DeprecatedExporterV1 is not a bulk exporter") } func TestExporter_ExportBasicWithStreamName(t *testing.T) { @@ -34,10 +34,10 @@ func TestExporter_ExportBasicWithStreamName(t *testing.T) { err := exp.Export( context.Background(), logger, - []exporter.FeatureEvent{ - *NewFeatureEvent(), - *NewFeatureEvent(), - *NewFeatureEvent(), + []exporter.ExportableEvent{ + NewFeatureEvent(), + NewFeatureEvent(), + NewFeatureEvent(), }, ) @@ -67,10 +67,10 @@ func TestExporter_ExportBasicWithStreamArn(t *testing.T) { err := exp.Export( context.Background(), logger, - []exporter.FeatureEvent{ - *NewFeatureEvent(), - *NewFeatureEvent(), - *NewFeatureEvent(), + []exporter.ExportableEvent{ + NewFeatureEvent(), + NewFeatureEvent(), + NewFeatureEvent(), }, ) @@ -101,7 +101,7 @@ func TestExporter_ShouldRaiseErrorIfNoStreamIsSpecified(t *testing.T) { err := exp.Export( context.Background(), logger, - []exporter.FeatureEvent{*NewFeatureEvent()}, + []exporter.ExportableEvent{NewFeatureEvent()}, ) assert.Error(t, err) @@ -115,7 +115,7 @@ func TestExporter_ExportAWSConfigurationCustomisation(t *testing.T) { sender: &mock, Settings: NewSettings( WithStreamName("test-stream"), - WithPartitionKey(func(context.Context, exporter.FeatureEvent) string { + WithPartitionKey(func(context.Context, exporter.ExportableEvent) string { return "test-key" }), ), @@ -129,8 +129,8 @@ func TestExporter_ExportAWSConfigurationCustomisation(t *testing.T) { err := exp.Export( context.Background(), logger, - []exporter.FeatureEvent{ - *NewFeatureEvent(), + []exporter.ExportableEvent{ + NewFeatureEvent(), }, ) @@ -153,8 +153,8 @@ func TestExporter_ExportSenderError(t *testing.T) { err := exp.Export( context.Background(), logger, - []exporter.FeatureEvent{ - *NewFeatureEvent(), + []exporter.ExportableEvent{ + NewFeatureEvent(), }, ) @@ -164,28 +164,28 @@ func TestExporter_ExportSenderError(t *testing.T) { func TestExporterSettingsCreation(t *testing.T) { { settings := NewSettings() - assert.Equal(t, settings.PartitionKey(context.TODO(), *NewFeatureEvent()), "default") + assert.Equal(t, settings.PartitionKey(context.TODO(), NewFeatureEvent()), "default") assert.Nil(t, settings.StreamName) assert.Nil(t, settings.StreamArn) assert.Nil(t, settings.ExplicitHashKey) } { settings := NewSettings(WithStreamArn("test-stream-arn")) - assert.Equal(t, settings.PartitionKey(context.TODO(), *NewFeatureEvent()), "default") + assert.Equal(t, settings.PartitionKey(context.TODO(), NewFeatureEvent()), "default") assert.Nil(t, settings.StreamName) assert.Equal(t, *settings.StreamArn, "test-stream-arn") assert.Nil(t, settings.ExplicitHashKey) } { settings := NewSettings(WithStreamName("test-stream-name")) - assert.Equal(t, settings.PartitionKey(context.TODO(), *NewFeatureEvent()), "default") + assert.Equal(t, settings.PartitionKey(context.TODO(), NewFeatureEvent()), "default") assert.Equal(t, *settings.StreamName, "test-stream-name") 
 		assert.Nil(t, settings.StreamArn)
 		assert.Nil(t, settings.ExplicitHashKey)
 	}
 	{
 		settings := NewSettings(WithExplicitHashKey("test-explicit-hash-key"))
-		assert.Equal(t, settings.PartitionKey(context.TODO(), *NewFeatureEvent()), "default")
+		assert.Equal(t, settings.PartitionKey(context.TODO(), NewFeatureEvent()), "default")
 		assert.Nil(t, settings.StreamName)
 		assert.Nil(t, settings.StreamArn)
 		assert.Equal(t, *settings.ExplicitHashKey, "test-explicit-hash-key")
@@ -195,9 +195,9 @@ func TestExporterSettingsCreation(t *testing.T) {
 			WithStreamName("test-stream-name"),
 			WithStreamArn("test-stream-arn"),
 			WithExplicitHashKey("test-explicit-hash-key"),
-			WithPartitionKey(func(_ context.Context, _ exporter.FeatureEvent) string { return "non-default" }),
+			WithPartitionKey(func(_ context.Context, _ exporter.ExportableEvent) string { return "non-default" }),
 		)
-		assert.Equal(t, settings.PartitionKey(context.TODO(), *NewFeatureEvent()), "non-default")
+		assert.Equal(t, settings.PartitionKey(context.TODO(), NewFeatureEvent()), "non-default")
 		assert.Nil(t, settings.StreamName) // overwritten by streamArn
 		assert.Equal(t, *settings.StreamArn, "test-stream-arn")
 		assert.Equal(t, *settings.ExplicitHashKey, "test-explicit-hash-key")
@@ -213,7 +213,16 @@ func TestExporterSettingsCreation(t *testing.T) {
 }
 
 func TestHugeMessageExportFlow(t *testing.T) {
-	event := NewFeatureEvent()
+	event := exporter.FeatureEvent{
+		Kind: "feature",
+		ContextKind: "anonymousUser",
+		UserKey: "ABCD",
+		CreationDate: 1617970547,
+		Key: "random-key",
+		Variation: "Default",
+		Value: "YO",
+		Default: false,
+	}
 	event.Value = string(make([]byte, Mb))
 
 	mock := MockKinesisSender{}
@@ -229,11 +238,11 @@ func TestHugeMessageExportFlow(t *testing.T) {
 	err := exp.Export(
 		context.Background(),
 		logger,
-		[]exporter.FeatureEvent{
-			*event,
-			*event,
-			*event,
-			*event,
+		[]exporter.ExportableEvent{
+			event,
+			event,
+			event,
+			event,
 		},
 	)
 
@@ -241,7 +250,7 @@ func TestHugeMessageExportFlow(t *testing.T) {
 	assert.Len(t, mock.PutRecordsInputs, 0)
 }
 
-func NewFeatureEvent() *exporter.FeatureEvent {
+func NewFeatureEvent() exporter.ExportableEvent {
 	return &exporter.FeatureEvent{
 		Kind: "feature",
 		ContextKind: "anonymousUser",
diff --git a/exporter/logsexporter/exporter.go b/exporter/logsexporter/exporter.go
index 4a2cc276dd2..0c7239807b3 100644
--- a/exporter/logsexporter/exporter.go
+++ b/exporter/logsexporter/exporter.go
@@ -1,11 +1,9 @@
 package logsexporter
 
 import (
-	"bytes"
 	"context"
 	"sync"
 	"text/template"
-	"time"
 
 	"github.com/thomaspoignant/go-feature-flag/exporter"
 	"github.com/thomaspoignant/go-feature-flag/utils/fflog"
@@ -31,27 +29,21 @@ type Exporter struct {
 }
 
 // Export is saving a collection of events in a file.
-func (f *Exporter) Export(_ context.Context, logger *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(_ context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { f.initTemplates.Do(func() { // Remove below after deprecation of Format if f.LogFormat == "" && f.Format != "" { f.LogFormat = f.Format } - f.logTemplate = exporter.ParseTemplate("logFormat", f.LogFormat, defaultLoggerFormat) }) - for _, event := range featureEvents { - var log bytes.Buffer - err := f.logTemplate.Execute(&log, struct { - exporter.FeatureEvent - FormattedDate string - }{FeatureEvent: event, FormattedDate: time.Unix(event.CreationDate, 0).Format(time.RFC3339)}) - - logger.Info(log.String()) + for _, event := range events { + log, err := event.FormatInCSV(f.logTemplate) if err != nil { return err } + logger.Info(string(log)) } return nil } diff --git a/exporter/logsexporter/exporter_test.go b/exporter/logsexporter/exporter_test.go index 593158151ee..86f76bc0d03 100644 --- a/exporter/logsexporter/exporter_test.go +++ b/exporter/logsexporter/exporter_test.go @@ -19,7 +19,7 @@ func TestLog_Export(t *testing.T) { LogFormat string } type args struct { - featureEvents []exporter.FeatureEvent + featureEvents []exporter.ExportableEvent } tests := []struct { name string @@ -31,8 +31,8 @@ func TestLog_Export(t *testing.T) { { name: "Default format", fields: fields{LogFormat: ""}, - args: args{featureEvents: []exporter.FeatureEvent{ - { + args: args{featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -44,8 +44,8 @@ func TestLog_Export(t *testing.T) { fields: fields{ LogFormat: "key=\"{{ .Key}}\"", }, - args: args{featureEvents: []exporter.FeatureEvent{ - { + args: args{featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -57,8 +57,8 @@ func TestLog_Export(t *testing.T) { fields: fields{ LogFormat: "key=\"{{ .Key}\" [{{ .FormattedDate}}]", }, - args: args{featureEvents: []exporter.FeatureEvent{ - { + args: args{featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -70,8 +70,8 @@ func TestLog_Export(t *testing.T) { fields: fields{ LogFormat: "key=\"{{ .UnknownKey}}\" [{{ .FormattedDate}}]", }, - args: args{featureEvents: []exporter.FeatureEvent{ - { + args: args{featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -96,7 +96,7 @@ func TestLog_Export(t *testing.T) { return } - assert.NoError(t, err, "DeprecatedExporter should not throw errors") + assert.NoError(t, err, "DeprecatedExporterV1 should not throw errors") logContent, _ := os.ReadFile(logFile.Name()) assert.Regexp(t, tt.expectedLog, string(logContent)) diff --git a/exporter/pubsubexporter/exporter.go b/exporter/pubsubexporter/exporter.go index 2d100f02d7a..a964cd33bc8 100644 --- a/exporter/pubsubexporter/exporter.go +++ b/exporter/pubsubexporter/exporter.go @@ -36,14 +36,14 @@ type Exporter 
struct { } // Export publishes a PubSub message for each exporter.FeatureEvent received. -func (e *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (e *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { if e.publisher == nil { if err := e.initPublisher(ctx); err != nil { return err } } - for _, event := range featureEvents { + for _, event := range events { messageBody, err := json.Marshal(event) if err != nil { return err diff --git a/exporter/pubsubexporter/exporter_test.go b/exporter/pubsubexporter/exporter_test.go index ff2a7c2499b..3b25813309f 100644 --- a/exporter/pubsubexporter/exporter_test.go +++ b/exporter/pubsubexporter/exporter_test.go @@ -54,10 +54,10 @@ func TestExporter_Export(t *testing.T) { newClientFunc func(context.Context, string, ...option.ClientOption) (*pubsub.Client, error) } tests := []struct { - name string - fields fields - featureEvents []exporter.FeatureEvent - wantErr bool + name string + fields fields + events []exporter.ExportableEvent + wantErr bool }{ { name: "should publish a single message with the feature event", @@ -65,8 +65,8 @@ func TestExporter_Export(t *testing.T) { topic: topic, newClientFunc: defaultNewClientFunc, }, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -78,12 +78,12 @@ func TestExporter_Export(t *testing.T) { topic: topic, newClientFunc: defaultNewClientFunc, }, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature1", ContextKind: "anonymousUser1", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key1", Variation: "Default", Value: "YO", Default: false, }, - { + exporter.FeatureEvent{ Kind: "feature2", ContextKind: "anonymousUser2", UserKey: "ABCDEF", CreationDate: 1617970527, Key: "random-key2", Variation: "Default", Value: "YO", Default: true, }, @@ -103,8 +103,8 @@ func TestExporter_Export(t *testing.T) { return client, nil }, }, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -117,8 +117,8 @@ func TestExporter_Export(t *testing.T) { newClientFunc: defaultNewClientFunc, publishSettings: &pubsub.PublishSettings{CountThreshold: 123}, }, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -131,8 +131,8 @@ func TestExporter_Export(t *testing.T) { newClientFunc: defaultNewClientFunc, enableMessageOrdering: true, }, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -154,8 +154,8 @@ func TestExporter_Export(t *testing.T) { topic: "not-existing-topic", newClientFunc: defaultNewClientFunc, }, - featureEvents: []exporter.FeatureEvent{ - { + events: 
[]exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -175,7 +175,7 @@ func TestExporter_Export(t *testing.T) { EnableMessageOrdering: tt.fields.enableMessageOrdering, newClientFunc: tt.fields.newClientFunc, } - err = e.Export(ctx, logger, tt.featureEvents) + err = e.Export(ctx, logger, tt.events) if tt.wantErr { assert.Error(t, err) @@ -183,7 +183,7 @@ func TestExporter_Export(t *testing.T) { } assert.NoError(t, err) - assertMessages(t, tt.featureEvents, server.Messages()) + assertMessages(t, tt.events, server.Messages()) assertPublisherSettings(t, tt.fields.publishSettings, e.publisher) assert.Equal(t, tt.fields.enableMessageOrdering, e.publisher.EnableMessageOrdering) }) @@ -198,7 +198,7 @@ func TestExporter_IsBulk(t *testing.T) { assert.False(t, e.IsBulk(), "PubSub exporter is not a bulk one") } -func assertMessages(t *testing.T, expectedEvents []exporter.FeatureEvent, messages []*pstest.Message) { +func assertMessages(t *testing.T, expectedEvents []exporter.ExportableEvent, messages []*pstest.Message) { events := make([]exporter.FeatureEvent, len(messages)) for i, message := range messages { assert.Equal(t, map[string]string{"emitter": "GO Feature Flag"}, message.Attributes, diff --git a/exporter/s3exporter/exporter.go b/exporter/s3exporter/exporter.go index d4399a10f7c..14cd6eda7ce 100644 --- a/exporter/s3exporter/exporter.go +++ b/exporter/s3exporter/exporter.go @@ -15,7 +15,7 @@ import ( "github.com/thomaspoignant/go-feature-flag/utils/fflog" ) -// Deprecated: Please use s3exporterv2.DeprecatedExporter instead, it will use the go-aws-sdk-v2. +// Deprecated: Please use s3exporterv2.Exporter instead, it will use the go-aws-sdk-v2. type Exporter struct { // Bucket is the name of your Exporter Bucket. Bucket string @@ -56,7 +56,7 @@ type Exporter struct { } // Export is saving a collection of events in a file. -func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { // init the s3 uploader if f.s3Uploader == nil { var initErr error @@ -79,7 +79,7 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv defer func() { _ = os.Remove(outputDir) }() // We call the File data exporter to get the file in the right format. - // Files will be put in the temp directory, so we will be able to upload them to DeprecatedExporter from there. + // Files will be put in the temp directory, so we will be able to upload them to Exporter from there. 
fileExporter := fileexporter.Exporter{ Format: f.Format, OutputDir: outputDir, @@ -87,12 +87,12 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv CsvTemplate: f.CsvTemplate, ParquetCompressionCodec: f.ParquetCompressionCodec, } - err = fileExporter.Export(ctx, logger, featureEvents) + err = fileExporter.Export(ctx, logger, events) if err != nil { return err } - // Upload all the files in the folder to DeprecatedExporter + // Upload all the files in the folder to Export files, err := os.ReadDir(outputDir) if err != nil { return err diff --git a/exporter/s3exporter/exporter_test.go b/exporter/s3exporter/exporter_test.go index bc777fe2ed9..c82624a70ae 100644 --- a/exporter/s3exporter/exporter_test.go +++ b/exporter/s3exporter/exporter_test.go @@ -27,7 +27,7 @@ func TestS3_Export(t *testing.T) { tests := []struct { name string fields fields - events []exporter.FeatureEvent + events []exporter.ExportableEvent wantErr bool expectedFile string expectedName string @@ -37,8 +37,8 @@ func TestS3_Export(t *testing.T) { fields: fields{ Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -47,13 +47,13 @@ func TestS3_Export(t *testing.T) { expectedName: "^/flag-variation-" + hostname + "-[0-9]*\\.json$", }, { - name: "With DeprecatedExporter Path", + name: "With DeprecatedExporterV1 Path", fields: fields{ S3Path: "random/path", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -67,8 +67,8 @@ func TestS3_Export(t *testing.T) { Format: "csv", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -83,8 +83,8 @@ func TestS3_Export(t *testing.T) { CsvTemplate: "{{ .Kind}};{{ .ContextKind}}\n", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -99,8 +99,8 @@ func TestS3_Export(t *testing.T) { Filename: "{{ .Format}}-test-{{ .Timestamp}}", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -114,8 +114,8 @@ func TestS3_Export(t *testing.T) { Format: "xxx", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -128,8 +128,8 @@ func TestS3_Export(t *testing.T) { fields: fields{ Format: 
"xxx", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -142,8 +142,8 @@ func TestS3_Export(t *testing.T) { Filename: "{{ .InvalidField}}", Bucket: "test", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -156,8 +156,8 @@ func TestS3_Export(t *testing.T) { Format: "csv", CsvTemplate: "{{ .Foo}}", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -199,7 +199,7 @@ func Test_errSDK(t *testing.T) { Bucket: "empty", AwsConfig: &aws.Config{}, } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.FeatureEvent{}) + err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.ExportableEvent{}) assert.Error(t, err, "Empty AWS config should failed") } diff --git a/exporter/s3exporterv2/exporter.go b/exporter/s3exporterv2/exporter.go index 6fabfd68c0d..20c93d42003 100644 --- a/exporter/s3exporterv2/exporter.go +++ b/exporter/s3exporterv2/exporter.go @@ -82,7 +82,7 @@ func (f *Exporter) initializeUploader(ctx context.Context) error { } // Export is saving a collection of events in a file. -func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { if ctx == nil { ctx = context.Background() } @@ -101,7 +101,7 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv defer func() { _ = os.Remove(outputDir) }() // We call the File data exporter to get the file in the right format. - // Files will be put in the temp directory, so we will be able to upload them to DeprecatedExporter from there. + // Files will be put in the temp directory, so we will be able to upload them to export from there. 
fileExporter := fileexporter.Exporter{ Format: f.Format, OutputDir: outputDir, @@ -109,12 +109,12 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv CsvTemplate: f.CsvTemplate, ParquetCompressionCodec: f.ParquetCompressionCodec, } - err = fileExporter.Export(ctx, logger, featureEvents) + err = fileExporter.Export(ctx, logger, events) if err != nil { return err } - // Upload all the files in the folder to DeprecatedExporter + // Upload all the files in the folder to export files, err := os.ReadDir(outputDir) if err != nil { return err diff --git a/exporter/s3exporterv2/exporter_test.go b/exporter/s3exporterv2/exporter_test.go index 6d5b3b470d6..06e36fa61fd 100644 --- a/exporter/s3exporterv2/exporter_test.go +++ b/exporter/s3exporterv2/exporter_test.go @@ -30,7 +30,7 @@ func TestS3_Export(t *testing.T) { tests := []struct { name string fields fields - events []exporter.FeatureEvent + events []exporter.ExportableEvent wantErr bool expectedFile string expectedName string @@ -41,8 +41,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -56,8 +56,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", Context: nil, }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -66,14 +66,14 @@ func TestS3_Export(t *testing.T) { expectedName: "^/flag-variation-" + hostname + "-[0-9]*\\.json$", }, { - name: "With DeprecatedExporter Path", + name: "With DeprecatedExporterV1 Path", fields: fields{ S3Path: "random/path", Bucket: "test", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -88,8 +88,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -105,8 +105,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -122,8 +122,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -138,8 +138,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", 
Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -153,8 +153,8 @@ func TestS3_Export(t *testing.T) { Format: "xxx", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -168,8 +168,8 @@ func TestS3_Export(t *testing.T) { Bucket: "test", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -183,8 +183,8 @@ func TestS3_Export(t *testing.T) { CsvTemplate: "{{ .Foo}}", Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -202,8 +202,8 @@ func TestS3_Export(t *testing.T) { }, Context: context.TODO(), }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -253,11 +253,11 @@ func Test_errSDK(t *testing.T) { Bucket: "empty", AwsConfig: &aws.Config{}, } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.FeatureEvent{}) + err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.ExportableEvent{}) assert.Error(t, err, "Empty AWS config should failed") } func TestS3_IsBulk(t *testing.T) { exporter := Exporter{} - assert.True(t, exporter.IsBulk(), "DeprecatedExporter is a bulk exporter") + assert.True(t, exporter.IsBulk(), "DeprecatedExporterV1 is a bulk exporter") } diff --git a/exporter/sqsexporter/exporter.go b/exporter/sqsexporter/exporter.go index 587fab8c5d0..02e38b2249f 100644 --- a/exporter/sqsexporter/exporter.go +++ b/exporter/sqsexporter/exporter.go @@ -27,8 +27,8 @@ type Exporter struct { sqsService SQSSendMessageAPI } -// Export is sending SQS event for each featureEvents received. -func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +// Export is sending SQS event for each events received. 
+func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { if f.AwsConfig == nil { cfg, err := config.LoadDefaultConfig(ctx) if err != nil { @@ -47,7 +47,7 @@ func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, featureEvents }) } - for _, event := range featureEvents { + for _, event := range events { messageBody, err := json.Marshal(event) if err != nil { return err diff --git a/exporter/sqsexporter/exporter_test.go b/exporter/sqsexporter/exporter_test.go index 4f437342e0a..9b7aefd8961 100644 --- a/exporter/sqsexporter/exporter_test.go +++ b/exporter/sqsexporter/exporter_test.go @@ -32,7 +32,7 @@ func (s *SQSSendMessageAPIMock) SendMessage(ctx context.Context, func TestSQS_IsBulk(t *testing.T) { exporter := Exporter{} - assert.False(t, exporter.IsBulk(), "DeprecatedExporter is not a bulk exporter") + assert.False(t, exporter.IsBulk(), "DeprecatedExporterV1 is not a bulk exporter") } func TestExporter_Export(t *testing.T) { @@ -42,10 +42,10 @@ func TestExporter_Export(t *testing.T) { sqsService SQSSendMessageAPIMock } tests := []struct { - name string - fields fields - featureEvents []exporter.FeatureEvent - wantErr bool + name string + fields fields + events []exporter.ExportableEvent + wantErr bool }{ { name: "should return an error if no QueueURL provided", @@ -54,8 +54,8 @@ func TestExporter_Export(t *testing.T) { sqsService: SQSSendMessageAPIMock{}, }, wantErr: true, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -68,12 +68,12 @@ func TestExporter_Export(t *testing.T) { sqsService: SQSSendMessageAPIMock{}, }, wantErr: false, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCDEF", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -86,12 +86,12 @@ func TestExporter_Export(t *testing.T) { sqsService: SQSSendMessageAPIMock{}, }, wantErr: true, - featureEvents: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCDEF", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, }, @@ -107,15 +107,15 @@ func TestExporter_Export(t *testing.T) { } logger := &fflog.FFLogger{LeveledLogger: slog.Default()} - err := f.Export(context.TODO(), logger, tt.featureEvents) + err := f.Export(context.TODO(), logger, tt.events) if tt.wantErr { assert.Error(t, err) return } assert.NoError(t, err) - want := make([]sqs.SendMessageInput, len(tt.featureEvents)) - for index, event := range tt.featureEvents { + want := make([]sqs.SendMessageInput, len(tt.events)) + for index, event := range tt.events { messageBody, _ := json.Marshal(event) want[index] = sqs.SendMessageInput{ MessageBody: aws.String(string(messageBody)), diff --git 
a/exporter/tracking_event.go b/exporter/tracking_event.go index e4362a9ed15..c469ac64eea 100644 --- a/exporter/tracking_event.go +++ b/exporter/tracking_event.go @@ -37,3 +37,17 @@ type TrackingEvent struct { // TODO: TrackingDetails TrackingEventDetails `json:"trackingEventDetails" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` } + +func (f TrackingEvent) GetKey() string { + return f.Key +} + +// GetUserKey returns the user key of the event +func (f TrackingEvent) GetUserKey() string { + return f.UserKey +} + +// GetCreationDate returns the creationDate of the event. +func (f TrackingEvent) GetCreationDate() int64 { + return f.CreationDate +} diff --git a/exporter/webhookexporter/exporter.go b/exporter/webhookexporter/exporter.go index 97887a8fc50..7e0f297c2c2 100644 --- a/exporter/webhookexporter/exporter.go +++ b/exporter/webhookexporter/exporter.go @@ -56,11 +56,11 @@ type webhookPayload struct { Meta map[string]string `json:"meta"` // events is the list of the event we send in the payload - Events []exporter.FeatureEvent `json:"events"` + Events []exporter.ExportableEvent `json:"events"` } // Export is sending a collection of events in a webhook call. -func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { f.init.Do(func() { if f.httpClient == nil { f.httpClient = internal.DefaultHTTPClient() @@ -78,7 +78,7 @@ func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, featureEvents body := webhookPayload{ Meta: f.Meta, - Events: featureEvents, + Events: events, } payload, err := json.Marshal(body) if err != nil { diff --git a/exporter/webhookexporter/exporter_test.go b/exporter/webhookexporter/exporter_test.go index 39b9d6b2c9c..6f1e92695de 100644 --- a/exporter/webhookexporter/exporter_test.go +++ b/exporter/webhookexporter/exporter_test.go @@ -14,7 +14,7 @@ import ( func TestWebhook_IsBulk(t *testing.T) { exporter := Exporter{} - assert.True(t, exporter.IsBulk(), "DeprecatedExporter is a bulk exporter") + assert.True(t, exporter.IsBulk(), "DeprecatedExporterV1 is a bulk exporter") } func TestWebhook_Export(t *testing.T) { @@ -28,7 +28,7 @@ func TestWebhook_Export(t *testing.T) { } type args struct { logger *fflog.FFLogger - featureEvents []exporter.FeatureEvent + featureEvents []exporter.ExportableEvent } type expected struct { bodyFilePath string @@ -58,12 +58,12 @@ func TestWebhook_Export(t *testing.T) { }, args: args{ logger: logger, - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -85,12 +85,12 @@ func TestWebhook_Export(t *testing.T) { }, args: args{ logger: logger, - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: 
"SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -112,12 +112,12 @@ func TestWebhook_Export(t *testing.T) { }, args: args{ logger: logger, - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -135,12 +135,12 @@ func TestWebhook_Export(t *testing.T) { }, args: args{ logger: logger, - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Source: "SERVER", }, @@ -158,12 +158,12 @@ func TestWebhook_Export(t *testing.T) { }, args: args{ logger: logger, - featureEvents: []exporter.FeatureEvent{ - { + featureEvents: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, - { + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "EFGH", CreationDate: 1617970701, Key: "random-key", Variation: "Default", Value: "YO2", Default: false, Version: "127", Source: "SERVER", }, @@ -217,6 +217,6 @@ func TestWebhook_Export_impossibleToParse(t *testing.T) { EndpointURL: " http://invalid.com/", } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.FeatureEvent{}) + err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.ExportableEvent{}) assert.EqualError(t, err, "parse \" http://invalid.com/\": first path segment in URL cannot contain colon") } diff --git a/testutils/exporter.go b/testutils/exporter.go new file mode 100644 index 00000000000..2054217d4bf --- /dev/null +++ b/testutils/exporter.go @@ -0,0 +1,31 @@ +package testutils + +import "text/template" + +func NewExportableMockEvent(name string) ExportableMockEvent { + return ExportableMockEvent{name: name} +} + +type ExportableMockEvent struct { + name string +} + +func (e ExportableMockEvent) GetUserKey() string { + return e.name +} + +func (e ExportableMockEvent) GetKey() string { + return e.name +} + +func (e ExportableMockEvent) GetCreationDate() int64 { + return 0 +} + +func (e ExportableMockEvent) FormatInCSV(csvTemplate *template.Template) ([]byte, error) { + return []byte(e.name), nil +} + +func (e ExportableMockEvent) FormatInJSON() ([]byte, error) { + return []byte(`{"name":"` + e.name + `"}`), nil +} From 56c11944260aa4ee6508d54f4cb0ec45b44b14da Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Wed, 12 Mar 2025 22:35:32 +0100 Subject: [PATCH 03/26] fix linter Signed-off-by: Thomas Poignant 
--- exporter/feature_event.go | 1 - testutils/exporter.go | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/exporter/feature_event.go b/exporter/feature_event.go index 3c7920be3e1..d7c16b132c3 100644 --- a/exporter/feature_event.go +++ b/exporter/feature_event.go @@ -114,7 +114,6 @@ func (f FeatureEvent) FormatInCSV(csvTemplate *template.Template) ([]byte, error return nil, err } return buf.Bytes(), nil - } func (f FeatureEvent) FormatInJSON() ([]byte, error) { diff --git a/testutils/exporter.go b/testutils/exporter.go index 2054217d4bf..35409714043 100644 --- a/testutils/exporter.go +++ b/testutils/exporter.go @@ -22,7 +22,7 @@ func (e ExportableMockEvent) GetCreationDate() int64 { return 0 } -func (e ExportableMockEvent) FormatInCSV(csvTemplate *template.Template) ([]byte, error) { +func (e ExportableMockEvent) FormatInCSV(_ *template.Template) ([]byte, error) { return []byte(e.name), nil } From e9b26ec4f1f0f9909460e18e3a5fd95b4f2cc378 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 13 Mar 2025 16:01:06 +0100 Subject: [PATCH 04/26] adding tests Signed-off-by: Thomas Poignant --- exporter/feature_event_test.go | 94 ++++++++++++++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/exporter/feature_event_test.go b/exporter/feature_event_test.go index 4dd320308dd..6ed89154913 100644 --- a/exporter/feature_event_test.go +++ b/exporter/feature_event_test.go @@ -99,6 +99,20 @@ func TestFeatureEvent_MarshalInterface(t *testing.T) { }, wantErr: true, }, + { + name: "nil value", + featureEvent: &exporter.FeatureEvent{ + Kind: "feature", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + Variation: "Default", + Value: nil, + Default: false, + }, + wantErr: true, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -198,3 +212,83 @@ func TestFeatureEvent_MarshalJSON(t *testing.T) { }) } } + +func TestFeatureEvent_GetKey(t *testing.T) { + tests := []struct { + name string + featureEvent *exporter.FeatureEvent + want string + }{ + { + name: "return existing key", + featureEvent: &exporter.FeatureEvent{ + Kind: "feature", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + Variation: "Default", + Value: map[string]interface{}{ + "string": "string", + "bool": true, + "float": 1.23, + "int": 1, + }, + Default: false, + }, + want: "random-key", + }, + { + name: "empty key", + featureEvent: &exporter.FeatureEvent{ + Kind: "feature", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "", + Variation: "Default", + Value: nil, + Default: false, + }, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.featureEvent.GetKey()) + }) + } +} + +func TestFeatureEvent_GetUserKey(t *testing.T) { + tests := []struct { + name string + featureEvent *exporter.FeatureEvent + want string + }{ + { + name: "return existing key", + featureEvent: &exporter.FeatureEvent{ + Kind: "feature", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + Variation: "Default", + Value: map[string]interface{}{ + "string": "string", + "bool": true, + "float": 1.23, + "int": 1, + }, + Default: false, + }, + want: "ABCD", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.featureEvent.GetUserKey()) + }) + } +} From 5cb715e360e7a20d3ce3107bd74ba9affd32647e Mon Sep 17 00:00:00 2001 From: 
Thomas Poignant Date: Thu, 13 Mar 2025 16:48:20 +0100 Subject: [PATCH 05/26] fix test to use ExportableEvent Signed-off-by: Thomas Poignant --- exporter/data_exporter_test.go | 5 ---- exporter/exportable_event.go | 4 +-- exporter/manager_test.go | 48 ++++++++++++++++++++++----------- testutils/mock/exporter_mock.go | 17 +++++++----- variation_test.go | 2 +- 5 files changed, 47 insertions(+), 29 deletions(-) diff --git a/exporter/data_exporter_test.go b/exporter/data_exporter_test.go index fc9c3253703..6b919b1dfeb 100644 --- a/exporter/data_exporter_test.go +++ b/exporter/data_exporter_test.go @@ -48,11 +48,6 @@ func TestDataExporterFlush_TriggerErrorIfNotKnowType(t *testing.T) { exporter mock.ExporterMock expectedLog string }{ - { - name: "classic exporter", - exporter: &mock.Exporter{}, - expectedLog: "trying to send unknown object to the exporter\n", - }, { name: "deprecated exporter", exporter: &mock.ExporterDeprecated{}, diff --git a/exporter/exportable_event.go b/exporter/exportable_event.go index c8e2d2bc40d..89e60c33b80 100644 --- a/exporter/exportable_event.go +++ b/exporter/exportable_event.go @@ -11,8 +11,8 @@ type ExportableEvent interface { GetKey() string // GetCreationDate returns the creationDate of the event. GetCreationDate() int64 - // FormatEventInCSV returns the event in CSV format. + // FormatInCSV FormatEventInCSV returns the event in CSV format. FormatInCSV(csvTemplate *template.Template) ([]byte, error) - // FormatEventInJSON returns the event in JSON format. + // FormatInJSON FormatEventInJSON returns the event in JSON format. FormatInJSON() ([]byte, error) } diff --git a/exporter/manager_test.go b/exporter/manager_test.go index 7afadc0dc63..0954f925a43 100644 --- a/exporter/manager_test.go +++ b/exporter/manager_test.go @@ -53,12 +53,14 @@ func TestDataExporterManager_flushWithTime(t *testing.T) { "YO", "defaultVar", false, "", "SERVER", nil), } - for _, event := range inputEvents { + want := make([]exporter.ExportableEvent, len(inputEvents)) + for i, event := range inputEvents { dc.AddEvent(event) + want[i] = event } time.Sleep(500 * time.Millisecond) - assert.Equal(t, inputEvents, tt.mockExporter.GetExportedEvents()) + assert.Equal(t, want, tt.mockExporter.GetExportedEvents()) }) } } @@ -97,10 +99,12 @@ func TestDataExporterManager_flushWithNumberOfEvents(t *testing.T) { ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("anonymous", true).Build(), "random-key", "YO", "defaultVar", false, "", "SERVER", nil)) } - for _, event := range inputEvents { + want := make([]exporter.ExportableEvent, len(inputEvents)) + for i, event := range inputEvents { dc.AddEvent(event) + want[i] = event } - assert.Equal(t, inputEvents[:100], tt.mockExporter.GetExportedEvents()) + assert.Equal(t, want[:100], tt.mockExporter.GetExportedEvents()) }) } } @@ -141,10 +145,12 @@ func TestDataExporterManager_defaultFlush(t *testing.T) { ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("anonymous", true).Build(), "random-key", "YO", "defaultVar", false, "", "SERVER", nil)) } - for _, event := range inputEvents { + want := make([]exporter.ExportableEvent, len(inputEvents)) + for i, event := range inputEvents { dc.AddEvent(event) + want[i] = event } - assert.Equal(t, inputEvents[:100000], tt.mockExporter.GetExportedEvents()) + assert.Equal(t, want[:100000], tt.mockExporter.GetExportedEvents()) }) } } @@ -175,11 +181,13 @@ func TestDataExporterManager_exporterReturnError(t *testing.T) { ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("anonymous", true).Build(), "random-key", 
"YO", "defaultVar", false, "", "SERVER", nil)) } - for _, event := range inputEvents { + want := make([]exporter.ExportableEvent, len(inputEvents)) + for i, event := range inputEvents { dc.AddEvent(event) + want[i] = event } // check that the first 100 events are exported - assert.Equal(t, inputEvents[:100], mockExporter.GetExportedEvents()[:100]) + assert.Equal(t, want[:100], mockExporter.GetExportedEvents()[:100]) handler.AssertMessage("error while exporting data: random err") } @@ -203,13 +211,15 @@ func TestDataExporterManager_nonBulkExporter(t *testing.T) { ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("anonymous", true).Build(), "random-key", "YO", "defaultVar", false, "", "SERVER", nil)) } - for _, event := range inputEvents { + want := make([]exporter.ExportableEvent, len(inputEvents)) + for i, event := range inputEvents { dc.AddEvent(event) + want[i] = event // we have to wait because we are opening a new thread to slow down the flag evaluation. time.Sleep(1 * time.Millisecond) } - assert.Equal(t, inputEvents[:100], mockExporter.GetExportedEvents()) + assert.Equal(t, want[:100], mockExporter.GetExportedEvents()) } func TestAddExporterMetadataFromContextToExporter(t *testing.T) { @@ -260,8 +270,14 @@ func TestAddExporterMetadataFromContextToExporter(t *testing.T) { time.Sleep(120 * time.Millisecond) assert.Equal(t, 1, len(mockExporter.GetExportedEvents())) - got := mockExporter.GetExportedEvents()[0].Metadata - assert.Equal(t, tt.want, got) + + switch val := mockExporter.GetExportedEvents()[0].(type) { + case exporter.FeatureEvent: + assert.Equal(t, tt.want, val.Metadata) + break + default: + assert.Fail(t, "The exported event is not a FeatureEvent") + } }) } } @@ -293,16 +309,18 @@ func TestDataExporterManager_multipleExporters(t *testing.T) { ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("anonymous", true).Build(), "random-key", "YO", "defaultVar", false, "", "SERVER", nil)) } - for _, event := range inputEvents { + want := make([]exporter.ExportableEvent, len(inputEvents)) + for i, event := range inputEvents { dc.AddEvent(event) + want[i] = event // we have to wait because we are opening a new thread to slow down the flag evaluation. 
time.Sleep(1 * time.Millisecond) } - assert.Equal(t, inputEvents[:100], mockExporter1.GetExportedEvents()) + assert.Equal(t, want[:100], mockExporter1.GetExportedEvents()) assert.Equal(t, 0, len(mockExporter2.GetExportedEvents())) time.Sleep(250 * time.Millisecond) - assert.Equal(t, inputEvents[:100], mockExporter2.GetExportedEvents()) + assert.Equal(t, want[:100], mockExporter2.GetExportedEvents()) } func TestDataExporterManager_multipleExportersWithDifferentFlushInterval(t *testing.T) { diff --git a/testutils/mock/exporter_mock.go b/testutils/mock/exporter_mock.go index c9b33e179d2..46dcdf77649 100644 --- a/testutils/mock/exporter_mock.go +++ b/testutils/mock/exporter_mock.go @@ -11,10 +11,10 @@ import ( type ExporterMock interface { exporter.CommonExporter - GetExportedEvents() []exporter.FeatureEvent + GetExportedEvents() []exporter.ExportableEvent } type Exporter struct { - ExportedEvents []exporter.FeatureEvent + ExportedEvents []exporter.ExportableEvent Err error ExpectedNumberErr int CurrentNumberErr int @@ -24,7 +24,7 @@ type Exporter struct { once sync.Once } -func (m *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.FeatureEvent) error { +func (m *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() @@ -38,7 +38,7 @@ func (m *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporte return nil } -func (m *Exporter) GetExportedEvents() []exporter.FeatureEvent { +func (m *Exporter) GetExportedEvents() []exporter.ExportableEvent { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() @@ -79,11 +79,16 @@ func (m *ExporterDeprecated) Export(_ context.Context, _ *log.Logger, events []e return nil } -func (m *ExporterDeprecated) GetExportedEvents() []exporter.FeatureEvent { +func (m *ExporterDeprecated) GetExportedEvents() []exporter.ExportableEvent { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() - return m.ExportedEvents + + exportableEvents := make([]exporter.ExportableEvent, len(m.ExportedEvents)) + for index, event := range m.ExportedEvents { + exportableEvents[index] = event + } + return exportableEvents } func (m *ExporterDeprecated) IsBulk() bool { diff --git a/variation_test.go b/variation_test.go index c058817d324..b09eacd40a0 100644 --- a/variation_test.go +++ b/variation_test.go @@ -4041,7 +4041,7 @@ func Test_constructMetadataParallel(t *testing.T) { runtime.GOMAXPROCS(runtime.NumCPU()) // generate test cases - for i := 0; i < 10_000; i++ { + for i := 0; i < 1000; i++ { ruleName := fmt.Sprintf("rule-%d", i) tests = append(tests, struct { name string From 62f5a7edbead501c6f7e83571c9e77d0906864be Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 13 Mar 2025 16:54:25 +0100 Subject: [PATCH 06/26] Add 2nd deprecated mock Signed-off-by: Thomas Poignant --- exporter/manager_test.go | 12 +++++++++ testutils/mock/exporter_mock.go | 46 +++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+) diff --git a/exporter/manager_test.go b/exporter/manager_test.go index 0954f925a43..ad5edc16608 100644 --- a/exporter/manager_test.go +++ b/exporter/manager_test.go @@ -32,6 +32,10 @@ func TestDataExporterManager_flushWithTime(t *testing.T) { name: "flushTime: deprecated exporter", mockExporter: &mock.ExporterDeprecated{Bulk: true}, }, + { + name: "flushTime: deprecated exporter v2", + mockExporter: &mock.ExporterDeprecatedV2{Bulk: true}, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ 
-78,6 +82,10 @@ func TestDataExporterManager_flushWithNumberOfEvents(t *testing.T) { name: "flushWithNumberOfEvents: deprecated exporter", mockExporter: &mock.ExporterDeprecated{Bulk: true}, }, + { + name: "flushWithNumberOfEvents: deprecated exporter v2", + mockExporter: &mock.ExporterDeprecatedV2{Bulk: true}, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { @@ -122,6 +130,10 @@ func TestDataExporterManager_defaultFlush(t *testing.T) { name: "deprecated exporter", mockExporter: &mock.ExporterDeprecated{Bulk: true}, }, + { + name: "deprecated exporter v2", + mockExporter: &mock.ExporterDeprecatedV2{Bulk: true}, + }, } for _, tt := range tests { diff --git a/testutils/mock/exporter_mock.go b/testutils/mock/exporter_mock.go index 46dcdf77649..5f64651a652 100644 --- a/testutils/mock/exporter_mock.go +++ b/testutils/mock/exporter_mock.go @@ -98,3 +98,49 @@ func (m *ExporterDeprecated) IsBulk() bool { func (m *ExporterDeprecated) initMutex() { m.mutex = sync.Mutex{} } + +// ExporterDeprecatedV2 ----- +type ExporterDeprecatedV2 struct { + ExportedEvents []exporter.FeatureEvent + Err error + ExpectedNumberErr int + CurrentNumberErr int + Bulk bool + + mutex sync.Mutex + once sync.Once +} + +func (m *ExporterDeprecatedV2) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.FeatureEvent) error { + m.once.Do(m.initMutex) + m.mutex.Lock() + defer m.mutex.Unlock() + m.ExportedEvents = append(m.ExportedEvents, events...) + if m.Err != nil { + if m.ExpectedNumberErr > m.CurrentNumberErr { + m.CurrentNumberErr++ + return m.Err + } + } + return nil +} + +func (m *ExporterDeprecatedV2) GetExportedEvents() []exporter.ExportableEvent { + m.once.Do(m.initMutex) + m.mutex.Lock() + defer m.mutex.Unlock() + + exportableEvents := make([]exporter.ExportableEvent, len(m.ExportedEvents)) + for index, event := range m.ExportedEvents { + exportableEvents[index] = event + } + return exportableEvents +} + +func (m *ExporterDeprecatedV2) IsBulk() bool { + return m.Bulk +} + +func (m *ExporterDeprecatedV2) initMutex() { + m.mutex = sync.Mutex{} +} From b6d5024bbfd57e9619bb9645d3bd53479d703e46 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 13 Mar 2025 18:29:52 +0100 Subject: [PATCH 07/26] Add test for trackingEvent Signed-off-by: Thomas Poignant --- exporter/feature_event_test.go | 17 +-- exporter/tracking_event.go | 34 +++++- exporter/tracking_event_test.go | 180 ++++++++++++++++++++++++++++++ ffcontext/context.go | 53 +++++---- ffcontext/context_builder.go | 10 +- ffcontext/context_builder_test.go | 60 +++++----- ffcontext/context_test.go | 22 ++-- 7 files changed, 297 insertions(+), 79 deletions(-) create mode 100644 exporter/tracking_event_test.go diff --git a/exporter/feature_event_test.go b/exporter/feature_event_test.go index 6ed89154913..ce3de40c1f3 100644 --- a/exporter/feature_event_test.go +++ b/exporter/feature_event_test.go @@ -1,7 +1,7 @@ package exporter_test import ( - "encoding/json" + "fmt" "testing" "time" @@ -128,7 +128,7 @@ func TestFeatureEvent_MarshalInterface(t *testing.T) { } } -func TestFeatureEvent_MarshalJSON(t *testing.T) { +func TestFeatureEvent_FormatInJSON(t *testing.T) { tests := []struct { name string featureEvent *exporter.FeatureEvent @@ -153,7 +153,7 @@ func TestFeatureEvent_MarshalJSON(t *testing.T) { Default: false, Metadata: map[string]interface{}{}, }, - want: 
`{"kind":"feature","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","variation":"Default","value":{"string":"string","bool":true,"float":1.23,"int":1},"default":false}`, + want: `{"kind":"feature","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","variation":"Default","value":{"bool":true,"float":1.23,"int":1,"string":"string"},"default":false,"version":"","source":""}`, wantErr: assert.NoError, }, { @@ -173,7 +173,7 @@ func TestFeatureEvent_MarshalJSON(t *testing.T) { }, Default: false, }, - want: `{"kind":"feature","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","variation":"Default","value":{"string":"string","bool":true,"float":1.23,"int":1},"default":false}`, + want: `{"kind":"feature","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","variation":"Default","value":{"bool":true,"float":1.23,"int":1,"string":"string"},"default":false,"version":"","source":""}`, wantErr: assert.NoError, }, { @@ -198,15 +198,16 @@ func TestFeatureEvent_MarshalJSON(t *testing.T) { "metadata3": true, }, }, - want: `{"kind":"feature","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","variation":"Default","value":{"string":"string","bool":true,"float":1.23,"int":1},"default":false,"metadata":{"metadata1":"metadata1","metadata2":24,"metadata3":true}}`, + want: `{"kind":"feature","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","variation":"Default","value":{"bool":true,"float":1.23,"int":1,"string":"string"},"default":false,"version":"","source":"","metadata":{"metadata1":"metadata1","metadata2":24,"metadata3":true}}`, wantErr: assert.NoError, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, err := json.Marshal(tt.featureEvent) + got, err := tt.featureEvent.FormatInJSON() tt.wantErr(t, err) - if err != nil { + if err == nil { + fmt.Println(string(got)) assert.JSONEq(t, tt.want, string(got)) } }) @@ -263,7 +264,7 @@ func TestFeatureEvent_GetKey(t *testing.T) { func TestFeatureEvent_GetUserKey(t *testing.T) { tests := []struct { name string - featureEvent *exporter.FeatureEvent + featureEvent *exporter.FeatureEvent `` want string }{ { diff --git a/exporter/tracking_event.go b/exporter/tracking_event.go index c469ac64eea..eb460d563b0 100644 --- a/exporter/tracking_event.go +++ b/exporter/tracking_event.go @@ -1,6 +1,11 @@ package exporter import ( + "bytes" + "encoding/json" + "text/template" + "time" + "github.com/thomaspoignant/go-feature-flag/ffcontext" ) @@ -27,14 +32,10 @@ type TrackingEvent struct { // Key of the feature flag requested. Key string `json:"key" example:"my-feature-flag" parquet:"name=key, type=BYTE_ARRAY, convertedtype=UTF8"` - // Source indicates where the event was generated. - // This is set to SERVER when the event was evaluated in the relay-proxy and PROVIDER_CACHE when it is evaluated from the cache. 
- Source string `json:"source" example:"SERVER" parquet:"name=source, type=BYTE_ARRAY, convertedtype=UTF8"` - - // TODO: + // EvaluationContext contains the evaluation context used for the tracking EvaluationContext ffcontext.EvaluationContext `json:"evaluationContext" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` - // TODO: + // TrackingDetails contains the details of the tracking event TrackingDetails TrackingEventDetails `json:"trackingEventDetails" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` } @@ -51,3 +52,24 @@ func (f TrackingEvent) GetUserKey() string { func (f TrackingEvent) GetCreationDate() int64 { return f.CreationDate } + +func (f TrackingEvent) FormatInCSV(csvTemplate *template.Template) ([]byte, error) { + var buf bytes.Buffer + err := csvTemplate.Execute(&buf, struct { + TrackingEvent + FormattedDate string + }{ + TrackingEvent: f, + FormattedDate: time.Unix(f.GetCreationDate(), 0).Format(time.RFC3339), + }) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func (f TrackingEvent) FormatInJSON() ([]byte, error) { + b, err := json.Marshal(f) + b = append(b, []byte("\n")...) + return b, err +} diff --git a/exporter/tracking_event_test.go b/exporter/tracking_event_test.go new file mode 100644 index 00000000000..a17be3d872d --- /dev/null +++ b/exporter/tracking_event_test.go @@ -0,0 +1,180 @@ +package exporter_test + +import ( + "fmt" + "testing" + "text/template" + + "github.com/stretchr/testify/assert" + "github.com/thomaspoignant/go-feature-flag/exporter" + "github.com/thomaspoignant/go-feature-flag/ffcontext" +) + +func TestTrackingEvent_FormatInCSV(t *testing.T) { + tests := []struct { + name string + trackingEvent *exporter.TrackingEvent + template string + want string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "Should return a marshalled JSON string of the tracking event", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").Build(), + TrackingDetails: map[string]interface{}{ + "event": "123", + }, + }, + template: `{{ .Kind}};{{ .ContextKind}};{{ .UserKey}};{{ .CreationDate}};{{ .EvaluationContext}};{{ .TrackingDetails}}`, + want: `tracking;anonymousUser;ABCD;1617970547;{ABCD map[]};map[event:123]`, + wantErr: assert.NoError, + }, + { + name: "Should return a marshalled JSON string of the tracking event with evaluation context attributes", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("toto", 123).Build(), + TrackingDetails: map[string]interface{}{ + "event": "123", + }, + }, + template: `{{ .Kind}};{{ .ContextKind}};{{ .UserKey}};{{ .CreationDate}};{{ .EvaluationContext}};{{ .TrackingDetails}}`, + want: `tracking;anonymousUser;ABCD;1617970547;{ABCD map[toto:123]};map[event:123]`, + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + csvTemplate, err := template.New("test").Parse(tt.template) + assert.NoError(t, err) + got, err := tt.trackingEvent.FormatInCSV(csvTemplate) + tt.wantErr(t, err) + if err == nil { + assert.Equal(t, tt.want, string(got)) + } 
+ }) + } +} + +func TestTrackingEvent_FormatInJSON(t *testing.T) { + tests := []struct { + name string + trackingEvent *exporter.TrackingEvent + want string + wantErr assert.ErrorAssertionFunc + }{ + { + name: "Should return a marshalled JSON string of the tracking event", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").Build(), + TrackingDetails: map[string]interface{}{ + "event": "123", + }, + }, + want: `{"kind":"tracking","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","evaluationContext":{"targetingKey":"ABCD","attributes":{}},"trackingEventDetails":{"event":"123"}}`, + wantErr: assert.NoError, + }, + { + name: "Should return a marshalled JSON string of the tracking event with evaluation context attributes", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("toto", 123).Build(), + TrackingDetails: map[string]interface{}{ + "event": "123", + }, + }, + want: `{"kind":"tracking","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","evaluationContext":{"targetingKey":"ABCD","attributes":{"toto":123}},"trackingEventDetails":{"event":"123"}}`, + wantErr: assert.NoError, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := tt.trackingEvent.FormatInJSON() + tt.wantErr(t, err) + if err == nil { + fmt.Println(string(got)) + assert.JSONEq(t, tt.want, string(got)) + } + }) + } +} + +func TestTrackingEvent_GetKey(t *testing.T) { + tests := []struct { + name string + trackingEvent *exporter.TrackingEvent + want string + }{ + { + name: "return existing key", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + }, + want: "random-key", + }, + { + name: "empty key", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "", + }, + want: "", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.trackingEvent.GetKey()) + }) + } +} + +func TestTrackingEvent_GetUserKey(t *testing.T) { + tests := []struct { + name string + trackingEvent *exporter.TrackingEvent + want string + }{ + { + name: "return existing key", + trackingEvent: &exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: "anonymousUser", + UserKey: "ABCD", + CreationDate: 1617970547, + Key: "random-key", + }, + want: "ABCD", + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, tt.trackingEvent.GetUserKey()) + }) + } +} diff --git a/ffcontext/context.go b/ffcontext/context.go index 58683e8e33b..e4125ec357c 100644 --- a/ffcontext/context.go +++ b/ffcontext/context.go @@ -1,33 +1,35 @@ package ffcontext +import "encoding/json" + type Context interface { - // GetKey return the unique key for the context. + // GetKey return the unique targetingKey for the context. GetKey() string // IsAnonymous return if the context is about an anonymous user or not. IsAnonymous() bool - // GetCustom return all the custom properties added to the context. 
+ // GetCustom return all the attributes added to the context. GetCustom() map[string]interface{} - // AddCustomAttribute allows to add a custom attribute into the context. + // AddCustomAttribute allows to add an attribute into the context. AddCustomAttribute(name string, value interface{}) // ExtractGOFFProtectedFields extract the goff specific attributes from the evaluation context. ExtractGOFFProtectedFields() GoffContextSpecifics } -// value is a type to define custom attribute. +// value is a type to define attributes. type value map[string]interface{} -// NewEvaluationContext creates a new evaluation context identified by the given key. +// NewEvaluationContext creates a new evaluation context identified by the given targetingKey. func NewEvaluationContext(key string) EvaluationContext { - return EvaluationContext{key: key, custom: map[string]interface{}{}} + return EvaluationContext{targetingKey: key, attributes: map[string]interface{}{}} } // Deprecated: NewAnonymousEvaluationContext is here for compatibility reason. -// Please use NewEvaluationContext instead and add a custom attribute to know that it is an anonymous user. +// Please use NewEvaluationContext instead and add an attribute to know that it is an anonymous user. // -// ctx := NewEvaluationContext("my-key") +// ctx := NewEvaluationContext("my-targetingKey") // ctx.AddCustomAttribute("anonymous", true) func NewAnonymousEvaluationContext(key string) EvaluationContext { - return EvaluationContext{key: key, custom: map[string]interface{}{ + return EvaluationContext{targetingKey: key, attributes: map[string]interface{}{ "anonymous": true, }} } @@ -42,18 +44,31 @@ func NewAnonymousEvaluationContext(key string) EvaluationContext { // To construct an EvaluationContext, use either a simple constructor (NewEvaluationContext) or the builder pattern // with NewEvaluationContextBuilder. type EvaluationContext struct { - key string // only mandatory attribute - custom value + // uniquely identifying the subject (end-user, or client service) of a flag evaluation + targetingKey string + attributes value +} + +// MarshalJSON is a custom JSON marshaller for EvaluationContext. +// It only marshals the targetingKey and the attributes of the context, to avoid exposing the internal structure. +func (u EvaluationContext) MarshalJSON() ([]byte, error) { + return json.Marshal(&struct { + TargetingKey string `json:"targetingKey"` + Attributes value `json:"attributes"` + }{ + TargetingKey: u.targetingKey, + Attributes: u.attributes, + }) } -// GetKey return the unique key for the user. +// GetKey return the unique targetingKey for the user. func (u EvaluationContext) GetKey() string { - return u.key + return u.targetingKey } // IsAnonymous return if the user is anonymous or not. func (u EvaluationContext) IsAnonymous() bool { - anonymous := u.custom["anonymous"] + anonymous := u.attributes["anonymous"] switch v := anonymous.(type) { case bool: return v @@ -62,22 +77,22 @@ func (u EvaluationContext) IsAnonymous() bool { } } -// GetCustom return all the custom properties of a user. +// GetCustom return all the attributes of a user. func (u EvaluationContext) GetCustom() map[string]interface{} { - return u.custom + return u.attributes } -// AddCustomAttribute allows to add a custom attribute into the user. +// AddCustomAttribute allows to add an attribute into the user.
func (u EvaluationContext) AddCustomAttribute(name string, value interface{}) { if name != "" { - u.custom[name] = value + u.attributes[name] = value } } // ExtractGOFFProtectedFields extract the goff specific attributes from the evaluation context. func (u EvaluationContext) ExtractGOFFProtectedFields() GoffContextSpecifics { goff := GoffContextSpecifics{} - switch v := u.custom["gofeatureflag"].(type) { + switch v := u.attributes["gofeatureflag"].(type) { case map[string]string: goff.addCurrentDateTime(v["currentDateTime"]) goff.addListFlags(v["flagList"]) diff --git a/ffcontext/context_builder.go b/ffcontext/context_builder.go index 5401f98427e..6aa5a9b4df3 100644 --- a/ffcontext/context_builder.go +++ b/ffcontext/context_builder.go @@ -1,8 +1,8 @@ package ffcontext -// NewEvaluationContextBuilder constructs a new EvaluationContextBuilder, specifying the user key. +// NewEvaluationContextBuilder constructs a new EvaluationContextBuilder, specifying the user targetingKey. // -// For authenticated users, the key may be a username or e-mail address. For anonymous users, +// For authenticated users, the targetingKey may be a username or e-mail address. For anonymous users, // this could be an IP address or session ID. func NewEvaluationContextBuilder(key string) EvaluationContextBuilder { return &evaluationContextBuilderImpl{ @@ -36,7 +36,7 @@ func (u *evaluationContextBuilderImpl) Anonymous(anonymous bool) EvaluationConte return u } -// AddCustom allows you to add a custom attribute to the EvaluationContext. +// AddCustom allows you to add an attribute to the EvaluationContext. func (u *evaluationContextBuilderImpl) AddCustom(key string, value interface{}) EvaluationContextBuilder { u.custom[key] = value return u @@ -45,7 +45,7 @@ func (u *evaluationContextBuilderImpl) AddCustom(key string, value interface{}) // Build is creating the EvaluationContext. func (u *evaluationContextBuilderImpl) Build() EvaluationContext { return EvaluationContext{ - key: u.key, - custom: u.custom, + targetingKey: u.key, + attributes: u.custom, } } diff --git a/ffcontext/context_builder_test.go b/ffcontext/context_builder_test.go index 43c29d6bff9..3b69dbf4440 100644 --- a/ffcontext/context_builder_test.go +++ b/ffcontext/context_builder_test.go @@ -13,71 +13,71 @@ func TestNewUser(t *testing.T) { want EvaluationContext }{ { - name: "Builder with only key", - got: NewEvaluationContextBuilder("random-key").Build(), + name: "Builder with only targetingKey", + got: NewEvaluationContextBuilder("random-targetingKey").Build(), want: EvaluationContext{ - key: "random-key", - custom: map[string]interface{}{}, + targetingKey: "random-targetingKey", + attributes: map[string]interface{}{}, }, }, { - name: "Builder with custom attribute", - got: NewEvaluationContextBuilder("random-key"). - AddCustom("test", "custom"). + name: "Builder with attributes", + got: NewEvaluationContextBuilder("random-targetingKey"). + AddCustom("test", "attributes"). Build(), want: EvaluationContext{ - key: "random-key", - custom: map[string]interface{}{ - "test": "custom", + targetingKey: "random-targetingKey", + attributes: map[string]interface{}{ + "test": "attributes", }, }, }, { - name: "Builder with custom attribute", - got: NewEvaluationContextBuilder("random-key"). + name: "Builder with anonymous and attributes", + got: NewEvaluationContextBuilder("random-targetingKey"). Anonymous(true). - AddCustom("test", "custom"). + AddCustom("test", "attributes").
Build(), want: EvaluationContext{ - key: "random-key", - custom: map[string]interface{}{ - "test": "custom", + targetingKey: "random-targetingKey", + attributes: map[string]interface{}{ + "test": "attributes", "anonymous": true, }, }, }, { - name: "NewUser with key", - got: NewEvaluationContext("random-key"), + name: "NewUser with targetingKey", + got: NewEvaluationContext("random-targetingKey"), want: EvaluationContext{ - key: "random-key", - custom: map[string]interface{}{}, + targetingKey: "random-targetingKey", + attributes: map[string]interface{}{}, }, }, { - name: "NewUser without key", + name: "NewUser without targetingKey", got: NewEvaluationContext(""), want: EvaluationContext{ - key: "", - custom: map[string]interface{}{}, + targetingKey: "", + attributes: map[string]interface{}{}, }, }, { - name: "NewAnonymousUser with key", - got: NewAnonymousEvaluationContext("random-key"), + name: "NewAnonymousUser with targetingKey", + got: NewAnonymousEvaluationContext("random-targetingKey"), want: EvaluationContext{ - key: "random-key", - custom: map[string]interface{}{ + targetingKey: "random-targetingKey", + attributes: map[string]interface{}{ "anonymous": true, }, }, }, { - name: "NewAnonymousUser without key", + name: "NewAnonymousUser without targetingKey", got: NewAnonymousEvaluationContext(""), want: EvaluationContext{ - key: "", - custom: map[string]interface{}{ + targetingKey: "", + attributes: map[string]interface{}{ "anonymous": true, }, }, diff --git a/ffcontext/context_test.go b/ffcontext/context_test.go index 98a8fb585b1..7685b6b6bd2 100644 --- a/ffcontext/context_test.go +++ b/ffcontext/context_test.go @@ -54,7 +54,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }{ { name: "context goff specifics as map[string]string", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]string{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]string{ "currentDateTime": time.Date(2022, 8, 1, 0, 0, 10, 0, time.UTC).Format(time.RFC3339), }).Build(), want: ffcontext.GoffContextSpecifics{ @@ -63,7 +63,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics as map[string]interface and date as time.Time", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]interface{}{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]interface{}{ "currentDateTime": time.Date(2022, 8, 1, 0, 0, 10, 0, time.UTC), }).Build(), want: ffcontext.GoffContextSpecifics{ @@ -72,7 +72,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics as map[string]interface and date as *time.Time", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]interface{}{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]interface{}{ "currentDateTime": testconvert.Time(time.Date(2022, 8, 1, 0, 0, 10, 0, time.UTC)), }).Build(), want: ffcontext.GoffContextSpecifics{ @@ -81,7 +81,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics as map[string]interface", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]interface{}{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]interface{}{ "currentDateTime": time.Date(2022, 8, 1, 0, 0, 10, 0, 
time.UTC).Format(time.RFC3339), }).Build(), want: ffcontext.GoffContextSpecifics{ @@ -90,21 +90,21 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics nil", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", nil).Build(), + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", nil).Build(), want: ffcontext.GoffContextSpecifics{ CurrentDateTime: nil, }, }, { name: "no context goff specifics", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").Build(), + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").Build(), want: ffcontext.GoffContextSpecifics{ CurrentDateTime: nil, }, }, { name: "context goff specifics as GoffContextSpecifics type", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", ffcontext.GoffContextSpecifics{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", ffcontext.GoffContextSpecifics{ CurrentDateTime: testconvert.Time(time.Date(2022, 8, 1, 0, 0, 10, 0, time.UTC)), }).Build(), want: ffcontext.GoffContextSpecifics{ @@ -113,7 +113,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics as GoffContextSpecifics type contains flagList", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", ffcontext.GoffContextSpecifics{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", ffcontext.GoffContextSpecifics{ CurrentDateTime: testconvert.Time(time.Date(2022, 8, 1, 0, 0, 10, 0, time.UTC)), FlagList: []string{"flag1", "flag2"}, }).Build(), @@ -124,7 +124,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics as map[string]interface type contains flagList", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]interface{}{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]interface{}{ "currentDateTime": testconvert.Time(time.Date(2022, 8, 1, 0, 0, 10, 0, time.UTC)).Format(time.RFC3339), "flagList": []string{"flag1", "flag2"}, }).Build(), @@ -135,7 +135,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics only flagList", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]interface{}{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]interface{}{ "flagList": []string{"flag1", "flag2"}, }).Build(), want: ffcontext.GoffContextSpecifics{ @@ -144,7 +144,7 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }, { name: "context goff specifics with exporter metadata", - ctx: ffcontext.NewEvaluationContextBuilder("my-key").AddCustom("gofeatureflag", map[string]interface{}{ + ctx: ffcontext.NewEvaluationContextBuilder("my-targetingKey").AddCustom("gofeatureflag", map[string]interface{}{ "exporterMetadata": map[string]interface{}{ "toto": 123, "titi": 123.45, From c899dda95e2ba147e0a879d9a1ef940b6ac50844 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 13 Mar 2025 20:40:17 +0100 Subject: [PATCH 08/26] Add tracking function Signed-off-by: Thomas Poignant --- config_exporter.go | 11 + feature_flag.go | 190 ++++++++++++------ internal/cache/cache_manager.go | 5 +- internal/cache/cache_manager_test.go | 11 +- .../notification_service.go | 4 +- .../notification_service_priv_test.go | 10 +- .../notification_service_test.go | 2 +- 
.../mock/tracking_event_exporter_mock.go | 65 ++++++ tracking.go | 29 +++ tracking_test.go | 41 ++++ variation.go | 4 +- variation_test.go | 26 +-- 12 files changed, 302 insertions(+), 96 deletions(-) rename internal/{cache => notification}/notification_service.go (95%) rename internal/{cache => notification}/notification_service_priv_test.go (88%) rename internal/{cache => notification}/notification_service_test.go (99%) create mode 100644 testutils/mock/tracking_event_exporter_mock.go create mode 100644 tracking.go create mode 100644 tracking_test.go diff --git a/config_exporter.go b/config_exporter.go index c54c7f69cbe..27fee1d0862 100644 --- a/config_exporter.go +++ b/config_exporter.go @@ -6,6 +6,13 @@ import ( "github.com/thomaspoignant/go-feature-flag/exporter" ) +type ExporterEventType = string + +const ( + TrackingEventExporter ExporterEventType = "tracking" + FeatureEventExporter ExporterEventType = "feature" +) + // DataExporter is the configuration of your export target. type DataExporter struct { // FlushInterval is the interval we are waiting to export the data. @@ -22,4 +29,8 @@ type DataExporter struct { // Exporter is the configuration of your exporter. // You can see all available exporter in the exporter package. Exporter exporter.CommonExporter + + // ExporterEventType is the type of event the exporter is expecting. + // The default type if not set is FeatureEventExporter. + ExporterEventType ExporterEventType } diff --git a/feature_flag.go b/feature_flag.go index c848c0b28ed..b4f62074257 100644 --- a/feature_flag.go +++ b/feature_flag.go @@ -11,6 +11,7 @@ import ( "github.com/thomaspoignant/go-feature-flag/exporter" "github.com/thomaspoignant/go-feature-flag/internal/cache" + "github.com/thomaspoignant/go-feature-flag/internal/notification" "github.com/thomaspoignant/go-feature-flag/model/dto" "github.com/thomaspoignant/go-feature-flag/notifier/logsnotifier" "github.com/thomaspoignant/go-feature-flag/retriever" @@ -43,11 +44,12 @@ func Init(config Config) error { // GoFeatureFlag is the main object of the library // it contains the cache, the config, the updater and the exporter. type GoFeatureFlag struct { - cache cache.Manager - config Config - bgUpdater backgroundUpdater - dataExporter exporter.Manager[exporter.FeatureEvent] - retrieverManager *retriever.Manager + cache cache.Manager + config Config + bgUpdater backgroundUpdater + featureEventDataExporter exporter.Manager[exporter.FeatureEvent] + trackingEventDataExporter exporter.Manager[exporter.TrackingEvent] + retrieverManager *retriever.Manager } // ff is the default object for go-feature-flag @@ -57,21 +59,13 @@ var onceFF sync.Once // New creates a new go-feature-flag instances that retrieve the config from a YAML file // and return everything you need to manage your flags. 
func New(config Config) (*GoFeatureFlag, error) { - switch { - case config.PollingInterval == 0: - // The default value for the poll interval is 60 seconds - config.PollingInterval = 60 * time.Second - case config.PollingInterval > 0 && config.PollingInterval < time.Second: - // the minimum value for the polling policy is 1 second - config.PollingInterval = time.Second - default: - // do nothing - } + config.PollingInterval = adjustPollingInterval(config.PollingInterval) if config.offlineMutex == nil { config.offlineMutex = &sync.RWMutex{} } + // initialize internal logger config.internalLogger = &fflog.FFLogger{ LeveledLogger: config.LeveledLogger, LegacyLogger: config.Logger, @@ -81,70 +75,134 @@ func New(config Config) (*GoFeatureFlag, error) { config: config, } - if !config.Offline { - notifiers := config.Notifiers - notifiers = append(notifiers, &logsnotifier.Notifier{Logger: config.internalLogger}) + if config.Offline { + // in case we are in offline mode, we don't need to initialize the cache since we will not use it. + goFF.config.internalLogger.Info("GO Feature Flag is in offline mode") + return goFF, nil + } + + notificationService := initializeNotificationService(config) + + // init internal cache + goFF.cache = cache.New(notificationService, config.PersistentFlagConfigurationFile, config.internalLogger) - notificationService := cache.NewNotificationService(notifiers) - goFF.cache = cache.New(notificationService, config.PersistentFlagConfigurationFile, config.internalLogger) + retrieverManager, err := initializeRetrieverManager(config) + if err != nil && (retrieverManager == nil || !config.StartWithRetrieverError) { + return nil, fmt.Errorf("impossible to initialize the retrievers, please check your configuration: %v", err) + } + goFF.retrieverManager = retrieverManager - retrievers, err := config.GetRetrievers() - if err != nil { + // first retrieval of the flags + if err := retrieveFlagsAndUpdateCache(goFF.config, goFF.cache, goFF.retrieverManager, true); err != nil { + if err := handleFirstRetrieverError(config, goFF.config.internalLogger, goFF.cache, err); err != nil { return nil, err } - goFF.retrieverManager = retriever.NewManager(config.Context, retrievers, config.internalLogger) - err = goFF.retrieverManager.Init(config.Context) - if err != nil && !config.StartWithRetrieverError { - return nil, fmt.Errorf("impossible to initialize the retrievers, please check your configuration: %v", err) - } + } - err = retrieveFlagsAndUpdateCache(goFF.config, goFF.cache, goFF.retrieverManager, true) - if err != nil { - switch { - case config.PersistentFlagConfigurationFile != "": - errPersist := retrievePersistentLocalDisk(config.Context, config, goFF) - if errPersist != nil && !config.StartWithRetrieverError { - return nil, fmt.Errorf("impossible to use the persistent flag configuration file: %v "+ - "[original error: %v]", errPersist, err) - } - case !config.StartWithRetrieverError: - return nil, fmt.Errorf("impossible to retrieve the flags, please check your configuration: %v", err) - default: - // We accept to start with a retriever error, we will serve only default value - goFF.config.internalLogger.Error("Impossible to retrieve the flags, starting with the "+ - "retriever error", slog.Any("error", err)) + // start the background task to update the flags periodically + if config.PollingInterval > 0 { + goFF.bgUpdater = newBackgroundUpdater(config.PollingInterval, config.EnablePollingJitter) + go goFF.startFlagUpdaterDaemon() + } + + goFF.featureEventDataExporter, 
goFF.trackingEventDataExporter = initializeDataExporters(config, goFF.config.internalLogger) + config.internalLogger.Debug("GO Feature Flag is initialized") + return goFF, nil +} + +// adjustPollingInterval is a function that will check the polling interval and set it to the minimum value if it is +// lower than 1 second. It also set the default value to 60 seconds if the polling interval is 0. +func adjustPollingInterval(pollingInterval time.Duration) time.Duration { + switch { + case pollingInterval == 0: + // The default value for the poll interval is 60 seconds + return 60 * time.Second + case pollingInterval > 0 && pollingInterval < time.Second: + // the minimum value for the polling policy is 1 second + return time.Second + default: + return pollingInterval + } +} + +// initializeNotificationService is a function that will initialize the notification service with the notifiers +func initializeNotificationService(config Config) notification.Service { + notifiers := config.Notifiers + notifiers = append(notifiers, &logsnotifier.Notifier{Logger: config.internalLogger}) + return notification.NewService(notifiers) +} + +// initializeRetrieverManager is a function that will initialize the retriever manager with the retrievers +func initializeRetrieverManager(config Config) (*retriever.Manager, error) { + retrievers, err := config.GetRetrievers() + if err != nil { + return nil, err + } + manager := retriever.NewManager(config.Context, retrievers, config.internalLogger) + err = manager.Init(config.Context) + return manager, err +} + +func initializeDataExporters(config Config, logger *fflog.FFLogger) ( + exporter.Manager[exporter.FeatureEvent], exporter.Manager[exporter.TrackingEvent]) { + exporters := config.GetDataExporters() + featureEventExporterConfigs := make([]exporter.Config, 0) + trackingEventExporterConfigs := make([]exporter.Config, 0) + if len(exporters) > 0 { + for _, exp := range exporters { + c := exporter.Config{ + Exporter: exp.Exporter, + FlushInterval: exp.FlushInterval, + MaxEventInMemory: exp.MaxEventInMemory, + } + if exp.ExporterEventType == TrackingEventExporter { + trackingEventExporterConfigs = append(trackingEventExporterConfigs, c) + continue } + featureEventExporterConfigs = append(featureEventExporterConfigs, c) } + } - if config.PollingInterval > 0 { - goFF.bgUpdater = newBackgroundUpdater(config.PollingInterval, config.EnablePollingJitter) - go goFF.startFlagUpdaterDaemon() - } + var trackingEventManager exporter.Manager[exporter.TrackingEvent] + if len(trackingEventExporterConfigs) > 0 { + trackingEventManager = exporter.NewManager[exporter.TrackingEvent]( + config.Context, trackingEventExporterConfigs, config.ExporterCleanQueueInterval, logger) + trackingEventManager.Start() + } - exporters := goFF.config.GetDataExporters() - if len(exporters) > 0 { - // init the data exporter - expConfigs := make([]exporter.Config, len(exporters)) - for index, exp := range exporters { - expConfigs[index] = exporter.Config{ - Exporter: exp.Exporter, - FlushInterval: exp.FlushInterval, - MaxEventInMemory: exp.MaxEventInMemory, - } - } - goFF.dataExporter = exporter.NewManager[exporter.FeatureEvent]( - config.Context, expConfigs, config.ExporterCleanQueueInterval, goFF.config.internalLogger) - go goFF.dataExporter.Start() + var featureEventManager exporter.Manager[exporter.FeatureEvent] + if len(featureEventExporterConfigs) > 0 { + featureEventManager = exporter.NewManager[exporter.FeatureEvent]( + config.Context, featureEventExporterConfigs, config.ExporterCleanQueueInterval, 
logger) + featureEventManager.Start() + } + return featureEventManager, trackingEventManager +} + +// handleFirstRetrieverError is a function that will handle the first error when trying to retrieve +// the flags the first time when starting GO Feature Flag. +func handleFirstRetrieverError(config Config, logger *fflog.FFLogger, cache cache.Manager, err error) error { + switch { + case config.PersistentFlagConfigurationFile != "": + errPersist := retrievePersistentLocalDisk(config.Context, config, cache) + if errPersist != nil && !config.StartWithRetrieverError { + return fmt.Errorf("impossible to use the persistent flag configuration file: %v "+ + "[original error: %v]", errPersist, err) } + case !config.StartWithRetrieverError: + return fmt.Errorf("impossible to retrieve the flags, please check your configuration: %v", err) + default: + // We accept to start with a retriever error, we will serve only default value + logger.Error("Impossible to retrieve the flags, starting with the "+ + "retriever error", slog.Any("error", err)) } - config.internalLogger.Debug("GO Feature Flag is initialized") - return goFF, nil + return nil } // retrievePersistentLocalDisk is a function used in case we are not able to retrieve any flag when starting // GO Feature Flag. // This function will look at any pre-existent persistent configuration and start with it. -func retrievePersistentLocalDisk(ctx context.Context, config Config, goFF *GoFeatureFlag) error { +func retrievePersistentLocalDisk(ctx context.Context, config Config, cache cache.Manager) error { if config.PersistentFlagConfigurationFile != "" { config.internalLogger.Error("Impossible to retrieve your flag configuration, trying to use the persistent"+ " flag configuration file.", slog.String("path", config.PersistentFlagConfigurationFile)) @@ -158,7 +216,7 @@ func retrievePersistentLocalDisk(ctx context.Context, config Config, goFF *GoFea return err } defer func() { _ = fallBackRetrieverManager.Shutdown(ctx) }() - err = retrieveFlagsAndUpdateCache(goFF.config, goFF.cache, fallBackRetrieverManager, true) + err = retrieveFlagsAndUpdateCache(config, cache, fallBackRetrieverManager, true) if err != nil { return err } @@ -181,8 +239,8 @@ func (g *GoFeatureFlag) Close() { g.bgUpdater.close() } - if g.dataExporter != nil { - g.dataExporter.Stop() + if g.featureEventDataExporter != nil { + g.featureEventDataExporter.Stop() } if g.retrieverManager != nil { _ = g.retrieverManager.Shutdown(g.config.Context) diff --git a/internal/cache/cache_manager.go b/internal/cache/cache_manager.go index 836b5e5ac5e..8003a19d014 100644 --- a/internal/cache/cache_manager.go +++ b/internal/cache/cache_manager.go @@ -12,6 +12,7 @@ import ( "github.com/BurntSushi/toml" "github.com/google/go-cmp/cmp" "github.com/thomaspoignant/go-feature-flag/internal/flag" + "github.com/thomaspoignant/go-feature-flag/internal/notification" "github.com/thomaspoignant/go-feature-flag/model/dto" "github.com/thomaspoignant/go-feature-flag/utils/fflog" "gopkg.in/yaml.v3" @@ -29,13 +30,13 @@ type Manager interface { type cacheManagerImpl struct { inMemoryCache Cache mutex sync.RWMutex - notificationService Service + notificationService notification.Service latestUpdate time.Time logger *fflog.FFLogger persistentFlagConfigurationFile string } -func New(notificationService Service, persistentFlagConfigurationFile string, logger *fflog.FFLogger) Manager { +func New(notificationService notification.Service, persistentFlagConfigurationFile string, logger *fflog.FFLogger) Manager { return 
&cacheManagerImpl{ logger: logger, inMemoryCache: NewInMemoryCache(logger), diff --git a/internal/cache/cache_manager_test.go b/internal/cache/cache_manager_test.go index 48f99b225dc..d4646b4b1c8 100644 --- a/internal/cache/cache_manager_test.go +++ b/internal/cache/cache_manager_test.go @@ -9,6 +9,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/thomaspoignant/go-feature-flag/internal/cache" "github.com/thomaspoignant/go-feature-flag/internal/flag" + "github.com/thomaspoignant/go-feature-flag/internal/notification" "github.com/thomaspoignant/go-feature-flag/model/dto" "github.com/thomaspoignant/go-feature-flag/notifier" "github.com/thomaspoignant/go-feature-flag/testutils/mock" @@ -248,7 +249,7 @@ variation = "false_var" for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fCache := cache.New(cache.NewNotificationService([]notifier.Notifier{}), "", + fCache := cache.New(notification.NewService([]notifier.Notifier{}), "", &fflog.FFLogger{LeveledLogger: slog.Default()}) newFlags, err := fCache.ConvertToFlagStruct(tt.args.loadedFlags, tt.flagFormat) if tt.wantErr { @@ -412,7 +413,7 @@ test-flag2: for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - fCache := cache.New(cache.NewNotificationService([]notifier.Notifier{}), "", nil) + fCache := cache.New(notification.NewService([]notifier.Notifier{}), "", nil) newFlags, err := fCache.ConvertToFlagStruct(tt.args.loadedFlags, tt.flagFormat) if tt.wantErr { assert.Error(t, err) @@ -453,7 +454,7 @@ func Test_cacheManagerImpl_GetLatestUpdateDate(t *testing.T) { trackEvents: false `) - fCache := cache.New(cache.NewNotificationService([]notifier.Notifier{}), "", nil) + fCache := cache.New(notification.NewService([]notifier.Notifier{}), "", nil) timeBefore := fCache.GetLatestUpdateDate() newFlags, _ := fCache.ConvertToFlagStruct(loadedFlags, "yaml") _ = fCache.UpdateCache(newFlags, &fflog.FFLogger{LeveledLogger: slog.Default()}, true) @@ -485,7 +486,7 @@ func Test_persistCacheAndRestartCacheWithIt(t *testing.T) { err = yaml.Unmarshal(loadedFlags, &loadedFlagsMap) assert.NoError(t, err) - fCache := cache.New(cache.NewNotificationService([]notifier.Notifier{}), file.Name(), nil) + fCache := cache.New(notification.NewService([]notifier.Notifier{}), file.Name(), nil) err = fCache.UpdateCache(loadedFlagsMap, &fflog.FFLogger{LeveledLogger: slog.Default()}, true) assert.NoError(t, err) allFlags1, err := fCache.AllFlags() @@ -494,7 +495,7 @@ func Test_persistCacheAndRestartCacheWithIt(t *testing.T) { time.Sleep(100 * time.Millisecond) // waiting to let the go routine write in the file // we start a new cache with the file persisted - fCache2 := cache.New(cache.NewNotificationService([]notifier.Notifier{}), "", nil) + fCache2 := cache.New(notification.NewService([]notifier.Notifier{}), "", nil) content, err := os.ReadFile(file.Name()) assert.NoError(t, err) loadedFlagsMap2 := map[string]dto.DTO{} diff --git a/internal/cache/notification_service.go b/internal/notification/notification_service.go similarity index 95% rename from internal/cache/notification_service.go rename to internal/notification/notification_service.go index 2e167b055b7..0fe915b5466 100644 --- a/internal/cache/notification_service.go +++ b/internal/notification/notification_service.go @@ -1,4 +1,4 @@ -package cache +package notification import ( "log/slog" @@ -15,7 +15,7 @@ type Service interface { Notify(oldCache map[string]flag.Flag, newCache map[string]flag.Flag, log *fflog.FFLogger) } -func NewNotificationService(notifiers []notifier.Notifier) Service { 
+func NewService(notifiers []notifier.Notifier) Service { return ¬ificationService{ Notifiers: notifiers, waitGroup: &sync.WaitGroup{}, diff --git a/internal/cache/notification_service_priv_test.go b/internal/notification/notification_service_priv_test.go similarity index 88% rename from internal/cache/notification_service_priv_test.go rename to internal/notification/notification_service_priv_test.go index cbb4930e176..764f56be784 100644 --- a/internal/cache/notification_service_priv_test.go +++ b/internal/notification/notification_service_priv_test.go @@ -1,6 +1,6 @@ //go:build !race -package cache_test +package notification_test import ( "fmt" @@ -10,8 +10,8 @@ import ( "github.com/stretchr/testify/assert" "github.com/thejerf/slogassert" - "github.com/thomaspoignant/go-feature-flag/internal/cache" "github.com/thomaspoignant/go-feature-flag/internal/flag" + "github.com/thomaspoignant/go-feature-flag/internal/notification" "github.com/thomaspoignant/go-feature-flag/notifier" "github.com/thomaspoignant/go-feature-flag/testutils/testconvert" "github.com/thomaspoignant/go-feature-flag/utils/fflog" @@ -19,7 +19,7 @@ import ( func Test_notificationService_callNotifier(t *testing.T) { n := &NotifierMock{} - c := cache.NewNotificationService([]notifier.Notifier{n}) + c := notification.NewService([]notifier.Notifier{n}) oldCache := map[string]flag.Flag{ "yo": &flag.InternalFlag{Version: testconvert.String("1.0")}, } @@ -33,7 +33,7 @@ func Test_notificationService_callNotifier(t *testing.T) { func Test_notificationService_no_difference(t *testing.T) { n := &NotifierMock{} - c := cache.NewNotificationService([]notifier.Notifier{n}) + c := notification.NewService([]notifier.Notifier{n}) oldCache := map[string]flag.Flag{ "yo": &flag.InternalFlag{Version: testconvert.String("1.0")}, } @@ -49,7 +49,7 @@ func Test_notificationService_with_error(t *testing.T) { handler := slogassert.New(t, slog.LevelDebug, nil) logger := slog.New(handler) n := &NotifierMock{WithError: true} - c := cache.NewNotificationService([]notifier.Notifier{n}) + c := notification.NewService([]notifier.Notifier{n}) oldCache := map[string]flag.Flag{ "yo": &flag.InternalFlag{Version: testconvert.String("1.0")}, } diff --git a/internal/cache/notification_service_test.go b/internal/notification/notification_service_test.go similarity index 99% rename from internal/cache/notification_service_test.go rename to internal/notification/notification_service_test.go index 83e71878f9a..2b68b8374c7 100644 --- a/internal/cache/notification_service_test.go +++ b/internal/notification/notification_service_test.go @@ -1,4 +1,4 @@ -package cache +package notification import ( "sync" diff --git a/testutils/mock/tracking_event_exporter_mock.go b/testutils/mock/tracking_event_exporter_mock.go new file mode 100644 index 00000000000..1b80f411ac8 --- /dev/null +++ b/testutils/mock/tracking_event_exporter_mock.go @@ -0,0 +1,65 @@ +package mock + +import ( + "context" + "sync" + + "github.com/thomaspoignant/go-feature-flag/exporter" + "github.com/thomaspoignant/go-feature-flag/utils/fflog" +) + +type TrackingEventExporter struct { + ExportedEvents []exporter.TrackingEvent + Err error + ExpectedNumberErr int + CurrentNumberErr int + Bulk bool + + mutex sync.Mutex + once sync.Once +} + +func (m *TrackingEventExporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { + m.once.Do(m.initMutex) + m.mutex.Lock() + defer m.mutex.Unlock() + switch events := any(events).(type) { + case []exporter.ExportableEvent: + t := 
make([]exporter.TrackingEvent, len(events)) + for i, v := range events { + t[i] = v.(exporter.TrackingEvent) + } + m.ExportedEvents = append(m.ExportedEvents, t...) + break + case []exporter.TrackingEvent: + m.ExportedEvents = append(m.ExportedEvents, events...) + break + + } + if m.Err != nil { + if m.ExpectedNumberErr > m.CurrentNumberErr { + m.CurrentNumberErr++ + return m.Err + } + } + return nil +} + +func (m *TrackingEventExporter) GetExportedEvents() []exporter.ExportableEvent { + m.once.Do(m.initMutex) + m.mutex.Lock() + defer m.mutex.Unlock() + var trackingEvents []exporter.ExportableEvent + for _, event := range m.ExportedEvents { + trackingEvents = append(trackingEvents, event) + } + return trackingEvents +} + +func (m *TrackingEventExporter) IsBulk() bool { + return m.Bulk +} + +func (m *TrackingEventExporter) initMutex() { + m.mutex = sync.Mutex{} +} diff --git a/tracking.go b/tracking.go new file mode 100644 index 00000000000..2fbb8cfd414 --- /dev/null +++ b/tracking.go @@ -0,0 +1,29 @@ +package ffclient + +import ( + "time" + + "github.com/thomaspoignant/go-feature-flag/exporter" + "github.com/thomaspoignant/go-feature-flag/ffcontext" +) + +// Tracking is used to track an event. +func (g *GoFeatureFlag) Tracking( + trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { + if g != nil && g.trackingEventDataExporter != nil { + contextKind := "user" + if ctx.IsAnonymous() { + contextKind = "anonymousUser" + } + event := exporter.TrackingEvent{ + Kind: "tracking", + ContextKind: contextKind, + UserKey: ctx.GetKey(), + CreationDate: time.Now().Unix(), + Key: trackingEventName, + EvaluationContext: ctx, + TrackingDetails: trackingEventDetails, + } + g.trackingEventDataExporter.AddEvent(event) + } +} diff --git a/tracking_test.go b/tracking_test.go new file mode 100644 index 00000000000..89b6e6d634e --- /dev/null +++ b/tracking_test.go @@ -0,0 +1,41 @@ +package ffclient_test + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + ffclient "github.com/thomaspoignant/go-feature-flag" + "github.com/thomaspoignant/go-feature-flag/ffcontext" + "github.com/thomaspoignant/go-feature-flag/retriever/fileretriever" + "github.com/thomaspoignant/go-feature-flag/testutils/mock" +) + +func TestValidTrackingEvent(t *testing.T) { + exp := mock.TrackingEventExporter{Bulk: false} + goff, err := ffclient.New(ffclient.Config{ + PollingInterval: 500 * time.Millisecond, + Retriever: &fileretriever.Retriever{Path: "./testdata/flag-config.yaml"}, + DataExporters: []ffclient.DataExporter{ + { + FlushInterval: 100 * time.Millisecond, + MaxEventInMemory: 100, + Exporter: &exp, + ExporterEventType: ffclient.TrackingEventExporter, + }, + }, + }) + assert.NoError(t, err) + + goff.Tracking("my-feature-flag", + ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da").AddCustom("admin", true).Build(), + map[string]interface{}{"additional data": "value"}) + + assert.Equal(t, 1, len(exp.ExportedEvents)) + assert.Equal(t, "1668d845-051d-4dd9-907a-7ebe6aa2c9da", exp.ExportedEvents[0].UserKey) + assert.Equal(t, "my-feature-flag", exp.ExportedEvents[0].Key) + assert.Equal(t, + ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da").AddCustom("admin", true).Build(), + exp.ExportedEvents[0].EvaluationContext) + assert.Equal(t, map[string]interface{}{"additional data": "value"}, exp.ExportedEvents[0].TrackingDetails) +} diff --git a/variation.go b/variation.go index e154d3ac067..d381fdaa278 100644 --- 
a/variation.go +++ b/variation.go @@ -254,9 +254,9 @@ func (g *GoFeatureFlag) getFlagFromCache(flagKey string) (flag.Flag, error) { // CollectEventData is collecting events and sending them to the data exporter to be stored. func (g *GoFeatureFlag) CollectEventData(event exporter.FeatureEvent) { - if g != nil && g.dataExporter != nil { + if g != nil && g.featureEventDataExporter != nil { // Add event in the exporter - g.dataExporter.AddEvent(event) + g.featureEventDataExporter.AddEvent(event) } } diff --git a/variation_test.go b/variation_test.go index b09eacd40a0..cdb00d7a4eb 100644 --- a/variation_test.go +++ b/variation_test.go @@ -328,7 +328,7 @@ func TestBoolVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -733,7 +733,7 @@ func TestBoolVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -1047,7 +1047,7 @@ func TestFloat64Variation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -1370,7 +1370,7 @@ func TestFloat64VariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -1669,7 +1669,7 @@ func TestJSONArrayVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -1981,7 +1981,7 @@ func TestJSONArrayVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -2260,7 +2260,7 @@ func TestJSONVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -2498,7 +2498,7 @@ func TestJSONVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), 
[]exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -2780,7 +2780,7 @@ func TestStringVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -3017,7 +3017,7 @@ func TestStringVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -3329,7 +3329,7 @@ func TestIntVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -3608,7 +3608,7 @@ func TestIntVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, @@ -3981,7 +3981,7 @@ func TestRawVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - dataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ { FlushInterval: 0, MaxEventInMemory: 0, From b5dfb948274ff625a94ded8a7b9c990412a82e8f Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 13 Mar 2025 21:14:52 +0100 Subject: [PATCH 09/26] fix lint Signed-off-by: Thomas Poignant --- feature_flag.go | 3 ++- internal/cache/cache_manager.go | 5 ++++- testutils/mock/tracking_event_exporter_mock.go | 3 +-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/feature_flag.go b/feature_flag.go index b4f62074257..8c70dcae4f4 100644 --- a/feature_flag.go +++ b/feature_flag.go @@ -105,7 +105,8 @@ func New(config Config) (*GoFeatureFlag, error) { go goFF.startFlagUpdaterDaemon() } - goFF.featureEventDataExporter, goFF.trackingEventDataExporter = initializeDataExporters(config, goFF.config.internalLogger) + goFF.featureEventDataExporter, goFF.trackingEventDataExporter = + initializeDataExporters(config, goFF.config.internalLogger) config.internalLogger.Debug("GO Feature Flag is initialized") return goFF, nil } diff --git a/internal/cache/cache_manager.go b/internal/cache/cache_manager.go index 8003a19d014..ce692d9a789 100644 --- a/internal/cache/cache_manager.go +++ b/internal/cache/cache_manager.go @@ -36,7 +36,10 @@ type cacheManagerImpl struct { persistentFlagConfigurationFile string } -func New(notificationService notification.Service, persistentFlagConfigurationFile string, logger *fflog.FFLogger) Manager { +func New( + notificationService notification.Service, + persistentFlagConfigurationFile string, + logger *fflog.FFLogger) Manager { return &cacheManagerImpl{ logger: logger, inMemoryCache: NewInMemoryCache(logger), diff --git a/testutils/mock/tracking_event_exporter_mock.go 
b/testutils/mock/tracking_event_exporter_mock.go index 1b80f411ac8..5976f0ad91f 100644 --- a/testutils/mock/tracking_event_exporter_mock.go +++ b/testutils/mock/tracking_event_exporter_mock.go @@ -34,7 +34,6 @@ func (m *TrackingEventExporter) Export(_ context.Context, _ *fflog.FFLogger, eve case []exporter.TrackingEvent: m.ExportedEvents = append(m.ExportedEvents, events...) break - } if m.Err != nil { if m.ExpectedNumberErr > m.CurrentNumberErr { @@ -49,7 +48,7 @@ func (m *TrackingEventExporter) GetExportedEvents() []exporter.ExportableEvent { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() - var trackingEvents []exporter.ExportableEvent + trackingEvents := make([]exporter.ExportableEvent, 0) for _, event := range m.ExportedEvents { trackingEvents = append(trackingEvents, event) } From a215304937b616efcb96a754638011b5568c845f Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 13 Mar 2025 21:46:12 +0100 Subject: [PATCH 10/26] Add parquet specific for tracking event Signed-off-by: Thomas Poignant --- exporter/fileexporter/exporter.go | 36 +++++++++++++++++++++++++++++++++--- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/exporter/fileexporter/exporter.go b/exporter/fileexporter/exporter.go index e60f2681789..158fa00b94c 100644 --- a/exporter/fileexporter/exporter.go +++ b/exporter/fileexporter/exporter.go @@ -131,18 +131,22 @@ func (f *Exporter) writeFile(filePath string, events []exporter.ExportableEvent) func (f *Exporter) writeParquet(filePath string, events []exporter.ExportableEvent) error { parquetFeatureEvents := make([]exporter.FeatureEvent, 0) + parquetTrackingEvents := make([]exporter.TrackingEvent, 0) for _, event := range events { switch ev := any(event).(type) { case exporter.FeatureEvent: parquetFeatureEvents = append(parquetFeatureEvents, ev) break + case exporter.TrackingEvent: + parquetTrackingEvents = append(parquetTrackingEvents, ev) + break default: // do nothing } } - - // TODO: create same logic for TrackingEvents - + if len(parquetTrackingEvents) > 0 { + return f.writeParquetTrackingEvent(filePath, parquetTrackingEvents) + } return f.writeParquetFeatureEvent(filePath, parquetFeatureEvents) } @@ -176,3 +180,29 @@ func (f *Exporter) writeParquetFeatureEvent(filePath string, events []exporter.F return pw.WriteStop() } + +func (f *Exporter) writeParquetTrackingEvent(filePath string, events []exporter.TrackingEvent) error { + fw, err := local.NewLocalFileWriter(filePath) + if err != nil { + return err + } + defer fw.Close() + + pw, err := writer.NewParquetWriter(fw, new(exporter.TrackingEvent), int64(runtime.NumCPU())) + if err != nil { + return err + } + + pw.CompressionType = parquet.CompressionCodec_SNAPPY + if ct, err := parquet.CompressionCodecFromString(f.ParquetCompressionCodec); err == nil { + pw.CompressionType = ct + } + + for _, event := range events { + if err = pw.Write(event); err != nil { + return fmt.Errorf("error while writing the export file: %v", err) + } + } + + return pw.WriteStop() +} From e36dfe2fec274b00adbaf5526d5057323fb63071 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Wed, 26 Mar 2025 23:25:41 +0100 Subject: [PATCH 11/26] Add global tracking func Signed-off-by: Thomas Poignant --- tracking.go | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tracking.go b/tracking.go index 2fbb8cfd414..e0749a6d894 100644 --- a/tracking.go +++ b/tracking.go @@ -8,6 +8,7 @@ import ( ) // Tracking is used to track an event. +// Note: Use this function only if you are using multiple go-feature-flag instances.
func (g *GoFeatureFlag) Tracking( trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { if g != nil && g.trackingEventDataExporter != nil { @@ -27,3 +28,9 @@ func (g *GoFeatureFlag) Tracking( g.trackingEventDataExporter.AddEvent(event) } } + +// Tracking is used to track an event. +func Tracking( + trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { + ff.Tracking(trackingEventName, ctx, trackingEventDetails) +} From a2e458cfa164550edafa6d57ae96ec589c6d2245 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Wed, 26 Mar 2025 23:44:47 +0100 Subject: [PATCH 12/26] stop tracking event exporter when stop Signed-off-by: Thomas Poignant --- feature_flag.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/feature_flag.go b/feature_flag.go index 8c70dcae4f4..c102452c8d0 100644 --- a/feature_flag.go +++ b/feature_flag.go @@ -243,6 +243,11 @@ func (g *GoFeatureFlag) Close() { if g.featureEventDataExporter != nil { g.featureEventDataExporter.Stop() } + + if g.trackingEventDataExporter != nil { + g.trackingEventDataExporter.Stop() + } + if g.retrieverManager != nil { _ = g.retrieverManager.Shutdown(g.config.Context) } From a0f5a135797b53c21d468bc5d1c6e9265e737bc9 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 27 Mar 2025 14:58:44 +0100 Subject: [PATCH 13/26] Rename method track Signed-off-by: Thomas Poignant --- tracking.go | 10 +++++----- tracking_test.go | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/tracking.go b/tracking.go index e0749a6d894..91c764fca69 100644 --- a/tracking.go +++ b/tracking.go @@ -7,9 +7,9 @@ import ( "github.com/thomaspoignant/go-feature-flag/ffcontext" ) -// Tracking is used to track an event. +// Track is used to track an event. // Note: Use this function only if you are using multiple go-feature-flag instances. -func (g *GoFeatureFlag) Tracking( +func (g *GoFeatureFlag) Track( trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { if g != nil && g.trackingEventDataExporter != nil { contextKind := "user" @@ -29,8 +29,8 @@ func (g *GoFeatureFlag) Tracking( } } -// Tracking is used to track an event. -func Tracking( +// Track is used to track an event. 
+func Track( trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { - ff.Tracking(trackingEventName, ctx, trackingEventDetails) + ff.Track(trackingEventName, ctx, trackingEventDetails) } diff --git a/tracking_test.go b/tracking_test.go index 89b6e6d634e..2f1bdce0c13 100644 --- a/tracking_test.go +++ b/tracking_test.go @@ -27,7 +27,7 @@ func TestValidTrackingEvent(t *testing.T) { }) assert.NoError(t, err) - goff.Tracking("my-feature-flag", + goff.Track("my-feature-flag", ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da").AddCustom("admin", true).Build(), map[string]interface{}{"additional data": "value"}) From c856b81ae72ed53388af761ff499eefd6174e5b3 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Tue, 1 Apr 2025 09:50:03 +0200 Subject: [PATCH 14/26] fix flaky tests Signed-off-by: Thomas Poignant --- exporter/even_store_test.go | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/exporter/even_store_test.go b/exporter/even_store_test.go index 8836c0a2b31..307471dcb5b 100644 --- a/exporter/even_store_test.go +++ b/exporter/even_store_test.go @@ -154,10 +154,8 @@ func Test_MultipleConsumersMultipleGORoutines(t *testing.T) { time.Sleep(50 * time.Millisecond) wg := &sync.WaitGroup{} - consumeFunc := func(eventStore exporter.EventStore[testutils.ExportableMockEvent], consumerName string) { - wg.Add(1) + consumeFunc := func(eventStore exporter.EventStore[testutils.ExportableMockEvent], consumerName string, eventCounters *map[string]int) { defer wg.Done() - err := eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { assert.True(t, len(events) > 0) @@ -168,15 +166,22 @@ func Test_MultipleConsumersMultipleGORoutines(t *testing.T) { err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { - assert.True(t, len(events) > 0) + if eventCounters != nil { + (*eventCounters)[consumerName] = len(events) + } return nil }) assert.Nil(t, err) } - go consumeFunc(eventStore, consumerNames[0]) - go consumeFunc(eventStore, consumerNames[1]) + wg.Add(2) + eventCounters := map[string]int{} + go consumeFunc(eventStore, consumerNames[0], &eventCounters) + go consumeFunc(eventStore, consumerNames[1], &eventCounters) wg.Wait() + + assert.Greater(t, eventCounters[consumerNames[0]], 0) + assert.Greater(t, eventCounters[consumerNames[1]], 0) } func Test_ProcessPendingEventInError(t *testing.T) { From fd3142f59032effe1bf5c10feefefa8c0305d6ce Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Tue, 1 Apr 2025 16:40:36 +0100 Subject: [PATCH 15/26] manage both kind of events inside the collector events Signed-off-by: Thomas Poignant --- .../controller/collect_eval_data.go | 88 +++++++++++++++---- .../controller/collect_eval_data_test.go | 64 ++++++++++++++ .../model/collect_eval_data_request.go | 2 +- ...valid_request_mix_tracking_evaluation.json | 37 ++++++++ exporter/tracking_event.go | 4 +- exporter/tracking_event_test.go | 17 ++-- ffcontext/context.go | 20 +++++ tracking.go | 2 +- tracking_test.go | 3 +- variation.go | 8 ++ 10 files changed, 212 insertions(+), 33 deletions(-) create mode 100644 cmd/relayproxy/testdata/controller/collect_eval_data/valid_request_mix_tracking_evaluation.json diff --git a/cmd/relayproxy/controller/collect_eval_data.go b/cmd/relayproxy/controller/collect_eval_data.go index 6ce38f9ccec..2c919e2578d 100644 --- 
a/cmd/relayproxy/controller/collect_eval_data.go +++ b/cmd/relayproxy/controller/collect_eval_data.go @@ -1,15 +1,18 @@ package controller import ( + "encoding/json" "fmt" "net/http" "strconv" + "github.com/go-viper/mapstructure/v2" "github.com/labstack/echo/v4" ffclient "github.com/thomaspoignant/go-feature-flag" "github.com/thomaspoignant/go-feature-flag/cmd/relayproxy/config" "github.com/thomaspoignant/go-feature-flag/cmd/relayproxy/metric" "github.com/thomaspoignant/go-feature-flag/cmd/relayproxy/model" + "github.com/thomaspoignant/go-feature-flag/exporter" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/attribute" "go.uber.org/zap" @@ -59,28 +62,79 @@ func (h *collectEvalData) Handler(c echo.Context) error { _, span := tracer.Start(c.Request().Context(), "collectEventData") defer span.End() span.SetAttributes(attribute.Int("collectEventData.eventCollectionSize", len(reqBody.Events))) + counterTracking := 0 + counterEvaluation := 0 for _, event := range reqBody.Events { - if event.Source == "" { - event.Source = "PROVIDER_CACHE" + switch event["kind"] { + case "tracking": + e, err := convertTrackingEvent(event, h.logger) + if err != nil { + h.logger.Error("impossible to convert the event to a tracking event", zap.Error(err)) + continue + } + h.goFF.CollectTrackingEventData(e) + counterTracking++ + default: + e, err := convertFeatureEvent(event, reqBody.Meta, h.logger) + if err != nil { + h.logger.Error("impossible to convert the event to a feature event", zap.Error(err)) + continue + } + h.goFF.CollectEventData(e) + counterEvaluation++ } - // force the creation date to be a unix timestamp - if event.CreationDate > 9999999999 { - h.logger.Warn( - "creationDate received is in milliseconds, we convert it to seconds", - zap.Int64("creationDate", event.CreationDate)) - // if we receive a timestamp in milliseconds, we convert it to seconds - // but since it is totally possible to have a timestamp in seconds that is bigger than 9999999999 - // we will accept timestamp up to 9999999999 (2286-11-20 18:46:39 +0100 CET) - event.CreationDate, _ = strconv.ParseInt( - strconv.FormatInt(event.CreationDate, 10)[:10], 10, 64) - } - if reqBody.Meta != nil { - event.Metadata = reqBody.Meta - } - h.goFF.CollectEventData(event) } + span.SetAttributes(attribute.Int("collectEventData.trackingCollectionSize", counterTracking)) + span.SetAttributes(attribute.Int("collectEventData.evaluationCollectionSize", counterEvaluation)) h.metrics.IncCollectEvalData(float64(len(reqBody.Events))) return c.JSON(http.StatusOK, model.CollectEvalDataResponse{ IngestedContentCount: len(reqBody.Events), }) } + +func convertTrackingEvent(event map[string]any, logger *zap.Logger) (exporter.TrackingEvent, error) { + var e exporter.TrackingEvent + marshalled, err := json.Marshal(event) + if err != nil { + return exporter.TrackingEvent{}, err + } + err = json.Unmarshal(marshalled, &e) + if err != nil { + return exporter.TrackingEvent{}, err + } + e.CreationDate = formatCreationDate(e.CreationDate, logger) + return e, nil +} + +func convertFeatureEvent(event map[string]any, + metadata exporter.FeatureEventMetadata, + logger *zap.Logger) (exporter.FeatureEvent, error) { + var e exporter.FeatureEvent + err := mapstructure.Decode(event, &e) + if err != nil { + return exporter.FeatureEvent{}, err + } + if e.Source == "" { + e.Source = "PROVIDER_CACHE" + } + if metadata != nil { + e.Metadata = metadata + } + e.CreationDate = formatCreationDate(e.CreationDate, logger) + return e, nil +} + +func formatCreationDate(input int64, logger 
*zap.Logger) int64 { + if input > 9999999999 { + logger.Warn( + "creationDate received is in milliseconds, we convert it to seconds", + zap.Int64("creationDate", input)) + // if we receive a timestamp in milliseconds, we convert it to seconds + // but since it is totally possible to have a timestamp in seconds that is bigger than 9999999999 + // we will accept timestamp up to 9999999999 (2286-11-20 18:46:39 +0100 CET) + converted, _ := strconv.ParseInt( + strconv.FormatInt(input, 10)[:10], 10, 64) + return converted + } + return input +} diff --git a/cmd/relayproxy/controller/collect_eval_data_test.go b/cmd/relayproxy/controller/collect_eval_data_test.go index d31350e7363..e9b5375605e 100644 --- a/cmd/relayproxy/controller/collect_eval_data_test.go +++ b/cmd/relayproxy/controller/collect_eval_data_test.go @@ -2,6 +2,7 @@ package controller_test import ( "context" + "fmt" "io" "log/slog" "net/http" @@ -184,3 +185,66 @@ func Test_collect_eval_data_Handler(t *testing.T) { }) } } + +func Test_collect_tracking_and_evaluation_events(t *testing.T) { + evalExporter, err := os.CreateTemp("", "evalExport.json") + assert.NoError(t, err) + trackingExporter, err := os.CreateTemp("", "trackExport.json") + assert.NoError(t, err) + defer func() { + _ = os.Remove(evalExporter.Name()) + _ = os.Remove(trackingExporter.Name()) + }() + + // init go-feature-flag + goFF, _ := ffclient.New(ffclient.Config{ + PollingInterval: 10 * time.Second, + LeveledLogger: slog.Default(), + Context: context.Background(), + Retriever: &fileretriever.Retriever{ + Path: configFlagsLocation, + }, + + DataExporters: []ffclient.DataExporter{ + { + FlushInterval: 10 * time.Second, + MaxEventInMemory: 10000, + Exporter: &fileexporter.Exporter{Filename: evalExporter.Name()}, + }, + { + FlushInterval: 10 * time.Second, + MaxEventInMemory: 10000, + Exporter: &fileexporter.Exporter{Filename: trackingExporter.Name()}, + ExporterEventType: ffclient.TrackingEventExporter, + }, + }, + }) + logger, err := zap.NewDevelopment() + require.NoError(t, err) + ctrl := controller.NewCollectEvalData(goFF, metric.Metrics{}, logger) + + bodyReq, err := os.ReadFile( + "../testdata/controller/collect_eval_data/valid_request_mix_tracking_evaluation.json") + + e := echo.New() + rec := httptest.NewRecorder() + + req := httptest.NewRequest(echo.POST, "/v1/data/collector", strings.NewReader(string(bodyReq))) + req.Header.Set(echo.HeaderContentType, echo.MIMEApplicationJSON) + c := e.NewContext(req, rec) + c.SetPath("/v1/data/collector") + handlerErr := ctrl.Handler(c) + assert.NoError(t, handlerErr) + goFF.Close() + + fmt.Println("Evaluation events:") + evalEvents, err := os.ReadFile(evalExporter.Name()) + assert.NoError(t, err) + fmt.Println(string(evalEvents)) + + fmt.Println("Tracking events:") + trackingEvents, err := os.ReadFile(trackingExporter.Name()) + assert.NoError(t, err) + fmt.Println(string(trackingEvents)) + +} diff --git a/cmd/relayproxy/model/collect_eval_data_request.go b/cmd/relayproxy/model/collect_eval_data_request.go index 9891aa7503e..4d7ac1d435e 100644 --- a/cmd/relayproxy/model/collect_eval_data_request.go +++ b/cmd/relayproxy/model/collect_eval_data_request.go @@ -10,5 +10,5 @@ type CollectEvalDataRequest struct { Meta exporter.FeatureEventMetadata `json:"meta"` // Events is the list of the event we send in the payload - Events []exporter.FeatureEvent `json:"events"` + Events []map[string]any `json:"events"` } diff --git a/cmd/relayproxy/testdata/controller/collect_eval_data/valid_request_mix_tracking_evaluation.json 
b/cmd/relayproxy/testdata/controller/collect_eval_data/valid_request_mix_tracking_evaluation.json new file mode 100644 index 00000000000..14d9305f900 --- /dev/null +++ b/cmd/relayproxy/testdata/controller/collect_eval_data/valid_request_mix_tracking_evaluation.json @@ -0,0 +1,37 @@ +{ + "events": [ + { + "contextKind": "user", + "creationDate": 1680246000, + "default": false, + "key": "my-feature-flag", + "kind": "feature", + "userKey": "94a25909-20d8-40cc-8500-fee99b569345", + "value": "string", + "variation": "admin-variation", + "version": "v1.0.0" + }, + { + "kind": "tracking", + "creationDate": 1680246020, + "contextKind": "user", + "userKey": "94a25909-20d8-40cc-8500-fee99b569345", + "key": "my-feature-flag", + "evaluationContext": { + "targetingKey": "94a25909-20d8-40cc-8500-fee99b569345", + "name": "john doe", + "admin": true + }, + "trackingEventDetails": { + "value": "string", + "version": "v1.0.0" + } + } + ], + "meta": { + "environment": "production", + "sdkVersion": "v1.0.0", + "source": "my-source", + "timestamp": 1680246000 + } +} \ No newline at end of file diff --git a/exporter/tracking_event.go b/exporter/tracking_event.go index eb460d563b0..02069d50a51 100644 --- a/exporter/tracking_event.go +++ b/exporter/tracking_event.go @@ -5,8 +5,6 @@ import ( "encoding/json" "text/template" "time" - - "github.com/thomaspoignant/go-feature-flag/ffcontext" ) type TrackingEventDetails = map[string]interface{} @@ -33,7 +31,7 @@ type TrackingEvent struct { Key string `json:"key" example:"my-feature-flag" parquet:"name=key, type=BYTE_ARRAY, convertedtype=UTF8"` // EvaluationContext contains the evaluation context used for the tracking - EvaluationContext ffcontext.EvaluationContext `json:"evaluationContext" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` + EvaluationContext map[string]any `json:"evaluationContext" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` // TrackingDetails contains the details of the tracking event TrackingDetails TrackingEventDetails `json:"trackingEventDetails" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` diff --git a/exporter/tracking_event_test.go b/exporter/tracking_event_test.go index a17be3d872d..ff1bb6e750c 100644 --- a/exporter/tracking_event_test.go +++ b/exporter/tracking_event_test.go @@ -7,7 +7,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/thomaspoignant/go-feature-flag/exporter" - "github.com/thomaspoignant/go-feature-flag/ffcontext" ) func TestTrackingEvent_FormatInCSV(t *testing.T) { @@ -26,13 +25,13 @@ func TestTrackingEvent_FormatInCSV(t *testing.T) { UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", - EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").Build(), + EvaluationContext: map[string]any{"targetingKey": "ABCD"}, TrackingDetails: map[string]interface{}{ "event": "123", }, }, template: `{{ .Kind}};{{ .ContextKind}};{{ .UserKey}};{{ .CreationDate}};{{ .EvaluationContext}};{{ .TrackingDetails}}`, - want: `tracking;anonymousUser;ABCD;1617970547;{ABCD map[]};map[event:123]`, + want: `tracking;anonymousUser;ABCD;1617970547;map[targetingKey:ABCD];map[event:123]`, wantErr: assert.NoError, }, { @@ -43,13 +42,13 @@ func TestTrackingEvent_FormatInCSV(t *testing.T) { UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", - EvaluationContext: 
ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("toto", 123).Build(), + EvaluationContext: map[string]any{"targetingKey": "ABCD", "toto": 123}, TrackingDetails: map[string]interface{}{ "event": "123", }, }, template: `{{ .Kind}};{{ .ContextKind}};{{ .UserKey}};{{ .CreationDate}};{{ .EvaluationContext}};{{ .TrackingDetails}}`, - want: `tracking;anonymousUser;ABCD;1617970547;{ABCD map[toto:123]};map[event:123]`, + want: `tracking;anonymousUser;ABCD;1617970547;map[targetingKey:ABCD toto:123];map[event:123]`, wantErr: assert.NoError, }, } @@ -81,12 +80,12 @@ func TestTrackingEvent_FormatInJSON(t *testing.T) { UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", - EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").Build(), + EvaluationContext: map[string]any{"targetingKey": "ABCD"}, TrackingDetails: map[string]interface{}{ "event": "123", }, }, - want: `{"kind":"tracking","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","evaluationContext":{"targetingKey":"ABCD","attributes":{}},"trackingEventDetails":{"event":"123"}}`, + want: `{"kind":"tracking","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","evaluationContext":{"targetingKey":"ABCD"},"trackingEventDetails":{"event":"123"}}`, wantErr: assert.NoError, }, { @@ -97,12 +96,12 @@ func TestTrackingEvent_FormatInJSON(t *testing.T) { UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", - EvaluationContext: ffcontext.NewEvaluationContextBuilder("ABCD").AddCustom("toto", 123).Build(), + EvaluationContext: map[string]any{"targetingKey": "ABCD", "toto": 123}, TrackingDetails: map[string]interface{}{ "event": "123", }, }, - want: `{"kind":"tracking","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","evaluationContext":{"targetingKey":"ABCD","attributes":{"toto":123}},"trackingEventDetails":{"event":"123"}}`, + want: `{"kind":"tracking","contextKind":"anonymousUser","userKey":"ABCD","creationDate":1617970547,"key":"random-key","evaluationContext":{"targetingKey":"ABCD","toto":123},"trackingEventDetails":{"event":"123"}}`, wantErr: assert.NoError, }, } diff --git a/ffcontext/context.go b/ffcontext/context.go index e4125ec357c..d9fb518e84d 100644 --- a/ffcontext/context.go +++ b/ffcontext/context.go @@ -61,6 +61,20 @@ func (u EvaluationContext) MarshalJSON() ([]byte, error) { }) } +func (u EvaluationContext) UnmarshalJSON(data []byte) error { + var aux struct { + TargetingKey string `json:"targetingKey"` + Attributes value `json:"attributes"` + } + err := json.Unmarshal(data, &aux) + if err != nil { + return err + } + u.targetingKey = aux.TargetingKey + u.attributes = aux.Attributes + return nil +} + // GetKey return the unique targetingKey for the user. func (u EvaluationContext) GetKey() string { return u.targetingKey @@ -89,6 +103,12 @@ func (u EvaluationContext) AddCustomAttribute(name string, value interface{}) { } } +func (u EvaluationContext) ToMap() map[string]any { + resMap := u.attributes + resMap["targetingKey"] = u.targetingKey + return resMap +} + // ExtractGOFFProtectedFields extract the goff specific attributes from the evaluation context. 
func (u EvaluationContext) ExtractGOFFProtectedFields() GoffContextSpecifics { goff := GoffContextSpecifics{} diff --git a/tracking.go b/tracking.go index 91c764fca69..46db41f03cb 100644 --- a/tracking.go +++ b/tracking.go @@ -22,7 +22,7 @@ func (g *GoFeatureFlag) Track( UserKey: ctx.GetKey(), CreationDate: time.Now().Unix(), Key: trackingEventName, - EvaluationContext: ctx, + EvaluationContext: ctx.ToMap(), TrackingDetails: trackingEventDetails, } g.trackingEventDataExporter.AddEvent(event) diff --git a/tracking_test.go b/tracking_test.go index 2f1bdce0c13..fc8f2849307 100644 --- a/tracking_test.go +++ b/tracking_test.go @@ -34,8 +34,7 @@ func TestValidTrackingEvent(t *testing.T) { assert.Equal(t, 1, len(exp.ExportedEvents)) assert.Equal(t, "1668d845-051d-4dd9-907a-7ebe6aa2c9da", exp.ExportedEvents[0].UserKey) assert.Equal(t, "my-feature-flag", exp.ExportedEvents[0].Key) - assert.Equal(t, - ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da").AddCustom("admin", true).Build(), + assert.Equal(t, map[string]interface{}{"targetingKey": "1668d845-051d-4dd9-907a-7ebe6aa2c9da", "admin": true}, exp.ExportedEvents[0].EvaluationContext) assert.Equal(t, map[string]interface{}{"additional data": "value"}, exp.ExportedEvents[0].TrackingDetails) } diff --git a/variation.go b/variation.go index d381fdaa278..db5125b59e9 100644 --- a/variation.go +++ b/variation.go @@ -260,6 +260,14 @@ func (g *GoFeatureFlag) CollectEventData(event exporter.FeatureEvent) { } } +// CollectTrackingEventData is collecting tracking events and sending them to the data exporter to be stored. +func (g *GoFeatureFlag) CollectTrackingEventData(event exporter.TrackingEvent) { + if g != nil && g.featureEventDataExporter != nil { + // Add event in the exporter + g.trackingEventDataExporter.AddEvent(event) + } +} + // notifyVariation is logging the evaluation result for a flag // if no logger is provided in the configuration we are not logging anything. 
func notifyVariation[T model.JSONType]( From 06b8bc3de8d1fef165296ee8ef3f28475466ee99 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Wed, 2 Apr 2025 15:27:40 +0100 Subject: [PATCH 16/26] fix linter Signed-off-by: Thomas Poignant --- .golangci.yml | 1 + .../controller/collect_eval_data.go | 14 +- .../controller/collect_eval_data_test.go | 14 +- .../model/collect_eval_data_request.go | 1 + exporter/azureexporter/exporter.go | 21 +- exporter/common_test.go | 15 +- exporter/data_exporter.go | 3 - exporter/data_exporter_test.go | 16 +- exporter/even_store_test.go | 107 +++-- exporter/fileexporter/exporter.go | 33 +- exporter/gcstorageexporter/exporter.go | 14 +- exporter/kafkaexporter/exporter.go | 10 +- exporter/kinesisexporter/exporter.go | 11 +- exporter/kinesisexporter/exporter_test.go | 14 +- exporter/logsexporter/exporter.go | 6 +- exporter/pubsubexporter/exporter.go | 6 +- exporter/pubsubexporter/exporter_test.go | 14 +- exporter/s3exporter/exporter.go | 6 +- exporter/s3exporter/exporter_test.go | 23 +- exporter/s3exporterv2/exporter.go | 11 +- exporter/s3exporterv2/exporter_test.go | 26 +- exporter/sqsexporter/exporter.go | 6 +- exporter/webhookexporter/exporter.go | 11 +- exporter/webhookexporter/exporter_test.go | 12 +- feature_flag.go | 74 +++- ffcontext/context.go | 14 - go.mod | 2 +- internal/cache/cache_manager.go | 3 +- testutils/mock/exporter_mock.go | 18 +- .../mock/tracking_event_exporter_mock.go | 6 +- tracking.go | 10 +- tracking_test.go | 26 +- variation.go | 4 +- variation_test.go | 407 ++++++++++++------ 34 files changed, 680 insertions(+), 279 deletions(-) diff --git a/.golangci.yml b/.golangci.yml index becfb8f2b93..f5e2325eabe 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -131,6 +131,7 @@ linters: - lll - path: _test\.go linters: + - gosec - errcheck - funlen - maligned diff --git a/cmd/relayproxy/controller/collect_eval_data.go b/cmd/relayproxy/controller/collect_eval_data.go index d43f565b5d9..d9400bbf2a6 100644 --- a/cmd/relayproxy/controller/collect_eval_data.go +++ b/cmd/relayproxy/controller/collect_eval_data.go @@ -73,7 +73,10 @@ func (h *collectEvalData) Handler(c echo.Context) error { case "tracking": e, err := convertTrackingEvent(event, h.logger) if err != nil { - h.logger.Error("impossible to convert the event to a tracking event", zap.Error(err)) + h.logger.Error( + "impossible to convert the event to a tracking event", + zap.Error(err), + ) continue } h.goFF.CollectTrackingEventData(e) @@ -89,14 +92,19 @@ func (h *collectEvalData) Handler(c echo.Context) error { } } span.SetAttributes(attribute.Int("collectEventData.trackingCollectionSize", counterTracking)) - span.SetAttributes(attribute.Int("collectEventData.evaluationCollectionSize", counterEvaluation)) + span.SetAttributes( + attribute.Int("collectEventData.evaluationCollectionSize", counterEvaluation), + ) h.metrics.IncCollectEvalData(float64(len(reqBody.Events))) return c.JSON(http.StatusOK, model.CollectEvalDataResponse{ IngestedContentCount: len(reqBody.Events), }) } -func convertTrackingEvent(event map[string]any, logger *zap.Logger) (exporter.TrackingEvent, error) { +func convertTrackingEvent( + event map[string]any, + logger *zap.Logger, +) (exporter.TrackingEvent, error) { var e exporter.TrackingEvent marshalled, err := json.Marshal(event) if err != nil { diff --git a/cmd/relayproxy/controller/collect_eval_data_test.go b/cmd/relayproxy/controller/collect_eval_data_test.go index e9b5375605e..b31a0b2ef1b 100644 --- a/cmd/relayproxy/controller/collect_eval_data_test.go +++ 
b/cmd/relayproxy/controller/collect_eval_data_test.go @@ -187,6 +187,17 @@ func Test_collect_eval_data_Handler(t *testing.T) { } func Test_collect_tracking_and_evaluation_events(t *testing.T) { + // + // + // + // + // PLEASE REWORK THIS TEST + // + // + // + // + // + evalExporter, err := os.CreateTemp("", "evalExport.json") assert.NoError(t, err) trackingExporter, err := os.CreateTemp("", "trackExport.json") @@ -225,7 +236,7 @@ func Test_collect_tracking_and_evaluation_events(t *testing.T) { bodyReq, err := os.ReadFile( "../testdata/controller/collect_eval_data/valid_request_mix_tracking_evaluation.json") - + assert.NoError(t, err) e := echo.New() rec := httptest.NewRecorder() @@ -246,5 +257,4 @@ func Test_collect_tracking_and_evaluation_events(t *testing.T) { trackingEvents, err := os.ReadFile(trackingExporter.Name()) assert.NoError(t, err) fmt.Println(string(trackingEvents)) - } diff --git a/cmd/relayproxy/model/collect_eval_data_request.go b/cmd/relayproxy/model/collect_eval_data_request.go index 4d7ac1d435e..dd17f2039eb 100644 --- a/cmd/relayproxy/model/collect_eval_data_request.go +++ b/cmd/relayproxy/model/collect_eval_data_request.go @@ -10,5 +10,6 @@ type CollectEvalDataRequest struct { Meta exporter.FeatureEventMetadata `json:"meta"` // Events is the list of the event we send in the payload + // here the type is any because we will unmarshal later in the different event types Events []map[string]any `json:"events"` } diff --git a/exporter/azureexporter/exporter.go b/exporter/azureexporter/exporter.go index b98e15af891..cf6f928f21e 100644 --- a/exporter/azureexporter/exporter.go +++ b/exporter/azureexporter/exporter.go @@ -52,7 +52,11 @@ func (f *Exporter) initializeAzureClient() (*azblob.Client, error) { return azblob.NewClientWithSharedKeyCredential(url, cred, nil) } -func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.ExportableEvent) error { +func (f *Exporter) Export( + ctx context.Context, + logger *fflog.FFLogger, + featureEvents []exporter.ExportableEvent, +) error { if f.AccountName == "" { return fmt.Errorf("you should specify an AccountName. 
%v is invalid", f.AccountName) } @@ -92,7 +96,10 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv fileName := file.Name() of, err := os.Open(outputDir + "/" + fileName) if err != nil { - logger.Error("[Azure Exporter] impossible to open file", slog.String("path", outputDir+"/"+fileName)) + logger.Error( + "[Azure Exporter] impossible to open file", + slog.String("path", outputDir+"/"+fileName), + ) continue } defer func() { _ = of.Close() }() @@ -105,11 +112,17 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEv _, err = client.UploadFile(context.Background(), f.Container, source, of, nil) if err != nil { - logger.Error("[Azure Exporter] failed to upload file", slog.String("path", outputDir+"/"+fileName)) + logger.Error( + "[Azure Exporter] failed to upload file", + slog.String("path", outputDir+"/"+fileName), + ) return err } - logger.Info("[Azure Exporter] file uploaded.", slog.String("location", f.Container+"/"+fileName)) + logger.Info( + "[Azure Exporter] file uploaded.", + slog.String("location", f.Container+"/"+fileName), + ) } return nil } diff --git a/exporter/common_test.go b/exporter/common_test.go index c71a14ff277..dc21411c069 100644 --- a/exporter/common_test.go +++ b/exporter/common_test.go @@ -172,10 +172,21 @@ func TestFormatEventInCSV(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := tt.args.event.FormatInCSV(tt.args.csvTemplate) - if !tt.wantErr(t, err, fmt.Sprintf("FormatInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event)) { + if !tt.wantErr( + t, + err, + fmt.Sprintf("FormatInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event), + ) { return } - assert.Equalf(t, tt.want, string(got), "FormatInCSV(%v, %v)", tt.args.csvTemplate, tt.args.event) + assert.Equalf( + t, + tt.want, + string(got), + "FormatInCSV(%v, %v)", + tt.args.csvTemplate, + tt.args.event, + ) }) } } diff --git a/exporter/data_exporter.go b/exporter/data_exporter.go index bcd891d24c8..d175c7d9ab5 100644 --- a/exporter/data_exporter.go +++ b/exporter/data_exporter.go @@ -153,7 +153,6 @@ func (d *dataExporterImpl[T]) sendEvents(ctx context.Context, events []T) error default: return fmt.Errorf("trying to send unknown object to the exporter (deprecated)") } - break case DeprecatedExporterV2: switch events := any(events).(type) { case []FeatureEvent: @@ -164,7 +163,6 @@ func (d *dataExporterImpl[T]) sendEvents(ctx context.Context, events []T) error default: return fmt.Errorf("trying to send unknown object to the exporter") } - break case Exporter: exportableEvents := make([]ExportableEvent, len(events)) for i, event := range events { @@ -174,7 +172,6 @@ func (d *dataExporterImpl[T]) sendEvents(ctx context.Context, events []T) error if err != nil { return fmt.Errorf("error while exporting data: %w", err) } - break default: return fmt.Errorf("this is not a valid exporter") } diff --git a/exporter/data_exporter_test.go b/exporter/data_exporter_test.go index 6b919b1dfeb..ed6b995e0c8 100644 --- a/exporter/data_exporter_test.go +++ b/exporter/data_exporter_test.go @@ -68,11 +68,17 @@ func TestDataExporterFlush_TriggerErrorIfNotKnowType(t *testing.T) { defer func() { _ = os.Remove(logFile.Name()) }() exporterMock := tt.exporter - exp := exporter.NewDataExporter[testutils.ExportableMockEvent](context.TODO(), exporter.Config{ - Exporter: exporterMock, - FlushInterval: 0, - MaxEventInMemory: 0, - }, "id-consumer", &evStore, logger) + exp := exporter.NewDataExporter[testutils.ExportableMockEvent]( + context.TODO(), + 
exporter.Config{ + Exporter: exporterMock, + FlushInterval: 0, + MaxEventInMemory: 0, + }, + "id-consumer", + &evStore, + logger, + ) exp.Flush() // flush should error and not return any event diff --git a/exporter/even_store_test.go b/exporter/even_store_test.go index 307471dcb5b..2b30061567f 100644 --- a/exporter/even_store_test.go +++ b/exporter/even_store_test.go @@ -16,27 +16,40 @@ import ( const defaultTestCleanQueueDuration = 100 * time.Millisecond func Test_ConsumerNameInvalid(t *testing.T) { - t.Run("GetPendingEventCount: should return an error if the consumer name is invalid", func(t *testing.T) { - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) - eventStore.AddConsumer("consumer1") - defer eventStore.Stop() - _, err := eventStore.GetPendingEventCount("wrong name") - assert.NotNil(t, err) - }) - t.Run("ProcessPendingEvents: should return an error if the consumer name is invalid", func(t *testing.T) { - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) - eventStore.AddConsumer("consumer1") - defer eventStore.Stop() - err := eventStore.ProcessPendingEvents( - "wrong name", - func(ctx context.Context, events []testutils.ExportableMockEvent) error { return nil }) - assert.NotNil(t, err) - }) + t.Run( + "GetPendingEventCount: should return an error if the consumer name is invalid", + func(t *testing.T) { + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) + eventStore.AddConsumer("consumer1") + defer eventStore.Stop() + _, err := eventStore.GetPendingEventCount("wrong name") + assert.NotNil(t, err) + }, + ) + t.Run( + "ProcessPendingEvents: should return an error if the consumer name is invalid", + func(t *testing.T) { + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) + eventStore.AddConsumer("consumer1") + defer eventStore.Stop() + err := eventStore.ProcessPendingEvents( + "wrong name", + func(ctx context.Context, events []testutils.ExportableMockEvent) error { return nil }, + ) + assert.NotNil(t, err) + }, + ) } func Test_SingleConsumer(t *testing.T) { consumerName := "consumer1" - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) eventStore.AddConsumer(consumerName) defer eventStore.Stop() got, _ := eventStore.GetPendingEventCount(consumerName) @@ -82,7 +95,9 @@ func Test_SingleConsumer(t *testing.T) { func Test_MultipleConsumersSingleThread(t *testing.T) { consumerNames := []string{"consumer1", "consumer2"} - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) for _, name := range consumerNames { eventStore.AddConsumer(name) } @@ -142,7 +157,9 @@ func Test_MultipleConsumersSingleThread(t *testing.T) { func Test_MultipleConsumersMultipleGORoutines(t *testing.T) { consumerNames := []string{"consumer1", "consumer2"} - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) for _, name := range consumerNames { eventStore.AddConsumer(name) } @@ -162,7 +179,9 @@ func Test_MultipleConsumersMultipleGORoutines(t *testing.T) 
{ return nil }) assert.Nil(t, err) - time.Sleep(50 * time.Millisecond) // we wait to be sure that the producer has produce new events + time.Sleep( + 50 * time.Millisecond, + ) // we wait to be sure that the producer has produce new events err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { @@ -186,7 +205,9 @@ func Test_MultipleConsumersMultipleGORoutines(t *testing.T) { func Test_ProcessPendingEventInError(t *testing.T) { consumerName := "consumer1" - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) eventStore.AddConsumer(consumerName) defer eventStore.Stop() // start producer @@ -198,10 +219,13 @@ func Test_ProcessPendingEventInError(t *testing.T) { assert.Nil(t, err) // process is in error, so we are not able to update the offset - err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { - assert.Equal(t, 1000, len(events)) - return fmt.Errorf("error") - }) + err = eventStore.ProcessPendingEvents( + consumerName, + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 1000, len(events)) + return fmt.Errorf("error") + }, + ) assert.NotNil(t, err) // We still have the same number of items waiting for next process @@ -210,10 +234,13 @@ func Test_ProcessPendingEventInError(t *testing.T) { assert.Nil(t, err) // process is not in error anymore - err = eventStore.ProcessPendingEvents(consumerName, func(ctx context.Context, events []testutils.ExportableMockEvent) error { - assert.Equal(t, 1000, len(events)) - return nil - }) + err = eventStore.ProcessPendingEvents( + consumerName, + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 1000, len(events)) + return nil + }, + ) assert.Nil(t, err) // we have consumed all the items @@ -224,7 +251,9 @@ func Test_ProcessPendingEventInError(t *testing.T) { func Test_WaitForEmptyClean(t *testing.T) { consumerNames := []string{"consumer1"} - eventStore := exporter.NewEventStore[testutils.ExportableMockEvent](defaultTestCleanQueueDuration) + eventStore := exporter.NewEventStore[testutils.ExportableMockEvent]( + defaultTestCleanQueueDuration, + ) for _, name := range consumerNames { eventStore.AddConsumer(name) } @@ -233,17 +262,25 @@ func Test_WaitForEmptyClean(t *testing.T) { // start producer ctx := context.Background() startEventProducer(ctx, eventStore, 100, false) - err := eventStore.ProcessPendingEvents(consumerNames[0], func(ctx context.Context, events []testutils.ExportableMockEvent) error { - assert.Equal(t, 100, len(events)) - return nil - }) + err := eventStore.ProcessPendingEvents( + consumerNames[0], + func(ctx context.Context, events []testutils.ExportableMockEvent) error { + assert.Equal(t, 100, len(events)) + return nil + }, + ) assert.Nil(t, err) assert.True(t, eventStore.GetTotalEventCount() > 0) time.Sleep(3 * defaultTestCleanQueueDuration) assert.Equal(t, int64(0), eventStore.GetTotalEventCount()) } -func startEventProducer(ctx context.Context, eventStore exporter.EventStore[testutils.ExportableMockEvent], produceMax int, randomizeProducingTime bool) { +func startEventProducer( + ctx context.Context, + eventStore exporter.EventStore[testutils.ExportableMockEvent], + produceMax int, + randomizeProducingTime bool, +) { for i := 0; i < produceMax; i++ { select { case 
<-ctx.Done(): diff --git a/exporter/fileexporter/exporter.go b/exporter/fileexporter/exporter.go index 511c625667f..e07bc362440 100644 --- a/exporter/fileexporter/exporter.go +++ b/exporter/fileexporter/exporter.go @@ -53,11 +53,23 @@ type Exporter struct { } // Export is saving a collection of events in a file. -func (f *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + _ context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { // Parse the template only once f.initTemplates.Do(func() { - f.csvTemplate = exporter.ParseTemplate("csvFormat", f.CsvTemplate, exporter.DefaultCsvTemplate) - f.filenameTemplate = exporter.ParseTemplate("filenameFormat", f.Filename, exporter.DefaultFilenameTemplate) + f.csvTemplate = exporter.ParseTemplate( + "csvFormat", + f.CsvTemplate, + exporter.DefaultCsvTemplate, + ) + f.filenameTemplate = exporter.ParseTemplate( + "filenameFormat", + f.Filename, + exporter.DefaultFilenameTemplate, + ) }) // Default format for the output @@ -79,6 +91,7 @@ func (f *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporte filePath = filename } else { // Ensure OutputDir exists or create it + // nolint:gosec if err := os.MkdirAll(outputDir, 0755); err != nil { return fmt.Errorf("failed to create output directory: %v", err) } @@ -97,12 +110,13 @@ func (f *Exporter) IsBulk() bool { return true } -func (f *Exporter) writeFile(filePath string, featureEvents []exporter.FeatureEvent) error { +func (f *Exporter) writeFile(filePath string, events []exporter.ExportableEvent) error { + //nolint:gosec file, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644) if err != nil { return err } - defer file.Close() + defer func() { _ = file.Close() }() for _, event := range events { var line []byte var err error @@ -136,10 +150,8 @@ func (f *Exporter) writeParquet(filePath string, events []exporter.ExportableEve switch ev := any(event).(type) { case exporter.FeatureEvent: parquetFeatureEvents = append(parquetFeatureEvents, ev) - break case exporter.TrackingEvent: parquetTrackingEvents = append(parquetTrackingEvents, ev) - break default: // do nothing } @@ -181,12 +193,15 @@ func (f *Exporter) writeParquetFeatureEvent(filePath string, events []exporter.F return pw.WriteStop() } -func (f *Exporter) writeParquetTrackingEvent(filePath string, events []exporter.TrackingEvent) error { +func (f *Exporter) writeParquetTrackingEvent( + filePath string, + events []exporter.TrackingEvent, +) error { fw, err := local.NewLocalFileWriter(filePath) if err != nil { return err } - defer fw.Close() + defer func() { _ = fw.Close() }() pw, err := writer.NewParquetWriter(fw, new(exporter.FeatureEvent), int64(runtime.NumCPU())) if err != nil { diff --git a/exporter/gcstorageexporter/exporter.go b/exporter/gcstorageexporter/exporter.go index 7ea0243a2aa..fb4430a8283 100644 --- a/exporter/gcstorageexporter/exporter.go +++ b/exporter/gcstorageexporter/exporter.go @@ -54,7 +54,11 @@ func (f *Exporter) IsBulk() bool { } // Export is saving a collection of events in a file. -func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + ctx context.Context, + logger *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { // Init google storage client client, err := storage.NewClient(ctx, f.Options...) 
if err != nil { @@ -111,8 +115,12 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events [] _, err = io.Copy(wc, of) _ = wc.Close() if err != nil { - return fmt.Errorf("error: [GCP Exporter] impossible to copy the file from %s to bucket %s: %v", - source, f.Bucket, err) + return fmt.Errorf( + "error: [GCP Exporter] impossible to copy the file from %s to bucket %s: %v", + source, + f.Bucket, + err, + ) } } diff --git a/exporter/kafkaexporter/exporter.go b/exporter/kafkaexporter/exporter.go index 66aff8cab98..656a44287e5 100644 --- a/exporter/kafkaexporter/exporter.go +++ b/exporter/kafkaexporter/exporter.go @@ -45,7 +45,11 @@ type Exporter struct { // Export will produce a message to the Kafka topic. The message's value will contain the event encoded in the // selected format. Messages are published synchronously and will error immediately on failure. -func (e *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (e *Exporter) Export( + _ context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { if e.sender == nil { err := e.initializeProducer() if err != nil { @@ -84,10 +88,10 @@ func (e *Exporter) IsBulk() bool { func (e *Exporter) initializeProducer() error { if e.Settings.Config == nil { e.Settings.Config = sarama.NewConfig() - e.Settings.Config.Producer.Return.Successes = true // Needs to be true for sync producers + e.Settings.Producer.Return.Successes = true // Needs to be true for sync producers } - if err := e.Settings.Config.Validate(); err != nil { + if err := e.Settings.Validate(); err != nil { return fmt.Errorf("invalid configuration: %w", err) } diff --git a/exporter/kinesisexporter/exporter.go b/exporter/kinesisexporter/exporter.go index eb1146c5d5b..7ffd8f3a280 100644 --- a/exporter/kinesisexporter/exporter.go +++ b/exporter/kinesisexporter/exporter.go @@ -26,7 +26,10 @@ var DefaultPartitionKey = func(context context.Context, _ exporter.ExportableEve } type MessageSender interface { - SendMessages(ctx context.Context, msgs *kinesis.PutRecordsInput) (*kinesis.PutRecordsOutput, error) + SendMessages( + ctx context.Context, + msgs *kinesis.PutRecordsInput, + ) (*kinesis.PutRecordsOutput, error) } type DefaultKinesisSender struct { @@ -145,7 +148,11 @@ func (e *Exporter) initializeProducer(ctx context.Context) error { return initErr } -func (e *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, featureEvents []exporter.ExportableEvent) error { +func (e *Exporter) Export( + ctx context.Context, + logger *fflog.FFLogger, + featureEvents []exporter.ExportableEvent, +) error { err := e.initializeProducer(ctx) if err != nil { return fmt.Errorf("writer: %w", err) diff --git a/exporter/kinesisexporter/exporter_test.go b/exporter/kinesisexporter/exporter_test.go index 6d6d0ad1a13..fe7ce6ac8fc 100644 --- a/exporter/kinesisexporter/exporter_test.go +++ b/exporter/kinesisexporter/exporter_test.go @@ -195,7 +195,9 @@ func TestExporterSettingsCreation(t *testing.T) { WithStreamName("test-stream-name"), WithStreamArn("test-stream-arn"), WithExplicitHashKey("test-explicit-hash-key"), - WithPartitionKey(func(_ context.Context, _ exporter.ExportableEvent) string { return "non-default" }), + WithPartitionKey( + func(_ context.Context, _ exporter.ExportableEvent) string { return "non-default" }, + ), ) assert.Equal(t, settings.PartitionKey(context.TODO(), NewFeatureEvent()), "non-default") assert.Nil(t, settings.StreamName) // overwritten by streamArn @@ -267,7 +269,10 @@ type 
MockKinesisSender struct { PutRecordsInputs []*kinesis.PutRecordsInput } -func (k *MockKinesisSender) SendMessages(ctx context.Context, msgs *kinesis.PutRecordsInput) (*kinesis.PutRecordsOutput, error) { +func (k *MockKinesisSender) SendMessages( + ctx context.Context, + msgs *kinesis.PutRecordsInput, +) (*kinesis.PutRecordsOutput, error) { k.PutRecordsInputs = append(k.PutRecordsInputs, msgs) failedRecordCount := int32(0) output := kinesis.PutRecordsOutput{ @@ -281,6 +286,9 @@ func (k *MockKinesisSender) SendMessages(ctx context.Context, msgs *kinesis.PutR type MockKinesisSenderWithError struct{} -func (k *MockKinesisSenderWithError) SendMessages(ctx context.Context, msgs *kinesis.PutRecordsInput) (*kinesis.PutRecordsOutput, error) { +func (k *MockKinesisSenderWithError) SendMessages( + ctx context.Context, + msgs *kinesis.PutRecordsInput, +) (*kinesis.PutRecordsOutput, error) { return nil, errors.New("failure to send message: datacenter on fire") } diff --git a/exporter/logsexporter/exporter.go b/exporter/logsexporter/exporter.go index 0c7239807b3..9120b1fac6f 100644 --- a/exporter/logsexporter/exporter.go +++ b/exporter/logsexporter/exporter.go @@ -29,7 +29,11 @@ type Exporter struct { } // Export is saving a collection of events in a file. -func (f *Exporter) Export(_ context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + _ context.Context, + logger *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { f.initTemplates.Do(func() { // Remove below after deprecation of Format if f.LogFormat == "" && f.Format != "" { diff --git a/exporter/pubsubexporter/exporter.go b/exporter/pubsubexporter/exporter.go index a964cd33bc8..d9fbd354495 100644 --- a/exporter/pubsubexporter/exporter.go +++ b/exporter/pubsubexporter/exporter.go @@ -36,7 +36,11 @@ type Exporter struct { } // Export publishes a PubSub message for each exporter.FeatureEvent received. 
-func (e *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (e *Exporter) Export( + ctx context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { if e.publisher == nil { if err := e.initPublisher(ctx); err != nil { return err diff --git a/exporter/pubsubexporter/exporter_test.go b/exporter/pubsubexporter/exporter_test.go index 1ee7fd22523..ce0b0e008cb 100644 --- a/exporter/pubsubexporter/exporter_test.go +++ b/exporter/pubsubexporter/exporter_test.go @@ -178,7 +178,7 @@ func TestExporter_Export(t *testing.T) { EnableMessageOrdering: tt.fields.enableMessageOrdering, newClientFunc: tt.fields.newClientFunc, } - err = e.Export(ctx, logger, tt.featureEvents) + err = e.Export(ctx, logger, tt.events) if tt.wantErr { assert.Error(t, err) @@ -201,7 +201,11 @@ func TestExporter_IsBulk(t *testing.T) { assert.False(t, e.IsBulk(), "PubSub exporter is not a bulk one") } -func assertMessages(t *testing.T, expectedEvents []exporter.ExportableEvent, messages []*pstest.Message) { +func assertMessages( + t *testing.T, + expectedEvents []exporter.ExportableEvent, + messages []*pstest.Message, +) { events := make([]exporter.FeatureEvent, len(messages)) for i, message := range messages { assert.Equal(t, map[string]string{"emitter": "GO Feature Flag"}, message.Attributes, @@ -216,7 +220,11 @@ func assertMessages(t *testing.T, expectedEvents []exporter.ExportableEvent, mes assert.ElementsMatchf(t, expectedEvents, events, "events should match in any order") } -func assertPublisherSettings(t *testing.T, expectedSettings *pubsub.PublishSettings, publisher *pubsub.Topic) { +func assertPublisherSettings( + t *testing.T, + expectedSettings *pubsub.PublishSettings, + publisher *pubsub.Topic, +) { if expectedSettings != nil { assert.Equal(t, *expectedSettings, publisher.PublishSettings) } else { diff --git a/exporter/s3exporter/exporter.go b/exporter/s3exporter/exporter.go index 14cd6eda7ce..55931f0f6cd 100644 --- a/exporter/s3exporter/exporter.go +++ b/exporter/s3exporter/exporter.go @@ -56,7 +56,11 @@ type Exporter struct { } // Export is saving a collection of events in a file. 
-func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + ctx context.Context, + logger *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { // init the s3 uploader if f.s3Uploader == nil { var initErr error diff --git a/exporter/s3exporter/exporter_test.go b/exporter/s3exporter/exporter_test.go index 36b40084972..d9322a19b51 100644 --- a/exporter/s3exporter/exporter_test.go +++ b/exporter/s3exporter/exporter_test.go @@ -156,8 +156,8 @@ func TestS3_Export(t *testing.T) { Format: "csv", CsvTemplate: "{{ .Foo}}", }, - events: []exporter.FeatureEvent{ - { + events: []exporter.ExportableEvent{ + exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", }, @@ -177,14 +177,23 @@ func TestS3_Export(t *testing.T) { CsvTemplate: tt.fields.CsvTemplate, s3Uploader: &s3ManagerMock, } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, tt.events) + err := f.Export( + context.Background(), + &fflog.FFLogger{LeveledLogger: slog.Default()}, + tt.events, + ) if tt.wantErr { assert.Error(t, err, "Export should error") return } assert.NoError(t, err, "Export should not error") - assert.Equal(t, 1, len(s3ManagerMock.S3ManagerMockFileSystem), "we should have 1 file in our mock") + assert.Equal( + t, + 1, + len(s3ManagerMock.S3ManagerMockFileSystem), + "we should have 1 file in our mock", + ) expectedContent, _ := os.ReadFile(tt.expectedFile) for k, v := range s3ManagerMock.S3ManagerMockFileSystem { assert.Equal(t, string(expectedContent), v, "invalid file content") @@ -199,7 +208,11 @@ func Test_errSDK(t *testing.T) { Bucket: "empty", AwsConfig: &aws.Config{}, } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.ExportableEvent{}) + err := f.Export( + context.Background(), + &fflog.FFLogger{LeveledLogger: slog.Default()}, + []exporter.ExportableEvent{}, + ) assert.Error(t, err, "Empty AWS config should failed") } diff --git a/exporter/s3exporterv2/exporter.go b/exporter/s3exporterv2/exporter.go index 20c93d42003..c2e89e5ff03 100644 --- a/exporter/s3exporterv2/exporter.go +++ b/exporter/s3exporterv2/exporter.go @@ -82,7 +82,11 @@ func (f *Exporter) initializeUploader(ctx context.Context) error { } // Export is saving a collection of events in a file. 
-func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + ctx context.Context, + logger *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { if ctx == nil { ctx = context.Background() } @@ -123,7 +127,10 @@ func (f *Exporter) Export(ctx context.Context, logger *fflog.FFLogger, events [] // read file of, err := os.Open(outputDir + "/" + file.Name()) if err != nil { - f.ffLogger.Error("[S3Exporter] impossible to open the file", slog.String("path", outputDir+"/"+file.Name())) + f.ffLogger.Error( + "[S3Exporter] impossible to open the file", + slog.String("path", outputDir+"/"+file.Name()), + ) continue } diff --git a/exporter/s3exporterv2/exporter_test.go b/exporter/s3exporterv2/exporter_test.go index 06e36fa61fd..912fcd7b83c 100644 --- a/exporter/s3exporterv2/exporter_test.go +++ b/exporter/s3exporterv2/exporter_test.go @@ -228,17 +228,31 @@ func TestS3_Export(t *testing.T) { // Verify that S3ClientOptions are correctly set on the Exporter if tt.fields.S3ClientOptions != nil { - assert.Equal(t, tt.fields.S3ClientOptions, f.S3ClientOptions, "S3ClientOptions should be set correctly on the Exporter") + assert.Equal( + t, + tt.fields.S3ClientOptions, + f.S3ClientOptions, + "S3ClientOptions should be set correctly on the Exporter", + ) } - err := f.Export(tt.fields.Context, &fflog.FFLogger{LeveledLogger: slog.Default()}, tt.events) + err := f.Export( + tt.fields.Context, + &fflog.FFLogger{LeveledLogger: slog.Default()}, + tt.events, + ) if tt.wantErr { assert.Error(t, err, "Export should error") return } assert.NoError(t, err, "Export should not error") - assert.Equal(t, 1, len(s3ManagerMock.S3ManagerMockFileSystem), "we should have 1 file in our mock") + assert.Equal( + t, + 1, + len(s3ManagerMock.S3ManagerMockFileSystem), + "we should have 1 file in our mock", + ) expectedContent, _ := os.ReadFile(tt.expectedFile) for k, v := range s3ManagerMock.S3ManagerMockFileSystem { assert.Equal(t, string(expectedContent), v, "invalid file content") @@ -253,7 +267,11 @@ func Test_errSDK(t *testing.T) { Bucket: "empty", AwsConfig: &aws.Config{}, } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.ExportableEvent{}) + err := f.Export( + context.Background(), + &fflog.FFLogger{LeveledLogger: slog.Default()}, + []exporter.ExportableEvent{}, + ) assert.Error(t, err, "Empty AWS config should failed") } diff --git a/exporter/sqsexporter/exporter.go b/exporter/sqsexporter/exporter.go index 02e38b2249f..9ba76d423a9 100644 --- a/exporter/sqsexporter/exporter.go +++ b/exporter/sqsexporter/exporter.go @@ -28,7 +28,11 @@ type Exporter struct { } // Export is sending SQS event for each events received. -func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + ctx context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { if f.AwsConfig == nil { cfg, err := config.LoadDefaultConfig(ctx) if err != nil { diff --git a/exporter/webhookexporter/exporter.go b/exporter/webhookexporter/exporter.go index 7e0f297c2c2..fce9be62fe5 100644 --- a/exporter/webhookexporter/exporter.go +++ b/exporter/webhookexporter/exporter.go @@ -60,7 +60,11 @@ type webhookPayload struct { } // Export is sending a collection of events in a webhook call. 
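The exporters in this series share the bulk signature Export(ctx, *fflog.FFLogger, []exporter.ExportableEvent). A hedged sketch of a minimal custom exporter against that signature; the stdoutExporter name and the plain JSON marshalling are illustrative, not part of this patch, and it assumes the context, encoding/json, fmt, exporter, and fflog imports:

	type stdoutExporter struct{}

	// Export prints each exportable event as a JSON line.
	func (e *stdoutExporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error {
		for _, ev := range events {
			b, err := json.Marshal(ev)
			if err != nil {
				return err
			}
			fmt.Println(string(b))
		}
		return nil
	}

	// IsBulk reports that this exporter accepts batches of events.
	func (e *stdoutExporter) IsBulk() bool { return true }
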
-func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (f *Exporter) Export( + ctx context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { f.init.Do(func() { if f.httpClient == nil { f.httpClient = internal.DefaultHTTPClient() @@ -109,7 +113,10 @@ func (f *Exporter) Export(ctx context.Context, _ *fflog.FFLogger, events []expor defer func() { _ = response.Body.Close() }() if response.StatusCode > 399 { return fmt.Errorf( - "error while calling the webhook, HTTP Code %d received, response: %v", response.StatusCode, response.Body) + "error while calling the webhook, HTTP Code %d received, response: %v", + response.StatusCode, + response.Body, + ) } return nil } diff --git a/exporter/webhookexporter/exporter_test.go b/exporter/webhookexporter/exporter_test.go index 6f1e92695de..86b46907801 100644 --- a/exporter/webhookexporter/exporter_test.go +++ b/exporter/webhookexporter/exporter_test.go @@ -217,6 +217,14 @@ func TestWebhook_Export_impossibleToParse(t *testing.T) { EndpointURL: " http://invalid.com/", } - err := f.Export(context.Background(), &fflog.FFLogger{LeveledLogger: slog.Default()}, []exporter.ExportableEvent{}) - assert.EqualError(t, err, "parse \" http://invalid.com/\": first path segment in URL cannot contain colon") + err := f.Export( + context.Background(), + &fflog.FFLogger{LeveledLogger: slog.Default()}, + []exporter.ExportableEvent{}, + ) + assert.EqualError( + t, + err, + "parse \" http://invalid.com/\": first path segment in URL cannot contain colon", + ) } diff --git a/feature_flag.go b/feature_flag.go index c480182d6e8..c35747741e7 100644 --- a/feature_flag.go +++ b/feature_flag.go @@ -50,6 +50,8 @@ type GoFeatureFlag struct { featureEventDataExporter exporter.Manager[exporter.FeatureEvent] trackingEventDataExporter exporter.Manager[exporter.TrackingEvent] retrieverManager *retriever.Manager + // evalExporterWg is a wait group to wait for the evaluation exporter to finish the export before closing GOFF + evalExporterWg sync.WaitGroup } // ff is the default object for go-feature-flag @@ -60,7 +62,6 @@ var onceFF sync.Once // and return everything you need to manage your flags. 
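A hedged configuration sketch showing how evaluation and tracking events can be routed to separate exporters via ExporterEventType, mirroring the relay-proxy test above; the retriever path, file names, and flush interval are illustrative, and the snippet assumes the log, time, ffclient, fileretriever, and fileexporter imports:

	goff, err := ffclient.New(ffclient.Config{
		PollingInterval: 10 * time.Second,
		Retriever:       &fileretriever.Retriever{Path: "flags.yaml"},
		DataExporters: []ffclient.DataExporter{
			{FlushInterval: time.Minute, Exporter: &fileexporter.Exporter{Filename: "evaluation-events.json"}},
			{FlushInterval: time.Minute, Exporter: &fileexporter.Exporter{Filename: "tracking-events.json"},
				ExporterEventType: ffclient.TrackingEventExporter},
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	defer goff.Close()
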
func New(config Config) (*GoFeatureFlag, error) { config.PollingInterval = adjustPollingInterval(config.PollingInterval) - if config.offlineMutex == nil { config.offlineMutex = &sync.RWMutex{} } @@ -72,7 +73,8 @@ func New(config Config) (*GoFeatureFlag, error) { } goFF := &GoFeatureFlag{ - config: config, + config: config, + evalExporterWg: sync.WaitGroup{}, } if config.Offline { @@ -84,11 +86,18 @@ func New(config Config) (*GoFeatureFlag, error) { notificationService := initializeNotificationService(config) // init internal cache - goFF.cache = cache.New(notificationService, config.PersistentFlagConfigurationFile, config.internalLogger) + goFF.cache = cache.New( + notificationService, + config.PersistentFlagConfigurationFile, + config.internalLogger, + ) retrieverManager, err := initializeRetrieverManager(config) if err != nil && (retrieverManager == nil || !config.StartWithRetrieverError) { - return nil, fmt.Errorf("impossible to initialize the retrievers, please check your configuration: %v", err) + return nil, fmt.Errorf( + "impossible to initialize the retrievers, please check your configuration: %v", + err, + ) } goFF.retrieverManager = retrieverManager @@ -182,7 +191,12 @@ func initializeDataExporters(config Config, logger *fflog.FFLogger) ( // handleFirstRetrieverError is a function that will handle the first error when trying to retrieve // the flags the first time when starting GO Feature Flag. -func handleFirstRetrieverError(config Config, logger *fflog.FFLogger, cache cache.Manager, err error) error { +func handleFirstRetrieverError( + config Config, + logger *fflog.FFLogger, + cache cache.Manager, + err error, +) error { switch { case config.PersistentFlagConfigurationFile != "": errPersist := retrievePersistentLocalDisk(config.Context, config, cache) @@ -191,34 +205,42 @@ func handleFirstRetrieverError(config Config, logger *fflog.FFLogger, cache cach "[original error: %v]", errPersist, err) } case !config.StartWithRetrieverError: - return fmt.Errorf("impossible to retrieve the flags, please check your configuration: %v", err) + return fmt.Errorf( + "impossible to retrieve the flags, please check your configuration: %v", + err, + ) default: // We accept to start with a retriever error, we will serve only default value logger.Error("Impossible to retrieve the flags, starting with the "+ "retriever error", slog.Any("error", err)) } - config.internalLogger.Debug("GO Feature Flag is initialized") - return goFF, nil + return nil } // retrievePersistentLocalDisk is a function used in case we are not able to retrieve any flag when starting // GO Feature Flag. // This function will look at any pre-existent persistent configuration and start with it. 
-func retrievePersistentLocalDisk(ctx context.Context, config Config, goFF *GoFeatureFlag) error { +func retrievePersistentLocalDisk(ctx context.Context, config Config, cache cache.Manager) error { if config.PersistentFlagConfigurationFile != "" { - config.internalLogger.Error("Impossible to retrieve your flag configuration, trying to use the persistent"+ - " flag configuration file.", slog.String("path", config.PersistentFlagConfigurationFile)) + config.internalLogger.Error( + "Impossible to retrieve your flag configuration, trying to use the persistent"+ + " flag configuration file.", + slog.String("path", config.PersistentFlagConfigurationFile), + ) if _, err := os.Stat(config.PersistentFlagConfigurationFile); err == nil { // we found the configuration file on the disk r := &fileretriever.Retriever{Path: config.PersistentFlagConfigurationFile} - - fallBackRetrieverManager := retriever.NewManager(config.Context, []retriever.Retriever{r}, config.internalLogger) + fallBackRetrieverManager := retriever.NewManager( + config.Context, + []retriever.Retriever{r}, + config.internalLogger, + ) err := fallBackRetrieverManager.Init(ctx) if err != nil { return err } defer func() { _ = fallBackRetrieverManager.Shutdown(ctx) }() - err = retrieveFlagsAndUpdateCache(goFF.config, goFF.cache, fallBackRetrieverManager, true) + err = retrieveFlagsAndUpdateCache(config, cache, fallBackRetrieverManager, true) if err != nil { return err } @@ -234,13 +256,13 @@ func retrievePersistentLocalDisk(ctx context.Context, config Config, goFF *GoFea func (g *GoFeatureFlag) Close() { if g != nil { if g.cache != nil { - // clear the cache g.cache.Close() } if g.bgUpdater.updaterChan != nil && g.bgUpdater.ticker != nil { g.bgUpdater.close() } - + // we have to wait for the GO routine before stopping the exporter + g.evalExporterWg.Wait() if g.featureEventDataExporter != nil { g.featureEventDataExporter.Stop() } @@ -263,7 +285,10 @@ func (g *GoFeatureFlag) startFlagUpdaterDaemon() { if !g.IsOffline() { err := retrieveFlagsAndUpdateCache(g.config, g.cache, g.retrieverManager, false) if err != nil { - g.config.internalLogger.Error("Error while updating the cache.", slog.Any("error", err)) + g.config.internalLogger.Error( + "Error while updating the cache.", + slog.Any("error", err), + ) } } case <-g.bgUpdater.updaterChan: @@ -306,7 +331,8 @@ func retreiveFlags( defer wg.Done() // If the retriever is not ready, we ignore it - if rr, ok := r.(retriever.CommonInitializableRetriever); ok && rr.Status() != retriever.RetrieverReady { + if rr, ok := r.(retriever.CommonInitializableRetriever); ok && + rr.Status() != retriever.RetrieverReady { resultsChan <- Results{Error: nil, Value: map[string]dto.DTO{}, Index: index} return } @@ -348,7 +374,11 @@ func retrieveFlagsAndUpdateCache(config Config, cache cache.Manager, return err } - err = cache.UpdateCache(newFlags, config.internalLogger, !(isInit && config.DisableNotifierOnInit)) + err = cache.UpdateCache( + newFlags, + config.internalLogger, + !isInit || !config.DisableNotifierOnInit, + ) if err != nil { log.Printf("error: impossible to update the cache of the flags: %v", err) return err @@ -373,7 +403,10 @@ func (g *GoFeatureFlag) ForceRefresh() bool { } err := retrieveFlagsAndUpdateCache(g.config, g.cache, g.retrieverManager, false) if err != nil { - g.config.internalLogger.Error("Error while force updating the cache.", slog.Any("error", err)) + g.config.internalLogger.Error( + "Error while force updating the cache.", + slog.Any("error", err), + ) return false } return true @@ 
-419,6 +452,5 @@ func ForceRefresh() bool { // Close the component by stopping the background refresh and clean the cache. func Close() { onceFF = sync.Once{} - ff.exporterWg.Wait() ff.Close() } diff --git a/ffcontext/context.go b/ffcontext/context.go index d9fb518e84d..f0694098803 100644 --- a/ffcontext/context.go +++ b/ffcontext/context.go @@ -61,20 +61,6 @@ func (u EvaluationContext) MarshalJSON() ([]byte, error) { }) } -func (u EvaluationContext) UnmarshalJSON(data []byte) error { - var aux struct { - TargetingKey string `json:"targetingKey"` - Attributes value `json:"attributes"` - } - err := json.Unmarshal(data, &aux) - if err != nil { - return err - } - u.targetingKey = aux.TargetingKey - u.attributes = aux.Attributes - return nil -} - // GetKey return the unique targetingKey for the user. func (u EvaluationContext) GetKey() string { return u.targetingKey diff --git a/go.mod b/go.mod index 6038ff3d600..3b55dec25e1 100644 --- a/go.mod +++ b/go.mod @@ -26,6 +26,7 @@ require ( github.com/awslabs/aws-lambda-go-api-proxy v0.16.2 github.com/diegoholiveira/jsonlogic/v3 v3.8.1 github.com/fsouza/fake-gcs-server v1.52.2 + github.com/go-viper/mapstructure/v2 v2.2.1 github.com/golang/mock v1.6.0 github.com/google/go-cmp v0.7.0 github.com/google/uuid v1.6.0 @@ -149,7 +150,6 @@ require ( github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect - github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v5 v5.2.2 // indirect github.com/golang/protobuf v1.5.4 // indirect diff --git a/internal/cache/cache_manager.go b/internal/cache/cache_manager.go index 844789cb219..c9118e31e73 100644 --- a/internal/cache/cache_manager.go +++ b/internal/cache/cache_manager.go @@ -141,7 +141,8 @@ func (c *cacheManagerImpl) PersistCache( newCache map[string]flag.Flag, ) { go func() { - if _, err := os.Stat(c.persistentFlagConfigurationFile); !os.IsNotExist(err) && cmp.Equal(oldCache, newCache) { + if _, err := os.Stat(c.persistentFlagConfigurationFile); !os.IsNotExist(err) && + cmp.Equal(oldCache, newCache) { c.logger.Debug("No change in the cache, skipping the persist") return } diff --git a/testutils/mock/exporter_mock.go b/testutils/mock/exporter_mock.go index 5f64651a652..26b1c28937a 100644 --- a/testutils/mock/exporter_mock.go +++ b/testutils/mock/exporter_mock.go @@ -24,7 +24,11 @@ type Exporter struct { once sync.Once } -func (m *Exporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (m *Exporter) Export( + _ context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() @@ -65,7 +69,11 @@ type ExporterDeprecated struct { once sync.Once } -func (m *ExporterDeprecated) Export(_ context.Context, _ *log.Logger, events []exporter.FeatureEvent) error { +func (m *ExporterDeprecated) Export( + _ context.Context, + _ *log.Logger, + events []exporter.FeatureEvent, +) error { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() @@ -111,7 +119,11 @@ type ExporterDeprecatedV2 struct { once sync.Once } -func (m *ExporterDeprecatedV2) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.FeatureEvent) error { +func (m *ExporterDeprecatedV2) Export( + _ context.Context, + _ *fflog.FFLogger, + events []exporter.FeatureEvent, +) error { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() diff --git 
a/testutils/mock/tracking_event_exporter_mock.go b/testutils/mock/tracking_event_exporter_mock.go index 5976f0ad91f..122bf59621e 100644 --- a/testutils/mock/tracking_event_exporter_mock.go +++ b/testutils/mock/tracking_event_exporter_mock.go @@ -19,7 +19,11 @@ type TrackingEventExporter struct { once sync.Once } -func (m *TrackingEventExporter) Export(_ context.Context, _ *fflog.FFLogger, events []exporter.ExportableEvent) error { +func (m *TrackingEventExporter) Export( + _ context.Context, + _ *fflog.FFLogger, + events []exporter.ExportableEvent, +) error { m.once.Do(m.initMutex) m.mutex.Lock() defer m.mutex.Unlock() diff --git a/tracking.go b/tracking.go index 46db41f03cb..e6876811342 100644 --- a/tracking.go +++ b/tracking.go @@ -10,7 +10,10 @@ import ( // Track is used to track an event. // Note: Use this function only if you are using multiple go-feature-flag instances. func (g *GoFeatureFlag) Track( - trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { + trackingEventName string, + ctx ffcontext.EvaluationContext, + trackingEventDetails exporter.TrackingEventDetails, +) { if g != nil && g.trackingEventDataExporter != nil { contextKind := "user" if ctx.IsAnonymous() { @@ -31,6 +34,9 @@ func (g *GoFeatureFlag) Track( // Track is used to track an event. func Track( - trackingEventName string, ctx ffcontext.EvaluationContext, trackingEventDetails exporter.TrackingEventDetails) { + trackingEventName string, + ctx ffcontext.EvaluationContext, + trackingEventDetails exporter.TrackingEventDetails, +) { ff.Track(trackingEventName, ctx, trackingEventDetails) } diff --git a/tracking_test.go b/tracking_test.go index fc8f2849307..5d7974b4d05 100644 --- a/tracking_test.go +++ b/tracking_test.go @@ -27,14 +27,28 @@ func TestValidTrackingEvent(t *testing.T) { }) assert.NoError(t, err) - goff.Track("my-feature-flag", - ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da").AddCustom("admin", true).Build(), - map[string]interface{}{"additional data": "value"}) + goff.Track( + "my-feature-flag", + ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da"). + AddCustom("admin", true). 
+ Build(), + map[string]interface{}{"additional data": "value"}, + ) assert.Equal(t, 1, len(exp.ExportedEvents)) assert.Equal(t, "1668d845-051d-4dd9-907a-7ebe6aa2c9da", exp.ExportedEvents[0].UserKey) assert.Equal(t, "my-feature-flag", exp.ExportedEvents[0].Key) - assert.Equal(t, map[string]interface{}{"targetingKey": "1668d845-051d-4dd9-907a-7ebe6aa2c9da", "admin": true}, - exp.ExportedEvents[0].EvaluationContext) - assert.Equal(t, map[string]interface{}{"additional data": "value"}, exp.ExportedEvents[0].TrackingDetails) + assert.Equal( + t, + map[string]interface{}{ + "targetingKey": "1668d845-051d-4dd9-907a-7ebe6aa2c9da", + "admin": true, + }, + exp.ExportedEvents[0].EvaluationContext, + ) + assert.Equal( + t, + map[string]interface{}{"additional data": "value"}, + exp.ExportedEvents[0].TrackingDetails, + ) } diff --git a/variation.go b/variation.go index 8082667995f..c0d480523b8 100644 --- a/variation.go +++ b/variation.go @@ -336,9 +336,9 @@ func notifyVariation[T model.JSONType]( "SERVER", ctx.ExtractGOFFProtectedFields().ExporterMetadata, ) - g.exporterWg.Add(1) + g.evalExporterWg.Add(1) go func() { - defer g.exporterWg.Done() + defer g.evalExporterWg.Done() g.CollectEventData(event) }() } diff --git a/variation_test.go b/variation_test.go index 620720de4e5..1de83867528 100644 --- a/variation_test.go +++ b/variation_test.go @@ -131,7 +131,10 @@ func TestBoolVariation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: true, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: true, wantErr: true, @@ -328,23 +331,30 @@ func TestBoolVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := BoolVariation(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -450,7 +460,10 @@ func TestBoolVariationDetails(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: true, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, wantErr: true, expectedLog: `user="random-key", flag="key-not-exist", value="true", 
variation="SdkDefault"`, @@ -733,23 +746,30 @@ func TestBoolVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := BoolVariationDetails(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -764,7 +784,13 @@ func TestBoolVariationDetails(t *testing.T) { } if tt.wantErr { - assert.Error(t, err, "BoolVariationDetails() error = %v, wantErr %v", err, tt.wantErr) + assert.Error( + t, + err, + "BoolVariationDetails() error = %v, wantErr %v", + err, + tt.wantErr, + ) return } assert.Equal(t, tt.want, got, "BoolVariationDetails() got = %v, want %v", got, tt.want) @@ -850,7 +876,10 @@ func TestFloat64Variation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: 118.12, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: 118.12, wantErr: true, @@ -1047,23 +1076,30 @@ func TestFloat64Variation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := Float64Variation(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -1168,7 +1204,10 @@ func TestFloat64VariationDetails(t *testing.T) { flagKey: 
"key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: 118.12, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, wantErr: true, expectedLog: `user="random-key", flag="key-not-exist", value="118.12", variation="SdkDefault"`, @@ -1370,23 +1409,30 @@ func TestFloat64VariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := Float64VariationDetails(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -1486,7 +1532,10 @@ func TestJSONArrayVariation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: []interface{}{"toto"}, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: []interface{}{"toto"}, wantErr: true, @@ -1669,16 +1718,21 @@ func TestJSONArrayVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } @@ -1690,7 +1744,9 @@ func TestJSONArrayVariation(t *testing.T) { } assert.Equal(t, tt.want, got, "JSONArrayVariation() got = %v, want %v", got, tt.want) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if 
tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -1789,7 +1845,10 @@ func TestJSONArrayVariationDetails(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: []interface{}{"toto"}, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, wantErr: true, expectedLog: "^\\[" + testutils.RFC3339Regex + "\\] user=\"random-key\", flag=\"key-not-exist\", value=\"\\[toto\\]\"\n", @@ -1981,20 +2040,29 @@ func TestJSONArrayVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } - got, err := JSONArrayVariationDetails(tt.args.flagKey, tt.args.user, tt.args.defaultValue) + got, err := JSONArrayVariationDetails( + tt.args.flagKey, + tt.args.user, + tt.args.defaultValue, + ) if tt.wantErr { assert.Error(t, err, "JSONArrayVariation() error = %v, wantErr %v", err, tt.wantErr) @@ -2002,7 +2070,9 @@ func TestJSONArrayVariationDetails(t *testing.T) { } assert.Equal(t, tt.want, got, "JSONArrayVariation() got = %v, want %v", got, tt.want) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -2096,7 +2166,10 @@ func TestJSONVariation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: map[string]interface{}{"default-notkey": true}, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: map[string]interface{}{"default-notkey": true}, wantErr: true, @@ -2260,23 +2333,30 @@ func TestJSONVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - 
}, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := JSONVariation(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -2498,23 +2578,30 @@ func TestJSONVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := JSONVariationDetails(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -2615,7 +2702,10 @@ func TestStringVariation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: "default-notkey", - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: "default-notkey", wantErr: true, @@ -2780,22 +2870,29 @@ func TestStringVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := StringVariation(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we 
are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -3017,22 +3114,29 @@ func TestStringVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := StringVariationDetails(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -3133,7 +3237,10 @@ func TestIntVariation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: 118, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: 118, wantErr: true, @@ -3329,22 +3436,29 @@ func TestIntVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := IntVariation(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -3608,22 +3722,29 @@ func TestIntVariationDetails(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", 
variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := IntVariationDetails(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -3745,7 +3866,10 @@ func TestRawVariation(t *testing.T) { flagKey: "key-not-exist", user: ffcontext.NewEvaluationContext("random-key"), defaultValue: 123456, - cacheMock: NewCacheMock(&flag.InternalFlag{}, errors.New("flag [key-not-exist] does not exists")), + cacheMock: NewCacheMock( + &flag.InternalFlag{}, + errors.New("flag [key-not-exist] does not exists"), + ), }, want: model.RawVarResult{ Value: 123456, @@ -3981,23 +4105,30 @@ func TestRawVariation(t *testing.T) { LeveledLogger: logger, Offline: tt.args.offline, }, - featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent](context.Background(), []exporter.Config{ - { - FlushInterval: 0, - MaxEventInMemory: 0, - Exporter: &logsexporter.Exporter{ - LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + - "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + featureEventDataExporter: exporter.NewManager[exporter.FeatureEvent]( + context.Background(), + []exporter.Config{ + { + FlushInterval: 0, + MaxEventInMemory: 0, + Exporter: &logsexporter.Exporter{ + LogFormat: "user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", " + + "value=\"{{ .Value}}\", variation=\"{{ .Variation}}\"", + }, }, }, - }, exporter.DefaultExporterCleanQueueInterval, &fflog.FFLogger{LeveledLogger: logger}), + exporter.DefaultExporterCleanQueueInterval, + &fflog.FFLogger{LeveledLogger: logger}, + ), } } got, err := ff.RawVariation(tt.args.flagKey, tt.args.user, tt.args.defaultValue) if tt.expectedLog != "" { - time.Sleep(40 * time.Millisecond) // since the log is async, we are waiting to be sure it's written + time.Sleep( + 40 * time.Millisecond, + ) // since the log is async, we are waiting to be sure it's written if tt.expectedLog == "" { handler.AssertEmpty() } else { @@ -4097,7 +4228,11 @@ flag1: }) require.NoError(t, err) - res, err1 := goff.BoolVariation("flag1", ffcontext.NewEvaluationContextBuilder("my-key").Build(), false) + res, err1 := goff.BoolVariation( + "flag1", + ffcontext.NewEvaluationContextBuilder("my-key").Build(), + false, + ) assert.True(t, res) assert.NoError(t, err1) allFlags := goff.AllFlagsState(ffcontext.NewEvaluationContextBuilder("my-key").Build()) @@ -4112,7 +4247,11 @@ flag1: }, }) require.NoError(t, err2) - res2, err3 := goff2.BoolVariation("flag1", ffcontext.NewEvaluationContextBuilder("my-key").Build(), false) + res2, err3 := goff2.BoolVariation( + "flag1", + ffcontext.NewEvaluationContextBuilder("my-key").Build(), + false, + ) assert.True(t, res2) assert.NoError(t, err3) allFlags2 := goff2.AllFlagsState(ffcontext.NewEvaluationContextBuilder("my-key").Build()) @@ -4128,7 +4267,11 @@ flag1: }, }) 
require.NoError(t, err4) - res3, err5 := goff3.BoolVariation("flag1", ffcontext.NewEvaluationContextBuilder("my-key").Build(), false) + res3, err5 := goff3.BoolVariation( + "flag1", + ffcontext.NewEvaluationContextBuilder("my-key").Build(), + false, + ) assert.True(t, res3) assert.NoError(t, err5) From dd732aae943d5cf475c05a63fcc3202aeb75fb35 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 3 Apr 2025 06:53:24 +0100 Subject: [PATCH 17/26] WIP Signed-off-by: Thomas Poignant --- cmd/relayproxy/config/config.go | 3 +++ cmd/relayproxy/config/exporter.go | 1 + cmd/relayproxy/controller/collect_eval_data_test.go | 2 +- cmd/relayproxy/service/gofeatureflag.go | 5 +++++ 4 files changed, 10 insertions(+), 1 deletion(-) diff --git a/cmd/relayproxy/config/config.go b/cmd/relayproxy/config/config.go index d7936293196..192ae7fc41a 100644 --- a/cmd/relayproxy/config/config.go +++ b/cmd/relayproxy/config/config.go @@ -18,6 +18,7 @@ import ( "github.com/knadh/koanf/providers/posflag" "github.com/knadh/koanf/v2" "github.com/spf13/pflag" + ffclient "github.com/thomaspoignant/go-feature-flag" "github.com/xitongsys/parquet-go/parquet" "go.uber.org/zap" "go.uber.org/zap/zapcore" @@ -45,6 +46,7 @@ var DefaultExporter = struct { MaxEventInMemory int64 ParquetCompressionCodec string LogLevel string + ExporterEventType ffclient.ExporterEventType }{ Format: "JSON", LogFormat: "[{{ .FormattedDate}}] user=\"{{ .UserKey}}\", flag=\"{{ .Key}}\", value=\"{{ .Value}}\"", @@ -55,6 +57,7 @@ var DefaultExporter = struct { MaxEventInMemory: 100000, ParquetCompressionCodec: parquet.CompressionCodec_SNAPPY.String(), LogLevel: DefaultLogLevel, + ExporterEventType: ffclient.FeatureEventExporter, } // New is reading the configuration file diff --git a/cmd/relayproxy/config/exporter.go b/cmd/relayproxy/config/exporter.go index 64073b86ec9..73efb539c84 100644 --- a/cmd/relayproxy/config/exporter.go +++ b/cmd/relayproxy/config/exporter.go @@ -33,6 +33,7 @@ type ExporterConf struct { AccountName string `mapstructure:"accountName" koanf:"accountname"` AccountKey string `mapstructure:"accountKey" koanf:"accountkey"` Container string `mapstructure:"container" koanf:"container"` + ExporterEventType string `mapstructure:"eventType" koanf:"eventtype"` } func (c *ExporterConf) IsValid() error { diff --git a/cmd/relayproxy/controller/collect_eval_data_test.go b/cmd/relayproxy/controller/collect_eval_data_test.go index b31a0b2ef1b..f837bb587f3 100644 --- a/cmd/relayproxy/controller/collect_eval_data_test.go +++ b/cmd/relayproxy/controller/collect_eval_data_test.go @@ -193,7 +193,7 @@ func Test_collect_tracking_and_evaluation_events(t *testing.T) { // // PLEASE REWORK THIS TEST // - // + // TODO: Do some tests that the exporterEventType is correctly set in the exporter // // // diff --git a/cmd/relayproxy/service/gofeatureflag.go b/cmd/relayproxy/service/gofeatureflag.go index ecc46fde3bb..e6c49ad5dcc 100644 --- a/cmd/relayproxy/service/gofeatureflag.go +++ b/cmd/relayproxy/service/gofeatureflag.go @@ -168,6 +168,10 @@ func initDataExporters(proxyConf *config.Config) ([]ffclient.DataExporter, error } func initDataExporter(c *config.ExporterConf) (ffclient.DataExporter, error) { + exporterEventType := c.ExporterEventType + if exporterEventType == "" { + exporterEventType = config.DefaultExporter.ExporterEventType + } dataExp := ffclient.DataExporter{ FlushInterval: func() time.Duration { if c.FlushInterval != 0 { @@ -181,6 +185,7 @@ func initDataExporter(c *config.ExporterConf) (ffclient.DataExporter, error) { } return 
config.DefaultExporter.MaxEventInMemory }(), + ExporterEventType: exporterEventType, } var err error From 607afd5402c96dd272bebf9260f0ed19f6c3062d Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 11:40:02 +0200 Subject: [PATCH 18/26] update swagger Signed-off-by: Thomas Poignant --- cmd/relayproxy/docs/docs.go | 66 ++---------------------------- cmd/relayproxy/docs/swagger.json | 66 ++---------------------------- cmd/relayproxy/docs/swagger.yaml | 69 +++----------------------------- 3 files changed, 11 insertions(+), 190 deletions(-) diff --git a/cmd/relayproxy/docs/docs.go b/cmd/relayproxy/docs/docs.go index 56f57a22906..e418fddfa2e 100644 --- a/cmd/relayproxy/docs/docs.go +++ b/cmd/relayproxy/docs/docs.go @@ -623,67 +623,6 @@ const docTemplate = `{ } } }, - "exporter.FeatureEvent": { - "type": "object", - "properties": { - "contextKind": { - "description": "ContextKind is the kind of context which generated an event. This will only be \"anonymousUser\" for events generated\non behalf of an anonymous user or the reserved word \"user\" for events generated on behalf of a non-anonymous user", - "type": "string", - "example": "user" - }, - "creationDate": { - "description": "CreationDate When the feature flag was requested at Unix epoch time in milliseconds.", - "type": "integer", - "example": 1680246000011 - }, - "default": { - "description": "Default value is set to true if feature flag evaluation failed, in which case the value returned was the default\nvalue passed to variation. If the default field is omitted, it is assumed to be false.", - "type": "boolean", - "example": false - }, - "key": { - "description": "Key of the feature flag requested.", - "type": "string", - "example": "my-feature-flag" - }, - "kind": { - "description": "Kind for a feature event is feature.\nA feature event will only be generated if the trackEvents attribute of the flag is set to true.", - "type": "string", - "example": "feature" - }, - "metadata": { - "description": "Metadata are static information added in the providers to give context about the events generated.", - "allOf": [ - { - "$ref": "#/definitions/exporter.FeatureEventMetadata" - } - ] - }, - "source": { - "description": "Source indicates where the event was generated.\nThis is set to SERVER when the event was evaluated in the relay-proxy and PROVIDER_CACHE when it is evaluated from the cache.", - "type": "string", - "example": "SERVER" - }, - "userKey": { - "description": "UserKey The key of the user object used in a feature flag evaluation. Details for the user object used in a feature\nflag evaluation as reported by the \"feature\" event are transmitted periodically with a separate index event.", - "type": "string", - "example": "94a25909-20d8-40cc-8500-fee99b569345" - }, - "value": { - "description": "Value of the feature flag returned by feature flag evaluation." - }, - "variation": { - "description": "Variation of the flag requested. Flag variation values can be \"True\", \"False\", \"Default\" or \"SdkDefault\"\ndepending on which value was taken during flag evaluation. \"SdkDefault\" is used when an error is detected and the\ndefault value passed during the call to your variation is used.", - "type": "string", - "example": "admin-variation" - }, - "version": { - "description": "Version contains the version of the flag. 
If the field is omitted for the flag in the configuration file\nthe default version will be 0.", - "type": "string", - "example": "v1.0.0" - } - } - }, "exporter.FeatureEventMetadata": { "type": "object", "additionalProperties": true @@ -748,10 +687,11 @@ const docTemplate = `{ "type": "object", "properties": { "events": { - "description": "Events is the list of the event we send in the payload", + "description": "Events is the list of the event we send in the payload\nhere the type is any because we will unmarshal later in the different event types", "type": "array", "items": { - "$ref": "#/definitions/exporter.FeatureEvent" + "type": "object", + "additionalProperties": {} } }, "meta": { diff --git a/cmd/relayproxy/docs/swagger.json b/cmd/relayproxy/docs/swagger.json index 94ca6026b32..80c7f4310c2 100644 --- a/cmd/relayproxy/docs/swagger.json +++ b/cmd/relayproxy/docs/swagger.json @@ -615,67 +615,6 @@ } } }, - "exporter.FeatureEvent": { - "type": "object", - "properties": { - "contextKind": { - "description": "ContextKind is the kind of context which generated an event. This will only be \"anonymousUser\" for events generated\non behalf of an anonymous user or the reserved word \"user\" for events generated on behalf of a non-anonymous user", - "type": "string", - "example": "user" - }, - "creationDate": { - "description": "CreationDate When the feature flag was requested at Unix epoch time in milliseconds.", - "type": "integer", - "example": 1680246000011 - }, - "default": { - "description": "Default value is set to true if feature flag evaluation failed, in which case the value returned was the default\nvalue passed to variation. If the default field is omitted, it is assumed to be false.", - "type": "boolean", - "example": false - }, - "key": { - "description": "Key of the feature flag requested.", - "type": "string", - "example": "my-feature-flag" - }, - "kind": { - "description": "Kind for a feature event is feature.\nA feature event will only be generated if the trackEvents attribute of the flag is set to true.", - "type": "string", - "example": "feature" - }, - "metadata": { - "description": "Metadata are static information added in the providers to give context about the events generated.", - "allOf": [ - { - "$ref": "#/definitions/exporter.FeatureEventMetadata" - } - ] - }, - "source": { - "description": "Source indicates where the event was generated.\nThis is set to SERVER when the event was evaluated in the relay-proxy and PROVIDER_CACHE when it is evaluated from the cache.", - "type": "string", - "example": "SERVER" - }, - "userKey": { - "description": "UserKey The key of the user object used in a feature flag evaluation. Details for the user object used in a feature\nflag evaluation as reported by the \"feature\" event are transmitted periodically with a separate index event.", - "type": "string", - "example": "94a25909-20d8-40cc-8500-fee99b569345" - }, - "value": { - "description": "Value of the feature flag returned by feature flag evaluation." - }, - "variation": { - "description": "Variation of the flag requested. Flag variation values can be \"True\", \"False\", \"Default\" or \"SdkDefault\"\ndepending on which value was taken during flag evaluation. \"SdkDefault\" is used when an error is detected and the\ndefault value passed during the call to your variation is used.", - "type": "string", - "example": "admin-variation" - }, - "version": { - "description": "Version contains the version of the flag. 
If the field is omitted for the flag in the configuration file\nthe default version will be 0.", - "type": "string", - "example": "v1.0.0" - } - } - }, "exporter.FeatureEventMetadata": { "type": "object", "additionalProperties": true @@ -740,10 +679,11 @@ "type": "object", "properties": { "events": { - "description": "Events is the list of the event we send in the payload", + "description": "Events is the list of the event we send in the payload\nhere the type is any because we will unmarshal later in the different event types", "type": "array", "items": { - "$ref": "#/definitions/exporter.FeatureEvent" + "type": "object", + "additionalProperties": {} } }, "meta": { diff --git a/cmd/relayproxy/docs/swagger.yaml b/cmd/relayproxy/docs/swagger.yaml index 174385bedbc..aec9c758747 100644 --- a/cmd/relayproxy/docs/swagger.yaml +++ b/cmd/relayproxy/docs/swagger.yaml @@ -14,68 +14,6 @@ definitions: refreshed: type: boolean type: object - exporter.FeatureEvent: - properties: - contextKind: - description: |- - ContextKind is the kind of context which generated an event. This will only be "anonymousUser" for events generated - on behalf of an anonymous user or the reserved word "user" for events generated on behalf of a non-anonymous user - example: user - type: string - creationDate: - description: CreationDate When the feature flag was requested at Unix epoch - time in milliseconds. - example: 1680246000011 - type: integer - default: - description: |- - Default value is set to true if feature flag evaluation failed, in which case the value returned was the default - value passed to variation. If the default field is omitted, it is assumed to be false. - example: false - type: boolean - key: - description: Key of the feature flag requested. - example: my-feature-flag - type: string - kind: - description: |- - Kind for a feature event is feature. - A feature event will only be generated if the trackEvents attribute of the flag is set to true. - example: feature - type: string - metadata: - allOf: - - $ref: '#/definitions/exporter.FeatureEventMetadata' - description: Metadata are static information added in the providers to give - context about the events generated. - source: - description: |- - Source indicates where the event was generated. - This is set to SERVER when the event was evaluated in the relay-proxy and PROVIDER_CACHE when it is evaluated from the cache. - example: SERVER - type: string - userKey: - description: |- - UserKey The key of the user object used in a feature flag evaluation. Details for the user object used in a feature - flag evaluation as reported by the "feature" event are transmitted periodically with a separate index event. - example: 94a25909-20d8-40cc-8500-fee99b569345 - type: string - value: - description: Value of the feature flag returned by feature flag evaluation. - variation: - description: |- - Variation of the flag requested. Flag variation values can be "True", "False", "Default" or "SdkDefault" - depending on which value was taken during flag evaluation. "SdkDefault" is used when an error is detected and the - default value passed during the call to your variation is used. - example: admin-variation - type: string - version: - description: |- - Version contains the version of the flag. If the field is omitted for the flag in the configuration file - the default version will be 0. 
- example: v1.0.0 - type: string - type: object exporter.FeatureEventMetadata: additionalProperties: true type: object @@ -124,9 +62,12 @@ definitions: model.CollectEvalDataRequest: properties: events: - description: Events is the list of the event we send in the payload + description: |- + Events is the list of the event we send in the payload + here the type is any because we will unmarshal later in the different event types items: - $ref: '#/definitions/exporter.FeatureEvent' + additionalProperties: {} + type: object type: array meta: allOf: From 8f32d227e460410f6ef376d0f6b38638ba487ec0 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 11:54:28 +0200 Subject: [PATCH 19/26] add test Signed-off-by: Thomas Poignant --- cmd/relayproxy/controller/collect_eval_data_test.go | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/cmd/relayproxy/controller/collect_eval_data_test.go b/cmd/relayproxy/controller/collect_eval_data_test.go index f837bb587f3..f631920d3cb 100644 --- a/cmd/relayproxy/controller/collect_eval_data_test.go +++ b/cmd/relayproxy/controller/collect_eval_data_test.go @@ -2,7 +2,6 @@ package controller_test import ( "context" - "fmt" "io" "log/slog" "net/http" @@ -207,7 +206,6 @@ func Test_collect_tracking_and_evaluation_events(t *testing.T) { _ = os.Remove(trackingExporter.Name()) }() - // init go-feature-flag goFF, _ := ffclient.New(ffclient.Config{ PollingInterval: 10 * time.Second, LeveledLogger: slog.Default(), @@ -247,14 +245,12 @@ func Test_collect_tracking_and_evaluation_events(t *testing.T) { handlerErr := ctrl.Handler(c) assert.NoError(t, handlerErr) goFF.Close() - - fmt.Println("Evaluation events:") evalEvents, err := os.ReadFile(evalExporter.Name()) assert.NoError(t, err) - fmt.Println(string(evalEvents)) - - fmt.Println("Tracking events:") + want := "{\"kind\":\"feature\",\"contextKind\":\"user\",\"userKey\":\"94a25909-20d8-40cc-8500-fee99b569345\",\"creationDate\":1680246000,\"key\":\"my-feature-flag\",\"variation\":\"admin-variation\",\"value\":\"string\",\"default\":false,\"version\":\"v1.0.0\",\"source\":\"PROVIDER_CACHE\",\"metadata\":{\"environment\":\"production\",\"sdkVersion\":\"v1.0.0\",\"source\":\"my-source\",\"timestamp\":1680246000}}\n" + assert.JSONEq(t, want, string(evalEvents), "Invalid exported data") + wantTracking := "{\"kind\":\"tracking\",\"contextKind\":\"user\",\"userKey\":\"94a25909-20d8-40cc-8500-fee99b569345\",\"creationDate\":1680246020,\"key\":\"my-feature-flag\",\"evaluationContext\":{\"admin\":true,\"name\":\"john doe\",\"targetingKey\":\"94a25909-20d8-40cc-8500-fee99b569345\"},\"trackingEventDetails\":{\"value\":\"string\",\"version\":\"v1.0.0\"}}\n" trackingEvents, err := os.ReadFile(trackingExporter.Name()) assert.NoError(t, err) - fmt.Println(string(trackingEvents)) + assert.JSONEq(t, wantTracking, string(trackingEvents), "Invalid exported data") } From 0e64e8499aa14faec7ff407540a0d6ed871237ec Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 14:04:03 +0200 Subject: [PATCH 20/26] fix tests Signed-off-by: Thomas Poignant --- cmd/relayproxy/controller/collect_eval_data_test.go | 11 ----------- cmd/relayproxy/service/gofeatureflag_test.go | 7 +++++++ 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/cmd/relayproxy/controller/collect_eval_data_test.go b/cmd/relayproxy/controller/collect_eval_data_test.go index f631920d3cb..0fe51e1fcc2 100644 --- a/cmd/relayproxy/controller/collect_eval_data_test.go +++ b/cmd/relayproxy/controller/collect_eval_data_test.go @@ 
-186,17 +186,6 @@ func Test_collect_eval_data_Handler(t *testing.T) { } func Test_collect_tracking_and_evaluation_events(t *testing.T) { - // - // - // - // - // PLEASE REWORK THIS TEST - // - // TODO: Do some tests that the exporterEventType is correctly set in the exporter - // - // - // - evalExporter, err := os.CreateTemp("", "evalExport.json") assert.NoError(t, err) trackingExporter, err := os.CreateTemp("", "trackExport.json") diff --git a/cmd/relayproxy/service/gofeatureflag_test.go b/cmd/relayproxy/service/gofeatureflag_test.go index 4209ef4f3d5..4ce40f7a3f4 100644 --- a/cmd/relayproxy/service/gofeatureflag_test.go +++ b/cmd/relayproxy/service/gofeatureflag_test.go @@ -477,6 +477,7 @@ func Test_initExporter(t *testing.T) { Secret: "1234", Meta: nil, }, + ExporterEventType: ffclient.FeatureEventExporter, }, wantType: &webhookexporter.Exporter{}, }, @@ -498,6 +499,7 @@ func Test_initExporter(t *testing.T) { CsvTemplate: config.DefaultExporter.CsvFormat, ParquetCompressionCodec: parquet.CompressionCodec_UNCOMPRESSED.String(), }, + ExporterEventType: ffclient.FeatureEventExporter, }, wantType: &fileexporter.Exporter{}, }, @@ -513,6 +515,7 @@ func Test_initExporter(t *testing.T) { Exporter: &logsexporter.Exporter{ LogFormat: config.DefaultExporter.LogFormat, }, + ExporterEventType: ffclient.FeatureEventExporter, }, wantType: &logsexporter.Exporter{}, }, @@ -595,6 +598,7 @@ func Test_initExporter(t *testing.T) { CsvTemplate: config.DefaultExporter.CsvFormat, ParquetCompressionCodec: config.DefaultExporter.ParquetCompressionCodec, }, + ExporterEventType: ffclient.FeatureEventExporter, }, wantType: &gcstorageexporter.Exporter{}, }, @@ -608,6 +612,7 @@ func Test_initExporter(t *testing.T) { Topic: "example-topic", Addresses: []string{"addr1", "addr2"}, }, + ExporterEventType: ffclient.FeatureEventExporter, }, want: ffclient.DataExporter{ FlushInterval: config.DefaultExporter.FlushInterval, @@ -619,6 +624,7 @@ func Test_initExporter(t *testing.T) { Addresses: []string{"addr1", "addr2"}, }, }, + ExporterEventType: ffclient.FeatureEventExporter, }, wantType: &kafkaexporter.Exporter{}, }, @@ -662,6 +668,7 @@ func Test_initExporter(t *testing.T) { CsvTemplate: config.DefaultExporter.CsvFormat, ParquetCompressionCodec: config.DefaultExporter.ParquetCompressionCodec, }, + ExporterEventType: ffclient.FeatureEventExporter, }, wantType: &azureexporter.Exporter{}, }, From d10e309fb1d55b294166d76ca03616e8af80fd80 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 14:40:41 +0200 Subject: [PATCH 21/26] adding test case Signed-off-by: Thomas Poignant --- feature_flag_test.go | 19 ++++++++++++++----- tracking_test.go | 2 ++ 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/feature_flag_test.go b/feature_flag_test.go index 23968d1fbe5..368db880bbc 100644 --- a/feature_flag_test.go +++ b/feature_flag_test.go @@ -85,16 +85,23 @@ func TestStartWithMinInterval(t *testing.T) { } func TestValidUseCase(t *testing.T) { + cliExport := mock.Exporter{Bulk: false} // Valid use case err := ffclient.Init(ffclient.Config{ PollingInterval: 5 * time.Second, Retriever: &fileretriever.Retriever{Path: "testdata/flag-config.yaml"}, LeveledLogger: slog.Default(), - DataExporter: ffclient.DataExporter{ - FlushInterval: 10 * time.Second, - MaxEventInMemory: 1000, - Exporter: &mock.Exporter{ - Bulk: true, + DataExporters: []ffclient.DataExporter{ + { + FlushInterval: 10 * time.Second, + MaxEventInMemory: 1000, + Exporter: &mock.Exporter{ + Bulk: true, + }, + }, + { + Exporter: &cliExport, + 
ExporterEventType: ffclient.TrackingEventExporter, }, }, }) @@ -117,6 +124,8 @@ func TestValidUseCase(t *testing.T) { ffclient.SetOffline(false) assert.False(t, ffclient.IsOffline()) assert.True(t, ffclient.ForceRefresh()) + ffclient.Track("toto", user, map[string]interface{}{"key": "value"}) + assert.Equal(t, 1, len(cliExport.ExportedEvents)) } func TestValidUseCaseToml(t *testing.T) { diff --git a/tracking_test.go b/tracking_test.go index 5d7974b4d05..486753a15ee 100644 --- a/tracking_test.go +++ b/tracking_test.go @@ -31,6 +31,7 @@ func TestValidTrackingEvent(t *testing.T) { "my-feature-flag", ffcontext.NewEvaluationContextBuilder("1668d845-051d-4dd9-907a-7ebe6aa2c9da"). AddCustom("admin", true). + AddCustom("anonymous", true). Build(), map[string]interface{}{"additional data": "value"}, ) @@ -43,6 +44,7 @@ func TestValidTrackingEvent(t *testing.T) { map[string]interface{}{ "targetingKey": "1668d845-051d-4dd9-907a-7ebe6aa2c9da", "admin": true, + "anonymous": true, }, exp.ExportedEvents[0].EvaluationContext, ) From e36141825f0e3ced74a2dddaf733a724d480ea90 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 14:47:37 +0200 Subject: [PATCH 22/26] adding test for marshaling in JSON Signed-off-by: Thomas Poignant --- ffcontext/context_test.go | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/ffcontext/context_test.go b/ffcontext/context_test.go index feea3b5a4b1..3d5dc040f2d 100644 --- a/ffcontext/context_test.go +++ b/ffcontext/context_test.go @@ -195,3 +195,33 @@ func Test_ExtractGOFFProtectedFields(t *testing.T) { }) } } + +func TestEvaluationContext_MarshalJSON(t *testing.T) { + tests := []struct { + name string + context ffcontext.EvaluationContext + expected string + }{ + { + name: "marshal with empty attributes", + context: ffcontext.NewEvaluationContext("test-key"), + expected: `{"targetingKey":"test-key","attributes":{}}`, + }, + { + name: "marshal with attributes", + context: ffcontext.NewEvaluationContextBuilder("test-key"). + AddCustom("attr1", "value1"). + AddCustom("attr2", 123). + Build(), + expected: `{"targetingKey":"test-key","attributes":{"attr1":"value1","attr2":123}}`, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + data, err := tt.context.MarshalJSON() + assert.NoError(t, err) + assert.JSONEq(t, tt.expected, string(data)) + }) + } +} From cd15298e4072a7617b2231157778175d4d881a09 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 14:55:19 +0200 Subject: [PATCH 23/26] adding tests Signed-off-by: Thomas Poignant --- ffcontext/context_test.go | 59 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/ffcontext/context_test.go b/ffcontext/context_test.go index 3d5dc040f2d..cb982496039 100644 --- a/ffcontext/context_test.go +++ b/ffcontext/context_test.go @@ -225,3 +225,62 @@ func TestEvaluationContext_MarshalJSON(t *testing.T) { }) } } + +func TestEvaluationContext_ToMap(t *testing.T) { + tests := []struct { + name string + context ffcontext.EvaluationContext + expected map[string]interface{} + }{ + { + name: "empty attributes", + context: ffcontext.NewEvaluationContext("test-key"), + expected: map[string]interface{}{"targetingKey": "test-key"}, + }, + { + name: "attributes with values", + context: ffcontext.NewEvaluationContextBuilder("test-key"). + AddCustom("attr1", "value1"). + AddCustom("attr2", 123). 
+ Build(), + expected: map[string]interface{}{ + "targetingKey": "test-key", + "attr1": "value1", + "attr2": 123, + }, + }, + { + name: "attributes with nested map", + context: ffcontext.NewEvaluationContextBuilder("test-key"). + AddCustom("nested", map[string]interface{}{ + "key1": "value1", + "key2": 42, + }). + Build(), + expected: map[string]interface{}{ + "targetingKey": "test-key", + "nested": map[string]interface{}{ + "key1": "value1", + "key2": 42, + }, + }, + }, + { + name: "attributes with nil value", + context: ffcontext.NewEvaluationContextBuilder("test-key"). + AddCustom("attr1", nil). + Build(), + expected: map[string]interface{}{ + "targetingKey": "test-key", + "attr1": nil, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := tt.context.ToMap() + assert.Equal(t, tt.expected, got) + }) + } +} From ed3b7f451cf127e68d1d42511d7ce7f15eedb449 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 17:59:39 +0200 Subject: [PATCH 24/26] adding test for parquet Signed-off-by: Thomas Poignant --- exporter/fileexporter/exporter.go | 2 +- exporter/fileexporter/exporter_test.go | 129 ++++++++++++++++++------- exporter/tracking_event.go | 2 +- 3 files changed, 97 insertions(+), 36 deletions(-) diff --git a/exporter/fileexporter/exporter.go b/exporter/fileexporter/exporter.go index e07bc362440..f33605c6295 100644 --- a/exporter/fileexporter/exporter.go +++ b/exporter/fileexporter/exporter.go @@ -203,7 +203,7 @@ func (f *Exporter) writeParquetTrackingEvent( } defer func() { _ = fw.Close() }() - pw, err := writer.NewParquetWriter(fw, new(exporter.FeatureEvent), int64(runtime.NumCPU())) + pw, err := writer.NewParquetWriter(fw, new(exporter.TrackingEvent), int64(runtime.NumCPU())) if err != nil { return err } diff --git a/exporter/fileexporter/exporter_test.go b/exporter/fileexporter/exporter_test.go index 549b781f78a..68ea6bd6683 100644 --- a/exporter/fileexporter/exporter_test.go +++ b/exporter/fileexporter/exporter_test.go @@ -12,6 +12,7 @@ import ( "github.com/stretchr/testify/require" "github.com/thomaspoignant/go-feature-flag/exporter" "github.com/thomaspoignant/go-feature-flag/exporter/fileexporter" + "github.com/thomaspoignant/go-feature-flag/ffcontext" "github.com/thomaspoignant/go-feature-flag/utils/fflog" "github.com/xitongsys/parquet-go-source/local" "github.com/xitongsys/parquet-go/parquet" @@ -33,15 +34,17 @@ func TestFile_Export(t *testing.T) { CsvTemplate string OutputDir string ParquetCompressionCodec string + EventType string } type args struct { - logger *fflog.FFLogger - featureEvents []exporter.ExportableEvent + logger *fflog.FFLogger + events []exporter.ExportableEvent } type expected struct { - fileNameRegex string - content string - featureEvents []exporter.FeatureEvent + fileNameRegex string + content string + featureEvents []exporter.FeatureEvent + trackingEvents []exporter.TrackingEvent } tests := []struct { name string @@ -57,7 +60,7 @@ func TestFile_Export(t *testing.T) { wantErr: false, fields: fields{}, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -80,7 +83,7 @@ func TestFile_Export(t *testing.T) { Format: "csv", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: 
"anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -104,7 +107,7 @@ func TestFile_Export(t *testing.T) { ParquetCompressionCodec: parquet.CompressionCodec_SNAPPY.String(), }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", Metadata: map[string]interface{}{"test": "test"}, @@ -129,6 +132,44 @@ func TestFile_Export(t *testing.T) { }, }, }, + { + name: "all default parquet tracking events", + wantErr: false, + fields: fields{ + Format: "parquet", + ParquetCompressionCodec: parquet.CompressionCodec_SNAPPY.String(), + Filename: "tracking-{{ .Hostname}}-{{ .Timestamp}}.parquet", + EventType: "tracking", + }, + args: args{ + + events: []exporter.ExportableEvent{ + exporter.TrackingEvent{ + Kind: "feature", + ContextKind: "anonymous", + UserKey: "xxx", + CreationDate: 1617970547, + Key: "what-ever-you-want", + EvaluationContext: ffcontext.NewEvaluationContext("xxx-xxx-xxx").ToMap(), + TrackingDetails: map[string]interface{}{"foo": "bar"}, + }, + }, + }, + expected: expected{ + fileNameRegex: "^tracking-" + hostname + "-[0-9]*\\.parquet$", + trackingEvents: []exporter.TrackingEvent{ + { + Kind: "feature", + ContextKind: "anonymous", + UserKey: "xxx", + CreationDate: 1617970547, + Key: "what-ever-you-want", + EvaluationContext: ffcontext.NewEvaluationContext("xxx-xxx-xxx").ToMap(), + TrackingDetails: map[string]interface{}{"foo": "bar"}, + }, + }, + }, + }, { name: "custom CSV format", wantErr: false, @@ -137,7 +178,7 @@ func TestFile_Export(t *testing.T) { CsvTemplate: "{{ .Kind}};{{ .ContextKind}}\n", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -161,7 +202,7 @@ func TestFile_Export(t *testing.T) { ParquetCompressionCodec: parquet.CompressionCodec_UNCOMPRESSED.String(), }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", @@ -207,7 +248,7 @@ func TestFile_Export(t *testing.T) { Filename: "{{ .Format}}-test-{{ .Timestamp}}", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -230,7 +271,7 @@ func TestFile_Export(t *testing.T) { Format: "xxx", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -253,7 +294,7 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "non-existent-dir"), }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 
1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -276,7 +317,7 @@ func TestFile_Export(t *testing.T) { Filename: "{{ .InvalidField}}", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -296,7 +337,7 @@ func TestFile_Export(t *testing.T) { CsvTemplate: "{{ .Foo}}", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -316,7 +357,7 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "invalid-permissions-dir"), }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -342,7 +383,7 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "invalid-parent-dir"), }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -368,7 +409,7 @@ func TestFile_Export(t *testing.T) { OutputDir: filepath.Join(tempDir, "dir-with-trailing-slash") + "/", }, args: args{ - featureEvents: []exporter.ExportableEvent{ + events: []exporter.ExportableEvent{ exporter.FeatureEvent{ Kind: "feature", ContextKind: "anonymousUser", UserKey: "ABCD", CreationDate: 1617970547, Key: "random-key", Variation: "Default", Value: "YO", Default: false, Source: "SERVER", @@ -408,7 +449,7 @@ func TestFile_Export(t *testing.T) { CsvTemplate: tt.fields.CsvTemplate, ParquetCompressionCodec: tt.fields.ParquetCompressionCodec, } - err := f.Export(context.Background(), tt.args.logger, tt.args.featureEvents) + err := f.Export(context.Background(), tt.args.logger, tt.args.events) if tt.wantErr { assert.Error(t, err, "export method should error") return @@ -425,21 +466,41 @@ func TestFile_Export(t *testing.T) { assert.Regexp(t, tt.expected.fileNameRegex, files[0].Name(), "Invalid file name") if tt.fields.Format == "parquet" { - fr, err := local.NewLocalFileReader(outputDir + "/" + files[0].Name()) - assert.NoError(t, err) - defer fr.Close() - pr, err := reader.NewParquetReader( - fr, - new(exporter.FeatureEvent), - int64(runtime.NumCPU()), - ) - assert.NoError(t, err) - defer pr.ReadStop() - gotFeatureEvents := make([]exporter.FeatureEvent, pr.GetNumRows()) - err = pr.Read(&gotFeatureEvents) - assert.NoError(t, err) - assert.ElementsMatch(t, tt.expected.featureEvents, gotFeatureEvents) - return + switch tt.fields.EventType { + case "tracking": + fr, err := local.NewLocalFileReader(outputDir + "/" + files[0].Name()) + assert.NoError(t, err) + defer fr.Close() + pr, err := reader.NewParquetReader( + fr, + new(exporter.TrackingEvent), + int64(runtime.NumCPU()), + ) + assert.NoError(t, err) + defer pr.ReadStop() + gotFeatureEvents := make([]exporter.TrackingEvent, pr.GetNumRows()) + err 
= pr.Read(&gotFeatureEvents) + assert.NoError(t, err) + assert.ElementsMatch(t, tt.expected.trackingEvents, gotFeatureEvents) + return + default: + fr, err := local.NewLocalFileReader(outputDir + "/" + files[0].Name()) + assert.NoError(t, err) + defer fr.Close() + pr, err := reader.NewParquetReader( + fr, + new(exporter.FeatureEvent), + int64(runtime.NumCPU()), + ) + assert.NoError(t, err) + defer pr.ReadStop() + gotFeatureEvents := make([]exporter.FeatureEvent, pr.GetNumRows()) + err = pr.Read(&gotFeatureEvents) + assert.NoError(t, err) + assert.ElementsMatch(t, tt.expected.featureEvents, gotFeatureEvents) + return + } + } expectedContent, _ := os.ReadFile(tt.expected.content) diff --git a/exporter/tracking_event.go b/exporter/tracking_event.go index 02069d50a51..261b6df9f17 100644 --- a/exporter/tracking_event.go +++ b/exporter/tracking_event.go @@ -34,7 +34,7 @@ type TrackingEvent struct { EvaluationContext map[string]any `json:"evaluationContext" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` // TrackingDetails contains the details of the tracking event - TrackingDetails TrackingEventDetails `json:"trackingEventDetails" parquet:"name=evaluationContext, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` + TrackingDetails TrackingEventDetails `json:"trackingEventDetails" parquet:"name=trackingEventDetails, type=MAP, keytype=BYTE_ARRAY, keyconvertedtype=UTF8, valuetype=BYTE_ARRAY, valueconvertedtype=UTF8"` } func (f TrackingEvent) GetKey() string { From 65982ed40b2f697ed94a61b6b58db4397d6b1f8d Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 18:50:15 +0200 Subject: [PATCH 25/26] add comment + change error Signed-off-by: Thomas Poignant --- exporter/fileexporter/exporter.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/exporter/fileexporter/exporter.go b/exporter/fileexporter/exporter.go index f33605c6295..cf7f59e98f9 100644 --- a/exporter/fileexporter/exporter.go +++ b/exporter/fileexporter/exporter.go @@ -162,6 +162,7 @@ func (f *Exporter) writeParquet(filePath string, events []exporter.ExportableEve return f.writeParquetFeatureEvent(filePath, parquetFeatureEvents) } +// writeParquetFeatureEvent writes the feature events in a parquet file func (f *Exporter) writeParquetFeatureEvent(filePath string, events []exporter.FeatureEvent) error { fw, err := local.NewLocalFileWriter(filePath) if err != nil { @@ -186,13 +187,14 @@ func (f *Exporter) writeParquetFeatureEvent(filePath string, events []exporter.F } event.Value = eventValue if err = pw.Write(event); err != nil { - return fmt.Errorf("error while writing the export file: %v", err) + return fmt.Errorf("error while writing the parquet export file: %v", err) } } return pw.WriteStop() } +// writeParquetTrackingEvent writes the tracking events in a parquet file func (f *Exporter) writeParquetTrackingEvent( filePath string, events []exporter.TrackingEvent, @@ -215,7 +217,7 @@ func (f *Exporter) writeParquetTrackingEvent( for _, event := range events { if err = pw.Write(event); err != nil { - return fmt.Errorf("error while writing the export file: %v", err) + return fmt.Errorf("error while writing the parquet export file: %v", err) } } From 01a8bc01fe19f2282c72ae0bf98de449daf34142 Mon Sep 17 00:00:00 2001 From: Thomas Poignant Date: Thu, 10 Apr 2025 19:22:37 +0200 Subject: [PATCH 26/26] fix lint Signed-off-by: Thomas Poignant --- 
 exporter/fileexporter/exporter_test.go | 1 -
 1 file changed, 1 deletion(-)

diff --git a/exporter/fileexporter/exporter_test.go b/exporter/fileexporter/exporter_test.go
index 68ea6bd6683..af87c6ce399 100644
--- a/exporter/fileexporter/exporter_test.go
+++ b/exporter/fileexporter/exporter_test.go
@@ -500,7 +500,6 @@ func TestFile_Export(t *testing.T) {
 					assert.ElementsMatch(t, tt.expected.featureEvents, gotFeatureEvents)
 					return
 				}
-
 			}
 
 			expectedContent, _ := os.ReadFile(tt.expected.content)
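
For reference, here is a minimal, self-contained sketch of the behaviour the new tests exercise: routing feature (evaluation) events and tracking events to two different exporters. It is only an illustration assembled from the APIs visible in the patches above (Config.DataExporters, DataExporter.ExporterEventType, ffclient.Track, fileexporter.Exporter); the flag key, retriever path, and output directories are made-up values and are not part of the patch series itself.

// Illustrative example only (not part of the patch series).
package main

import (
	"log/slog"
	"time"

	ffclient "github.com/thomaspoignant/go-feature-flag"
	"github.com/thomaspoignant/go-feature-flag/exporter/fileexporter"
	"github.com/thomaspoignant/go-feature-flag/ffcontext"
	"github.com/thomaspoignant/go-feature-flag/retriever/fileretriever"
)

func main() {
	err := ffclient.Init(ffclient.Config{
		PollingInterval: 10 * time.Second,
		LeveledLogger:   slog.Default(),
		// Illustrative retriever and path: any retriever works here.
		Retriever: &fileretriever.Retriever{Path: "flag-config.yaml"},
		DataExporters: []ffclient.DataExporter{
			{
				// Default event type: feature (evaluation) events.
				FlushInterval:    10 * time.Second,
				MaxEventInMemory: 1000,
				Exporter:         &fileexporter.Exporter{Format: "json", OutputDir: "/tmp/eval-events"},
			},
			{
				// Only events produced by ffclient.Track are routed to this exporter.
				ExporterEventType: ffclient.TrackingEventExporter,
				Exporter:          &fileexporter.Exporter{Format: "json", OutputDir: "/tmp/tracking-events"},
			},
		},
	})
	if err != nil {
		panic(err)
	}
	defer ffclient.Close()

	evalCtx := ffcontext.NewEvaluationContextBuilder("94a25909-20d8-40cc-8500-fee99b569345").
		AddCustom("admin", true).
		Build()

	// Produces a feature event, handled by the first exporter.
	_, _ = ffclient.BoolVariation("my-feature-flag", evalCtx, false)

	// Produces a TrackingEvent, handled only by the second exporter.
	ffclient.Track("my-feature-flag", evalCtx, map[string]interface{}{"value": "string"})
}

With this configuration, the evaluation event ends up in the file written by the first exporter and the tracking event in the file written by the second one, which is what Test_collect_tracking_and_evaluation_events asserts on its two temporary files.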