OpenTelemetry Bot d680729c09 [chore] Prepare release 0.90.0 (#29543) 1 gadu atpakaļ
..
internal a5323a1d75 [pdatatest] allow partial matching of resource and metric attributes via regular expression (#28654) 1 gadu atpakaļ
plogtest cd92432fd2 [pkg/golden] internal/coreinternal/golden -> pkg/golden (#28636) 1 gadu atpakaļ
pmetrictest cd92432fd2 [pkg/golden] internal/coreinternal/golden -> pkg/golden (#28636) 1 gadu atpakaļ
ptracetest cd92432fd2 [pkg/golden] internal/coreinternal/golden -> pkg/golden (#28636) 1 gadu atpakaļ
Makefile 95625e0303 [chore] Make comparetest and pdatautil packages exported under pkg (#17873) 1 gadu atpakaļ
README.md c87856083e [pkg/pdatatest] Ignore span timestamps (#27798) 1 gadu atpakaļ
go.mod d680729c09 [chore] Prepare release 0.90.0 (#29543) 1 gadu atpakaļ
go.sum 40b485f08a Update core for v0.90.0 release (#29539) 1 gadu atpakaļ
metadata.yaml 8a4348cb00 [chore] add codeowners to metadata (#24404) 1 gadu atpakaļ

README.md

pdatatest

This module provides test helpers for comparing metrics, logs, and traces. The main functions are:

  • pmetrictest.CompareMetrics
  • plogtest.CompareLogs
  • ptracetest.CompareTraces

These functions compare the actual result with the expected result and return an error if they are not equal. The error contains a detailed description of the differences. The module also provides several options to customize the comparison by ignoring certain fields, attributes, or slices order. The module also provides helper functions for comparing other embedded pdata types such as pmetric.ResourceMetrics, pmetric.ScopeMetrics, plog.LogRecord, ptrace.Span, etc.

Typical Usage

// TestMetricsScraper shows how to compare scraped metrics against a golden
// file while ignoring timestamps, which vary from run to run.
func TestMetricsScraper(t *testing.T) {
	scraper := newScraper(componenttest.NewNopReceiverCreateSettings(), createDefaultConfig().(*Config))
	require.NoError(t, scraper.start(context.Background(), componenttest.NewNopHost()))
	// scrape returns the metrics and an error; require.NoError only asserts.
	actualMetrics, err := scraper.scrape(context.Background())
	require.NoError(t, err)

	expectedMetrics, err := readMetrics(filepath.Join("testdata", "expected.json"))
	require.NoError(t, err)

	require.NoError(t, pmetrictest.CompareMetrics(expectedMetrics, actualMetrics, pmetrictest.IgnoreStartTimestamp(),
		pmetrictest.IgnoreTimestamp()))
}
// TestLogsReceiver shows how to compare logs collected by a receiver against
// a golden file using plogtest.CompareLogs.
func TestLogsReceiver(t *testing.T) {
	sink := &consumertest.LogsSink{}
	rcvr := newLogsReceiver(createDefaultConfig().(*Config), zap.NewNop(), sink)
	rcvr.client = defaultMockClient()
	require.NoError(t, rcvr.Start(context.Background(), componenttest.NewNopHost()))
	// Wait for the receiver to deliver at least one log record to the sink.
	require.Eventually(t, func() bool {
		return sink.LogRecordCount() > 0
	}, 2*time.Second, 10*time.Millisecond)
	require.NoError(t, rcvr.Shutdown(context.Background()))
	actualLogs := sink.AllLogs()[0]

	expectedLogs, err := readLogs(filepath.Join("testdata", "logs", "expected.json"))
	require.NoError(t, err)

	// Logs are compared with plogtest (pmetrictest is for metrics).
	require.NoError(t, plogtest.CompareLogs(expectedLogs, actualLogs))
}
// TestTraceProcessor shows how to compare traces emitted by a processor
// against a golden file while ignoring span start/end timestamps.
func TestTraceProcessor(t *testing.T) {
	nextTrace := new(consumertest.TracesSink)
	tp, err := newTracesProcessor(NewFactory().CreateDefaultConfig(), nextTrace)
	require.NoError(t, err)
	traces := generateTraces()
	require.NoError(t, tp.ConsumeTraces(context.Background(), traces))
	actualTraces := nextTrace.AllTraces()[0]

	expectedTraces, err := readTraces(filepath.Join("testdata", "traces", "expected.json"))
	require.NoError(t, err)

	require.NoError(t, ptracetest.CompareTraces(expectedTraces, actualTraces, ptracetest.IgnoreStartTimestamp(),
		ptracetest.IgnoreEndTimestamp()))
}