This is an automated email from the ASF dual-hosted git repository.
wusheng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/skywalking-banyandb.git
The following commit(s) were added to refs/heads/main by this push:
new 80cd6234 Fix unsupported empty string tag bug. (#925)
80cd6234 is described below
commit 80cd6234083ab071a53495c69ebc5aa35da600f2
Author: Gao Hongtao <[email protected]>
AuthorDate: Tue Jan 6 23:30:46 2026 +0800
Fix unsupported empty string tag bug. (#925)
---
.github/workflows/test.yml | 2 +-
CHANGES.md | 1 +
banyand/internal/storage/version.go | 2 +-
banyand/internal/storage/versions.yml | 2 +-
banyand/queue/pub/client.go | 11 +-
pkg/convert/string.go | 8 +-
pkg/convert/string_test.go | 2 +-
pkg/encoding/bytes.go | 41 ++--
pkg/encoding/bytes_test.go | 146 ++++++++++++++
pkg/encoding/dictionary.go | 47 +++--
pkg/encoding/dictionary_test.go | 209 +++++++++++++++++++++
.../stream/data/input/all_with_http_method.ql | 20 ++
.../stream/data/input/all_with_http_method.yaml | 26 +++
test/cases/stream/data/testdata/sw.json | 2 +-
.../stream/data/want/all_with_http_method.yaml | 103 ++++++++++
test/cases/stream/stream.go | 1 +
test/cases/trace/data/want/multi_group_new_tag.yml | 4 +-
17 files changed, 593 insertions(+), 34 deletions(-)
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e764ece9..047328f6 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -74,7 +74,7 @@ jobs:
- name: Sanitize test name for artifact
if: ${{ failure() }}
id: sanitize-name
- run: echo "sanitized=$(echo '${{ inputs.test-name }}' | sed
's/[^a-zA-Z0-9._-]/-/g')" >> $GITHUB_OUTPUT
+ run: echo "sanitized=$(echo '${{ inputs.test-name }}-${{
inputs.timezone }}' | sed 's/[^a-zA-Z0-9._-]/-/g')" >> $GITHUB_OUTPUT
- uses: actions/upload-artifact@v4
if: ${{ failure() }}
name: Upload BanyanDB Data Folder
diff --git a/CHANGES.md b/CHANGES.md
index 2d615f9e..ceca71b0 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -23,6 +23,7 @@ Release Notes.
- Fix server got panic when create/update property with high dist usage.
- Fix incorrect key range update in sidx part metadata.
- Fix panic in measure block merger when merging blocks with overlapping timestamps.
+- Fix unsupported empty string tag bug.
### Document
diff --git a/banyand/internal/storage/version.go b/banyand/internal/storage/version.go
index 6f9d6ccf..55a92f04 100644
--- a/banyand/internal/storage/version.go
+++ b/banyand/internal/storage/version.go
@@ -28,7 +28,7 @@ import (
const (
metadataFilename = "metadata"
- currentVersion = "1.3.0"
+ currentVersion = "1.4.0"
compatibleVersionsKey = "versions"
compatibleVersionsFilename = "versions.yml"
)
diff --git a/banyand/internal/storage/versions.yml b/banyand/internal/storage/versions.yml
index 7f01e316..e5cb937c 100644
--- a/banyand/internal/storage/versions.yml
+++ b/banyand/internal/storage/versions.yml
@@ -14,4 +14,4 @@
# limitations under the License.
versions:
-- 1.3.0
+- 1.4.0
diff --git a/banyand/queue/pub/client.go b/banyand/queue/pub/client.go
index 104bbbdd..488b9f98 100644
--- a/banyand/queue/pub/client.go
+++ b/banyand/queue/pub/client.go
@@ -464,7 +464,14 @@ func (p *pub) checkWritable(n string, topic bus.Topic) (bool, *common.Error) {
backoff := jitteredBackoff(initBackoff, maxBackoff, attempt, defaultJitterFactor)
select {
case <-time.After(backoff):
- if errInternal := p.checkServiceHealth(t, node.conn); errInternal == nil {
+ p.mu.RLock()
+ nodeCur, okCur := p.active[nodeName]
+ p.mu.RUnlock()
+ if !okCur {
+ return
+ }
+ errInternal := p.checkServiceHealth(t, nodeCur.conn)
+ if errInternal == nil {
func() {
p.mu.Lock()
defer p.mu.Unlock()
@@ -478,7 +485,7 @@ func (p *pub) checkWritable(n string, topic bus.Topic) (bool, *common.Error) {
}()
return
}
- p.log.Warn().Str("topic",
t).Err(err).Str("node", nodeName).Dur("backoff", backoff).Msg("data node can
not ingest data")
+ p.log.Warn().Str("topic",
t).Err(errInternal).Str("node", nodeName).Dur("backoff", backoff).Msg("data
node can not ingest data")
case <-p.closer.CloseNotify():
return
}
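An aside on the client.go hunks above: after each backoff sleep the publisher now re-reads the active-node map under the read lock instead of reusing the connection captured before the sleep, and the warning log reports errInternal rather than the outer err. Below is a minimal, self-contained sketch of that re-lookup pattern; the registry type and field names are illustrative stand-ins, not the project's types.

    package main

    import (
        "fmt"
        "sync"
    )

    // registry mimics a map of active nodes guarded by an RWMutex
    // (type and field names here are stand-ins, not BanyanDB's).
    type registry struct {
        mu     sync.RWMutex
        active map[string]string // node name -> connection handle
    }

    // lookup re-reads the map under the read lock; doing this after every
    // backoff sleep avoids health-checking the connection of an evicted node.
    func (r *registry) lookup(name string) (string, bool) {
        r.mu.RLock()
        defer r.mu.RUnlock()
        conn, ok := r.active[name]
        return conn, ok
    }

    func main() {
        r := &registry{active: map[string]string{"node-1": "conn-1"}}
        if conn, ok := r.lookup("node-1"); ok {
            fmt.Println("health-check over", conn)
        }
        delete(r.active, "node-1") // node evicted while a retry was sleeping (single-goroutine demo)
        if _, ok := r.lookup("node-1"); !ok {
            fmt.Println("node gone; stop retrying")
        }
    }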
diff --git a/pkg/convert/string.go b/pkg/convert/string.go
index d57c40a1..58963679 100644
--- a/pkg/convert/string.go
+++ b/pkg/convert/string.go
@@ -21,17 +21,19 @@ import (
"unsafe"
)
+var emptyStringBytes = []byte{}
+
// StringToBytes converts string to bytes.
-// It work well until the referenced memory won’t be changed.
+// It works well until the referenced memory won’t be changed.
func StringToBytes(s string) (b []byte) {
if s == "" {
- return nil
+ return emptyStringBytes
}
return unsafe.Slice(unsafe.StringData(s), len(s))
}
// BytesToString converts bytes to string.
-// It work well until the referenced memory won’t be changed.
+// It works well until the referenced memory won’t be changed.
func BytesToString(b []byte) string {
if len(b) == 0 {
return ""
diff --git a/pkg/convert/string_test.go b/pkg/convert/string_test.go
index 36bbba87..0c35d5a9 100644
--- a/pkg/convert/string_test.go
+++ b/pkg/convert/string_test.go
@@ -28,7 +28,7 @@ func TestStringToBytes(t *testing.T) {
s string
want []byte
}{
- {"EmptyString", "", nil},
+ {"EmptyString", "", []byte{}},
{"NonEmptyString", "hello", []byte("hello")},
{"SpecialChars", "!@#$%^&*()_+{}", []byte("!@#$%^&*()_+{}")},
{"CustomString", "test123", []byte("test123")},
diff --git a/pkg/encoding/bytes.go b/pkg/encoding/bytes.go
index 5130cdcf..c9b0737c 100644
--- a/pkg/encoding/bytes.go
+++ b/pkg/encoding/bytes.go
@@ -46,7 +46,12 @@ func EncodeBytesBlock(dst []byte, a [][]byte) []byte {
u64s := GenerateUint64List(len(a))
aLens := u64s.L[:0]
for _, s := range a {
- aLens = append(aLens, uint64(len(s)))
+ if s == nil {
+ aLens = append(aLens, 0)
+ } else {
+ // Offset by 1: "" (len 0) becomes 1, "a" (len 1) becomes 2, etc.
+ aLens = append(aLens, uint64(len(s))+1)
+ }
}
u64s.L = aLens
dst = EncodeUint64Block(dst, u64s.L)
@@ -100,15 +105,22 @@ func (bbd *BytesBlockDecoder) Decode(dst [][]byte, src []byte, itemsCount uint64
data := bbd.data[dataLen:]
for _, sLen := range aLens {
- if uint64(len(data)) < sLen {
- return dst, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", sLen, len(data))
- }
if sLen == 0 {
dst = append(dst, nil)
continue
}
- dst = append(dst, data[:sLen])
- data = data[sLen:]
+ // Reverse the offset
+ actualLen := sLen - 1
+ if uint64(len(data)) < actualLen {
+ return dst, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", actualLen, len(data))
+ }
+ if actualLen == 0 {
+ // Explicitly create non-nil empty slice for empty strings
+ dst = append(dst, []byte{})
+ } else {
+ dst = append(dst, data[:actualLen])
+ }
+ data = data[actualLen:]
}
return dst, nil
@@ -136,15 +148,22 @@ func (bbd *BytesBlockDecoder) DecodeWithTail(dst [][]byte, src []byte, itemsCoun
data := bbd.data[dataLen:]
for _, sLen := range aLens {
- if uint64(len(data)) < sLen {
- return dst, tail, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", sLen, len(data))
- }
if sLen == 0 {
dst = append(dst, nil)
continue
}
- dst = append(dst, data[:sLen])
- data = data[sLen:]
+ // Reverse the offset
+ actualLen := sLen - 1
+ if uint64(len(data)) < actualLen {
+ return dst, tail, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", actualLen, len(data))
+ }
+ if actualLen == 0 {
+ // Explicitly create non-nil empty slice for empty strings
+ dst = append(dst, []byte{})
+ } else {
+ dst = append(dst, data[:actualLen])
+ }
+ data = data[actualLen:]
}
return dst, tail, nil
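All of the encoder and decoder hunks above share one idea: a stored length of 0 continues to mean nil, while every non-nil value, including the empty string, is written as len+1 and shifted back down on decode. Here is a standalone sketch of that offset-by-1 scheme; the helper names are illustrative and not part of the patch.

    package main

    import "fmt"

    // encodeLen maps a value to its stored length: nil stays 0, and every
    // non-nil slice (including []byte{}) is shifted up by one.
    func encodeLen(v []byte) uint64 {
        if v == nil {
            return 0
        }
        return uint64(len(v)) + 1
    }

    // decodeLen reverses the shift: 0 -> nil, 1 -> empty, n -> n-1 payload bytes.
    func decodeLen(stored uint64) (payloadLen uint64, isNil bool) {
        if stored == 0 {
            return 0, true
        }
        return stored - 1, false
    }

    func main() {
        for _, v := range [][]byte{nil, {}, []byte("GET")} {
            stored := encodeLen(v)
            payload, isNil := decodeLen(stored)
            fmt.Printf("value=%q nil=%v stored=%d payloadLen=%d decodesToNil=%v\n",
                v, v == nil, stored, payload, isNil)
        }
    }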
diff --git a/pkg/encoding/bytes_test.go b/pkg/encoding/bytes_test.go
index ec4eccd2..f418d5d8 100644
--- a/pkg/encoding/bytes_test.go
+++ b/pkg/encoding/bytes_test.go
@@ -18,6 +18,7 @@
package encoding_test
import (
+ "fmt"
"testing"
"github.com/stretchr/testify/assert"
@@ -60,3 +61,148 @@ func TestEncodeBlockAndDecode(t *testing.T) {
assert.Equal(t, slice, decoded[i])
}
}
+
+func TestEncodeBytesBlockEdgeCases(t *testing.T) {
+ tests := []struct {
+ name string
+ input [][]byte
+ expected [][]byte
+ }{
+ {
+ name: "nil slice",
+ input: [][]byte{nil},
+ expected: [][]byte{nil},
+ },
+ {
+ name: "empty slice",
+ input: [][]byte{{}},
+ expected: [][]byte{{}},
+ },
+ {
+ name: "single byte",
+ input: [][]byte{[]byte("a")},
+ expected: [][]byte{[]byte("a")},
+ },
+ {
+ name: "mixed nil and empty",
+ input: [][]byte{nil, {}, nil},
+ expected: [][]byte{nil, {}, nil},
+ },
+ {
+ name: "mixed empty and content",
+ input: [][]byte{{}, []byte("hello"), {}},
+ expected: [][]byte{{}, []byte("hello"), {}},
+ },
+ {
+ name: "all nil",
+ input: [][]byte{nil, nil, nil},
+ expected: [][]byte{nil, nil, nil},
+ },
+ {
+ name: "all empty",
+ input: [][]byte{{}, {}, {}},
+ expected: [][]byte{{}, {}, {}},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ encoded := encoding.EncodeBytesBlock(nil, tt.input)
+ require.NotNil(t, encoded)
+
+ blockDecoder := &encoding.BytesBlockDecoder{}
+ decoded, err := blockDecoder.Decode(nil, encoded, uint64(len(tt.input)))
+ require.Nil(t, err)
+ require.Len(t, decoded, len(tt.expected))
+
+ for i, expected := range tt.expected {
+ if expected == nil {
+ assert.Nil(t, decoded[i], "index %d should be nil", i)
+ } else {
+ assert.NotNil(t, decoded[i], "index %d should not be nil", i)
+ assert.Equal(t, expected, decoded[i], "index %d should match expected", i)
+ }
+ }
+ })
+ }
+}
+
+func TestEncodeBytesBlockRoundTripConsistency(t *testing.T) {
+ testCases := [][][]byte{
+ // Basic cases
+ {nil},
+ {[]byte{}},
+ {[]byte("a")},
+ {[]byte("hello world")},
+
+ // Mixed scenarios
+ {nil, []byte{}, []byte("a")},
+ {[]byte{}, nil, []byte("hello")},
+ {nil, []byte{}, nil, []byte("test"), []byte{}},
+
+ // Edge cases with different lengths
+ {[]byte(""), []byte("x"), []byte("xx"), []byte("xxx")},
+ {nil, nil, []byte{}, []byte{}, []byte("a"), []byte("ab")},
+
+ // Large content
+ {[]byte("this is a longer string for testing purposes")},
+ {nil, []byte("mixed with nil and empty"), []byte{},
[]byte("another string")},
+ }
+
+ for i, original := range testCases {
+ t.Run(fmt.Sprintf("round_trip_%d", i), func(t *testing.T) {
+ // Encode
+ encoded := encoding.EncodeBytesBlock(nil, original)
+ require.NotNil(t, encoded)
+
+ // Decode
+ blockDecoder := &encoding.BytesBlockDecoder{}
+ decoded, err := blockDecoder.Decode(nil, encoded, uint64(len(original)))
+ require.Nil(t, err)
+ require.Len(t, decoded, len(original))
+
+ // Verify round-trip consistency
+ for j, orig := range original {
+ if orig == nil {
+ assert.Nil(t, decoded[j], "position %d: nil should remain nil", j)
+ } else {
+ assert.NotNil(t, decoded[j], "position %d: non-nil should remain non-nil", j)
+ assert.Equal(t, orig, decoded[j], "position %d: content should match", j)
+ }
+ }
+
+ // Test that we can encode the decoded result again and get the same encoded bytes
+ reEncoded := encoding.EncodeBytesBlock(nil, decoded)
+ assert.Equal(t, encoded, reEncoded, "re-encoding decoded result should produce identical bytes")
+ })
+ }
+}
+
+func TestEncodeBytesBlockNilEmptyStringDistinction(t *testing.T) {
+ t.Run("nil vs empty string distinction", func(t *testing.T) {
+ // Test that nil and empty string produce different encoded results
+ nilSlice := [][]byte{nil}
+ emptySlice := [][]byte{{}}
+
+ nilEncoded := encoding.EncodeBytesBlock(nil, nilSlice)
+ emptyEncoded := encoding.EncodeBytesBlock(nil, emptySlice)
+
+ // They should be different (nil uses 0, empty uses 1)
+ assert.NotEqual(t, nilEncoded, emptyEncoded)
+
+ // Decode and verify they decode to different results
+ blockDecoder := &encoding.BytesBlockDecoder{}
+
+ nilDecoded, err := blockDecoder.Decode(nil, nilEncoded, 1)
+ require.Nil(t, err)
+ assert.Len(t, nilDecoded, 1)
+ assert.Nil(t, nilDecoded[0])
+
+ blockDecoder.Reset()
+ emptyDecoded, err := blockDecoder.Decode(nil, emptyEncoded, 1)
+ require.Nil(t, err)
+ assert.Len(t, emptyDecoded, 1)
+ assert.NotNil(t, emptyDecoded[0])
+ assert.Equal(t, []byte{}, emptyDecoded[0])
+ })
+}
diff --git a/pkg/encoding/dictionary.go b/pkg/encoding/dictionary.go
index 0aa10965..39de269c 100644
--- a/pkg/encoding/dictionary.go
+++ b/pkg/encoding/dictionary.go
@@ -51,7 +51,7 @@ func (d *Dictionary) Reset() {
// Add adds a value to the dictionary.
func (d *Dictionary) Add(value []byte) bool {
for i, v := range d.values {
- if bytes.Equal(v, value) {
+ if valuesEqual(v, value) {
d.indices = append(d.indices, uint32(i))
return true
}
@@ -65,6 +65,17 @@ func (d *Dictionary) Add(value []byte) bool {
return true
}
+// valuesEqual compares two byte slices, distinguishing nil from empty slice.
+func valuesEqual(a, b []byte) bool {
+ if a == nil && b == nil {
+ return true
+ }
+ if a == nil || b == nil {
+ return false
+ }
+ return bytes.Equal(a, b)
+}
+
// Encode encodes the dictionary.
func (d *Dictionary) Encode(dst []byte) []byte {
dst = VarUint64ToBytes(dst, uint64(len(d.values)))
@@ -124,15 +135,22 @@ func (d *Dictionary) decodeBytesBlockWithTail(src []byte, itemsCount uint64) ([]
dst := d.values[:0]
data := decompressedData
for _, sLen := range aLens {
- if uint64(len(data)) < sLen {
- return nil, nil, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", sLen, len(data))
- }
if sLen == 0 {
dst = append(dst, nil)
continue
}
- dst = append(dst, data[:sLen])
- data = data[sLen:]
+ // Reverse the offset
+ actualLen := sLen - 1
+ if uint64(len(data)) < actualLen {
+ return nil, nil, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", actualLen, len(data))
+ }
+ if actualLen == 0 {
+ // Explicitly create non-nil empty slice for empty strings
+ dst = append(dst, []byte{})
+ } else {
+ dst = append(dst, data[:actualLen])
+ }
+ data = data[actualLen:]
}
return dst, tail, nil
@@ -289,15 +307,22 @@ func DecodeDictionaryValues(src []byte) ([][]byte, error) {
var values [][]byte
data := decompressedData
for _, sLen := range aLens {
- if uint64(len(data)) < sLen {
- return nil, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", sLen, len(data))
- }
if sLen == 0 {
values = append(values, nil)
continue
}
- values = append(values, data[:sLen])
- data = data[sLen:]
+ // Reverse the offset
+ actualLen := sLen - 1
+ if uint64(len(data)) < actualLen {
+ return nil, fmt.Errorf("cannot decode a string with the length %d bytes from %d bytes", actualLen, len(data))
+ }
+ if actualLen == 0 {
+ // Explicitly create non-nil empty slice for empty strings
+ values = append(values, []byte{})
+ } else {
+ values = append(values, data[:actualLen])
+ }
+ data = data[actualLen:]
}
return values, nil
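The valuesEqual helper above is needed because bytes.Equal(nil, []byte{}) reports true, so a dictionary keyed on bytes.Equal alone would deduplicate an empty string onto an earlier nil entry and reintroduce the bug on the dictionary-encoded path. A tiny demonstration of that pitfall in plain Go:

    package main

    import (
        "bytes"
        "fmt"
    )

    func main() {
        var null []byte   // absent tag value
        empty := []byte{} // present but empty tag value

        // bytes.Equal treats nil and empty as equal...
        fmt.Println(bytes.Equal(null, empty)) // true

        // ...so equality for dictionary entries must compare nil-ness first,
        // which is what valuesEqual does in the hunk above.
        sameEntry := (null == nil) == (empty == nil) && bytes.Equal(null, empty)
        fmt.Println(sameEntry) // false
    }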
diff --git a/pkg/encoding/dictionary_test.go b/pkg/encoding/dictionary_test.go
index 5445e03b..d8f13dfb 100644
--- a/pkg/encoding/dictionary_test.go
+++ b/pkg/encoding/dictionary_test.go
@@ -21,6 +21,7 @@ import (
"fmt"
"testing"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -69,6 +70,214 @@ func TestEncodeAndDecodeDictionary(t *testing.T) {
require.Equal(t, values, decoded)
}
+func TestEncodeAndDecodeDictionaryEdgeCases(t *testing.T) {
+ tests := []struct {
+ name string
+ input [][]byte
+ expected [][]byte
+ }{
+ {
+ name: "nil_slice",
+ input: [][]byte{nil},
+ expected: [][]byte{nil},
+ },
+ {
+ name: "empty_slice",
+ input: [][]byte{{}},
+ expected: [][]byte{{}},
+ },
+ {
+ name: "single_byte",
+ input: [][]byte{[]byte("a")},
+ expected: [][]byte{[]byte("a")},
+ },
+ {
+ name: "mixed_nil_and_empty",
+ input: [][]byte{nil, {}},
+ expected: [][]byte{nil, {}},
+ },
+ {
+ name: "mixed_empty_and_content",
+ input: [][]byte{{}, []byte("hello")},
+ expected: [][]byte{{}, []byte("hello")},
+ },
+ {
+ name: "all_nil",
+ input: [][]byte{nil, nil, nil},
+ expected: [][]byte{nil, nil, nil},
+ },
+ {
+ name: "all_empty",
+ input: [][]byte{{}, {}, {}},
+ expected: [][]byte{{}, {}, {}},
+ },
+ {
+ name: "nil_empty_and_content",
+ input: [][]byte{nil, {}, []byte("test"), nil, []byte("value")},
+ expected: [][]byte{nil, {}, []byte("test"), nil, []byte("value")},
+ },
+ {
+ name: "duplicate_empty_strings",
+ input: [][]byte{{}, []byte("a"), {}, []byte("a")},
+ expected: [][]byte{{}, []byte("a"), {}, []byte("a")},
+ },
+ {
+ name: "duplicate_nil_values",
+ input: [][]byte{nil, []byte("b"), nil, []byte("b")},
+ expected: [][]byte{nil, []byte("b"), nil, []byte("b")},
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ dict := NewDictionary()
+ for _, value := range tt.input {
+ dict.Add(value)
+ }
+
+ encoded := dict.Encode(nil)
+ require.NotNil(t, encoded)
+
+ decodedDict := NewDictionary()
+ decoded, err := decodedDict.Decode(nil, encoded, uint64(len(tt.input)))
+ require.NoError(t, err)
+ require.Len(t, decoded, len(tt.expected))
+
+ for i, expected := range tt.expected {
+ if expected == nil {
+ assert.Nil(t, decoded[i], "index %d should be nil", i)
+ } else {
+ assert.NotNil(t, decoded[i], "index %d should not be nil", i)
+ assert.Equal(t, expected, decoded[i], "index %d should match expected", i)
+ }
+ }
+ })
+ }
+}
+
+func TestEncodeAndDecodeDictionaryRoundTripConsistency(t *testing.T) {
+ testCases := [][][]byte{
+ // Basic cases
+ {nil},
+ {[]byte{}},
+ {[]byte("a")},
+ {[]byte("hello world")},
+
+ // Mixed scenarios
+ {nil, []byte{}, []byte("a")},
+ {[]byte{}, nil, []byte("hello")},
+ {nil, []byte{}, nil, []byte("test"), []byte{}},
+
+ // Edge cases with different lengths
+ {[]byte{}, []byte("x"), []byte("xx"), []byte("xxx")},
+ {nil, nil, []byte{}, []byte{}, []byte("a"), []byte("ab")},
+
+ // Large content
+ {[]byte("this is a longer string for testing purposes")},
+ {nil, []byte("mixed with nil and empty"), []byte{},
[]byte("another string")},
+
+ // Duplicate values (dictionary compression)
+ {[]byte("hello"), []byte("world"), []byte("hello"),
[]byte("world")},
+ {nil, []byte("test"), nil, []byte("test")},
+ {[]byte{}, []byte("value"), []byte{}, []byte("value")},
+ }
+
+ for i, original := range testCases {
+ t.Run(fmt.Sprintf("round_trip_%d", i), func(t *testing.T) {
+ // Encode
+ dict := NewDictionary()
+ for _, value := range original {
+ dict.Add(value)
+ }
+ encoded := dict.Encode(nil)
+ require.NotNil(t, encoded)
+
+ // Decode
+ decodedDict := NewDictionary()
+ decoded, err := decodedDict.Decode(nil, encoded, uint64(len(original)))
+ require.NoError(t, err)
+ require.Len(t, decoded, len(original))
+
+ // Verify round-trip consistency
+ for j, orig := range original {
+ if orig == nil {
+ assert.Nil(t, decoded[j], "position %d: nil should remain nil", j)
+ } else {
+ assert.NotNil(t, decoded[j], "position %d: non-nil should remain non-nil", j)
+ assert.Equal(t, orig, decoded[j], "position %d: content should match", j)
+ }
+ }
+
+ // Test that we can encode the decoded result again and get the same encoded bytes
+ reEncodedDict := NewDictionary()
+ for _, value := range decoded {
+ reEncodedDict.Add(value)
+ }
+ reEncoded := reEncodedDict.Encode(nil)
+ assert.Equal(t, encoded, reEncoded, "re-encoding decoded result should produce identical bytes")
+ })
+ }
+}
+
+func TestEncodeAndDecodeDictionaryNilEmptyStringDistinction(t *testing.T) {
+ t.Run("nil vs empty string distinction", func(t *testing.T) {
+ // Test that nil and empty string produce different encoded results
+ nilDict := NewDictionary()
+ nilDict.Add(nil)
+ nilEncoded := nilDict.Encode(nil)
+
+ emptyDict := NewDictionary()
+ emptyDict.Add([]byte{})
+ emptyEncoded := emptyDict.Encode(nil)
+
+ // They should be different (nil uses 0, empty uses 1)
+ assert.NotEqual(t, nilEncoded, emptyEncoded)
+
+ // Decode and verify they decode to different results
+ nilDecodedDict := NewDictionary()
+ nilDecoded, err := nilDecodedDict.Decode(nil, nilEncoded, 1)
+ require.NoError(t, err)
+ assert.Len(t, nilDecoded, 1)
+ assert.Nil(t, nilDecoded[0])
+
+ emptyDecodedDict := NewDictionary()
+ emptyDecoded, err := emptyDecodedDict.Decode(nil, emptyEncoded, 1)
+ require.NoError(t, err)
+ assert.Len(t, emptyDecoded, 1)
+ assert.NotNil(t, emptyDecoded[0])
+ assert.Equal(t, []byte{}, emptyDecoded[0])
+ })
+
+ t.Run("mixed nil and empty in same dictionary", func(t *testing.T) {
+ values := [][]byte{nil, {}, []byte("test"), nil, {}}
+ dict := NewDictionary()
+ for _, value := range values {
+ dict.Add(value)
+ }
+
+ encoded := dict.Encode(nil)
+ require.NotNil(t, encoded)
+
+ decodedDict := NewDictionary()
+ decoded, err := decodedDict.Decode(nil, encoded, uint64(len(values)))
+ require.NoError(t, err)
+ require.Len(t, decoded, len(values))
+
+ // Verify nil values remain nil
+ assert.Nil(t, decoded[0], "first value should be nil")
+ assert.Nil(t, decoded[3], "fourth value should be nil")
+
+ // Verify empty strings remain non-nil empty
+ assert.NotNil(t, decoded[1], "second value should not be nil")
+ assert.Equal(t, []byte{}, decoded[1], "second value should be empty string")
+ assert.NotNil(t, decoded[4], "fifth value should not be nil")
+ assert.Equal(t, []byte{}, decoded[4], "fifth value should be empty string")
+
+ // Verify content values
+ assert.Equal(t, []byte("test"), decoded[2], "third value should
be 'test'")
+ })
+}
+
type parameter struct {
count int
cardinality int
diff --git a/test/cases/stream/data/input/all_with_http_method.ql b/test/cases/stream/data/input/all_with_http_method.ql
new file mode 100644
index 00000000..65db9d46
--- /dev/null
+++ b/test/cases/stream/data/input/all_with_http_method.ql
@@ -0,0 +1,20 @@
+# Licensed to Apache Software Foundation (ASF) under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Apache Software Foundation (ASF) licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+SELECT trace_id, http.method, data_binary FROM STREAM sw IN default
+TIME > '-15m'
+
diff --git a/test/cases/stream/data/input/all_with_http_method.yaml b/test/cases/stream/data/input/all_with_http_method.yaml
new file mode 100644
index 00000000..11d0933e
--- /dev/null
+++ b/test/cases/stream/data/input/all_with_http_method.yaml
@@ -0,0 +1,26 @@
+# Licensed to Apache Software Foundation (ASF) under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Apache Software Foundation (ASF) licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+name: "sw"
+groups: ["default"]
+projection:
+ tagFamilies:
+ - name: "searchable"
+ tags: ["trace_id", "http.method"]
+ - name: "data"
+ tags: ["data_binary"]
+
diff --git a/test/cases/stream/data/testdata/sw.json b/test/cases/stream/data/testdata/sw.json
index e12eb307..eed140f8 100644
--- a/test/cases/stream/data/testdata/sw.json
+++ b/test/cases/stream/data/testdata/sw.json
@@ -22,7 +22,7 @@
{"str":{"value": "/product_id"}},
{"int":{"value": 500}},
{"int":{"value": 1622933202000000000}},
- {"null":0},
+ {"str":{"value": ""}},
{"null":0},
{"str":{"value": "1"}},
{"str":{"value":"mysql"}},
diff --git a/test/cases/stream/data/want/all_with_http_method.yaml b/test/cases/stream/data/want/all_with_http_method.yaml
new file mode 100644
index 00000000..6e835b4c
--- /dev/null
+++ b/test/cases/stream/data/want/all_with_http_method.yaml
@@ -0,0 +1,103 @@
+# Licensed to Apache Software Foundation (ASF) under one or more contributor
+# license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright
+# ownership. Apache Software Foundation (ASF) licenses this file to you under
+# the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+elements:
+ - elementId: "0978df79cf4ed409"
+ tagFamilies:
+ - name: searchable
+ tags:
+ - key: trace_id
+ value:
+ str:
+ value: "1"
+ - key: http.method
+ value:
+ "null": null
+ - name: data
+ tags:
+ - key: data_binary
+ value:
+ binaryData: YWJjMTIzIT8kKiYoKSctPUB+
+ - elementId: "4dd999e3502d728d"
+ tagFamilies:
+ - name: searchable
+ tags:
+ - key: trace_id
+ value:
+ str:
+ value: "2"
+ - key: http.method
+ value:
+ str:
+ value: ""
+ - name: data
+ tags:
+ - key: data_binary
+ value:
+ binaryData: YWJjMTIzIT8kKiYoKSctPUB+
+ - elementId: "fafaa63b403a1604"
+ tagFamilies:
+ - name: searchable
+ tags:
+ - key: trace_id
+ value:
+ str:
+ value: "3"
+ - key: http.method
+ value:
+ str:
+ value: GET
+ - name: data
+ tags:
+ - key: data_binary
+ value:
+ binaryData: YWJjMTIzIT8kKiYoKSctPUB+
+ - elementId: "5fdc8aab6ea5c9d4"
+ tagFamilies:
+ - name: searchable
+ tags:
+ - key: trace_id
+ value:
+ str:
+ value: "4"
+ - key: http.method
+ value:
+ str:
+ value: GET
+ - name: data
+ tags:
+ - key: data_binary
+ value:
+ binaryData: YWJjMTIzIT8kKiYoKSctPUB+
+ - elementId: "aaede9362761569a"
+ tagFamilies:
+ - name: searchable
+ tags:
+ - key: trace_id
+ value:
+ str:
+ value: "5"
+ - key: http.method
+ value:
+ str:
+ value: GET
+ - name: data
+ tags:
+ - key: data_binary
+ value:
+ binaryData: YWJjMTIzIT8kKiYoKSctPUB+
+
diff --git a/test/cases/stream/stream.go b/test/cases/stream/stream.go
index ad1f11b7..18340f1a 100644
--- a/test/cases/stream/stream.go
+++ b/test/cases/stream/stream.go
@@ -45,6 +45,7 @@ var _ = g.DescribeTable("Scanning Streams", func(args helpers.Args) {
}, flags.EventuallyTimeout).Should(gm.Succeed())
},
g.Entry("all elements", helpers.Args{Input: "all", Duration: 1 *
time.Hour}),
+ g.Entry("projection with http.method", helpers.Args{Input:
"all_with_http_method", Duration: 1 * time.Hour}),
g.Entry("limit", helpers.Args{Input: "limit", Duration: 1 * time.Hour}),
g.Entry("max limit", helpers.Args{Input: "all_max_limit", Want: "all",
Duration: 1 * time.Hour}),
g.Entry("offset", helpers.Args{Input: "offset", Duration: 1 *
time.Hour}),
diff --git a/test/cases/trace/data/want/multi_group_new_tag.yml b/test/cases/trace/data/want/multi_group_new_tag.yml
index d0107db4..9a48b0ad 100644
--- a/test/cases/trace/data/want/multi_group_new_tag.yml
+++ b/test/cases/trace/data/want/multi_group_new_tag.yml
@@ -156,7 +156,7 @@ traces:
value: api_service
- key: error_message
value:
- "null": null
+ str: {}
- key: trace_id
value:
str:
@@ -170,7 +170,7 @@ traces:
value: api_service
- key: error_message
value:
- "null": null
+ str: {}
- key: trace_id
value:
str: