-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmemory_update.go
More file actions
165 lines (145 loc) · 4.44 KB
/
memory_update.go
File metadata and controls
165 lines (145 loc) · 4.44 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
package main
import (
"errors"
"log"
"strings"
"time"
)
// Update modifies an existing memory's text and optional label, recalculates similarity
// edges (content-based), and persists changes. Search uses content only.
// label is optional: if non-empty the chunk's label is set to it; if empty, existing label is kept.
// chunkType is optional: if non-empty the chunk's type is set to it; if empty, existing type is kept.
// scopes is optional: if nil, existing scopes are kept; if non-nil (even empty slice), scopes are updated.
// It returns the updated chunk, the related memories it is now linked to, and an error
// if validation, summarization, or persisting the updated chunk itself fails.
// Neighbor persistence is best-effort: a failure there is logged but does not fail the update.
func (s *MemoryStore) Update(id string, newText string, label string, chunkType string, scopes []string) (MemoryChunk, []RelatedMemory, error) {
	id = strings.TrimSpace(id)
	if id == "" {
		return MemoryChunk{}, nil, ErrEmptyID
	}
	newText = strings.TrimSpace(newText)
	if newText == "" {
		return MemoryChunk{}, nil, errors.New("text is empty")
	}
	// Normalize text (summarize if too long). Done before taking the lock so the
	// potentially slow summarizer call never blocks other store operations.
	normalizedText := newText
	var err error
	if len([]byte(normalizedText)) > MaxMemoryBytes {
		normalizedText, err = s.summarizer.Summarize(normalizedText, MaxMemoryBytes)
		if err != nil {
			return MemoryChunk{}, nil, err
		}
		normalizedText = strings.TrimSpace(normalizedText)
		if normalizedText == "" {
			return MemoryChunk{}, nil, errors.New("summarizer returned empty text")
		}
		// The summarizer may still overshoot the budget; enforce the hard byte cap.
		if len([]byte(normalizedText)) > MaxMemoryBytes {
			normalizedText = HardTruncateToBytes(normalizedText, MaxMemoryBytes)
		}
	}
	// Compute vectors from content only; label is not used for similarity.
	vec, norm := vectorize(normalizedText)
	tokens := tokenSet(normalizedText)
	s.mu.Lock()
	defer s.mu.Unlock()
	sc := s.chunks[id]
	if sc == nil {
		return MemoryChunk{}, nil, ErrNotFound
	}
	// Label: if non-empty, use the new label; if empty, keep existing label.
	label = strings.TrimSpace(label)
	if label == "" {
		label = sc.Label
	}
	// Remove old token index entries and decrement document frequencies, so the
	// re-index below starts from a clean slate for this chunk.
	for tok := range sc.tokens {
		if set := s.tokenIndex[tok]; set != nil {
			delete(set, id)
			if len(set) == 0 {
				delete(s.tokenIndex, tok)
			}
		}
		if s.tokenDocFreq[tok] > 0 {
			s.tokenDocFreq[tok]--
			if s.tokenDocFreq[tok] == 0 {
				delete(s.tokenDocFreq, tok)
			}
		}
	}
	// Remove old edges (both directions); remember the neighbor IDs so the
	// affected chunks can be re-persisted below.
	oldEdgeIDs := make([]string, 0, len(sc.edges))
	for otherID := range sc.edges {
		oldEdgeIDs = append(oldEdgeIDs, otherID)
		if other := s.chunks[otherID]; other != nil {
			delete(other.edges, id)
		}
	}
	// Update chunk data and BM25 doc length aggregate.
	oldDocLen := sc.docLen
	newDocLen := docLenFromVector(vec)
	s.totalDocLen -= oldDocLen
	s.totalDocLen += newDocLen
	if s.totalDocLen < 0 {
		// Defensive clamp: the aggregate must never go negative.
		s.totalDocLen = 0
	}
	sc.Text = normalizedText
	sc.Label = label
	// Type: if non-empty, use the new type; if empty, keep existing type.
	if t := strings.TrimSpace(chunkType); t != "" {
		sc.Type = t
	}
	// Scopes: nil means "keep existing"; a non-nil (even empty) slice replaces them.
	if scopes != nil {
		sc.Scopes = normalizeScopes(scopes)
	}
	sc.vector = vec
	sc.norm = norm
	sc.tokens = tokens
	sc.docLen = newDocLen
	sc.edges = make(map[string]float64)
	// Keep original CreatedAt; track modification time.
	now := time.Now()
	sc.UpdatedAt = &now
	// Re-index tokens.
	s.indexChunkLocked(id, tokens)
	// Recalculate edges with all other chunks.
	// Only connect memories that are scope-compatible (share scopes or one/both are global).
	related := make([]RelatedMemory, 0)
	var updatedChunks []*storedChunk
	for otherID, other := range s.chunks {
		if otherID == id {
			continue
		}
		// Check scope compatibility before creating edge.
		if !scopesCompatible(sc.Scopes, other.Scopes) {
			continue
		}
		sim := s.bm25MemSimilarityLocked(sc, other)
		if sim > s.similarityDelta {
			s.addEdgeLocked(id, sc, otherID, other, sim)
			updatedChunks = append(updatedChunks, other)
			related = append(related, RelatedMemory{
				ID:         otherID,
				Label:      other.Label,
				Similarity: sim,
				CreatedAt:  other.CreatedAt,
			})
		}
	}
	// Persist updated chunk. A failure here is fatal for the call; note that the
	// in-memory state above has already been mutated at this point.
	if err := s.storage.Save(sc.toStorageData()); err != nil {
		return MemoryChunk{}, nil, err
	}
	// Collect all dirty neighbor IDs (lost or gained edges), deduplicated.
	dirty := make(map[string]struct{}, len(oldEdgeIDs)+len(updatedChunks))
	for _, oldID := range oldEdgeIDs {
		dirty[oldID] = struct{}{}
	}
	for _, uc := range updatedChunks {
		dirty[uc.ID] = struct{}{}
	}
	for dirtyID := range dirty {
		if other := s.chunks[dirtyID]; other != nil {
			// Best-effort: a neighbor persistence failure must not fail the update,
			// but it should not be silently dropped either — log it for diagnosis.
			if err := s.storage.Save(other.toStorageData()); err != nil {
				log.Printf("MEMORY: UPDATE '%s' failed to persist neighbor '%s': %v", id, dirtyID, err)
			}
		}
	}
	log.Printf("MEMORY: UPDATE '%s' '%s'", id, ExcerptForLog(normalizedText, logExcerptLen))
	return sc.MemoryChunk, related, nil
}