Skip to content

Commit cbb4c74

Browse files
authored
Merge pull request #31 from kkkjz/main
fix model name
2 parents 0778757 + 6adaad0 commit cbb4c74

File tree

5 files changed

+29
-16
lines changed

5 files changed

+29
-16
lines changed

memoryos-chromadb/comprehensive_test.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,14 +21,15 @@ def main():
2121
memoryos = Memoryos(
2222
user_id='travel_user_test',
2323
openai_api_key='',
24-
openai_base_url='',
24+
openai_base_url='https://cn2us02.opapi.win/v1',
2525
data_storage_path='./comprehensive_test_data',
2626
assistant_id='travel_assistant',
27-
embedding_model_name='BAAI/bge-m3',
27+
embedding_model_name='',
2828
mid_term_capacity=1000,
29-
mid_term_heat_threshold=13.0,
29+
mid_term_heat_threshold=12.0,
3030
mid_term_similarity_threshold=0.7,
31-
short_term_capacity=2
31+
short_term_capacity=2,
32+
llm_model='gpt-4.1-mini'
3233
)
3334

3435
print("📝 Phase 1: Adding 30 rounds of travel planning conversations...")

memoryos-chromadb/long_term.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,12 +15,14 @@ def __init__(self,
1515
llm_interface: OpenAIClient,
1616
knowledge_capacity=100,
1717
embedding_model_name: str = "all-MiniLM-L6-v2",
18-
embedding_model_kwargs: Optional[dict] = None):
18+
embedding_model_kwargs: Optional[dict] = None,
19+
llm_model: str = "gpt-4o-mini"): # 添加 llm_model 参数
1920
self.storage = storage_provider
2021
self.llm_interface = llm_interface
2122
self.knowledge_capacity = knowledge_capacity
2223
self.embedding_model_name = embedding_model_name
2324
self.embedding_model_kwargs = embedding_model_kwargs or {}
25+
self.llm_model = llm_model # 保存模型名称
2426

2527
def update_user_profile(self, user_id: str, conversation_history: str) -> Optional[Dict[str, Any]]:
2628
"""
@@ -31,6 +33,7 @@ def update_user_profile(self, user_id: str, conversation_history: str) -> Option
3133
updated_profile = gpt_user_profile_analysis(
3234
conversation_str=conversation_history,
3335
client=self.llm_interface,
36+
model=self.llm_model, # 传递模型参数
3437
existing_user_profile=existing_profile_str
3538
)
3639

@@ -73,7 +76,11 @@ def extract_knowledge_from_text(self, text: str) -> Optional[Dict[str, Any]]:
7376
"""
7477
if not text.strip():
7578
return None
76-
return gpt_knowledge_extraction(conversation_str=text, client=self.llm_interface)
79+
return gpt_knowledge_extraction(
80+
conversation_str=text,
81+
client=self.llm_interface,
82+
model=self.llm_model # 传递模型参数
83+
)
7784

7885
def get_user_knowledge(self) -> list:
7986
return self.storage.get_all_user_knowledge()
@@ -95,4 +102,4 @@ def search_knowledge(self, query: str, knowledge_type: str = "user", top_k=5) ->
95102
results = self.storage.search_assistant_knowledge(query_vec, top_k=top_k)
96103

97104
print(f"LongTermMemory: Searched {knowledge_type} knowledge for '{query[:30]}...'. Found {len(results)} matches.")
98-
return results
105+
return results

memoryos-chromadb/memoryos.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -91,21 +91,24 @@ def __init__(self, user_id: str,
9191
client=self.client,
9292
max_capacity=mid_term_capacity,
9393
embedding_model_name=self.embedding_model_name,
94-
embedding_model_kwargs=self.embedding_model_kwargs
94+
embedding_model_kwargs=self.embedding_model_kwargs,
95+
llm_model=self.llm_model
9596
)
9697
self.user_long_term_memory = LongTermMemory(
9798
storage_provider=self.storage_provider,
9899
llm_interface=self.client,
99100
embedding_model_name=self.embedding_model_name,
100-
embedding_model_kwargs=self.embedding_model_kwargs
101+
embedding_model_kwargs=self.embedding_model_kwargs,
102+
llm_model=self.llm_model
101103
)
102104

103105
# Initialize Memory Module for Assistant Knowledge
104106
self.assistant_long_term_memory = LongTermMemory(
105107
storage_provider=self.storage_provider,
106108
llm_interface=self.client,
107109
embedding_model_name=self.embedding_model_name,
108-
embedding_model_kwargs=self.embedding_model_kwargs
110+
embedding_model_kwargs=self.embedding_model_kwargs,
111+
llm_model=self.llm_model
109112
)
110113

111114
# Initialize Orchestration Modules

memoryos-chromadb/mid_term.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -43,11 +43,13 @@ def __init__(self,
4343
client: OpenAIClient,
4444
max_capacity=2000,
4545
embedding_model_name: str = "all-MiniLM-L6-v2",
46-
embedding_model_kwargs: Optional[dict] = None):
46+
embedding_model_kwargs: Optional[dict] = None,
47+
llm_model: str = "gpt-4o-mini"):
4748
self.user_id = user_id
4849
self.client = client
4950
self.max_capacity = max_capacity
5051
self.storage = storage_provider
52+
self.llm_model = llm_model
5153

5254
# Load sessions and other data from the shared storage provider's in-memory metadata
5355
self.sessions: dict = self.storage.get_mid_term_sessions()
@@ -100,7 +102,7 @@ def add_session(self, summary, details):
100102
**self.embedding_model_kwargs
101103
)
102104
summary_vec = normalize_vector(summary_vec).tolist()
103-
summary_keywords = list(extract_keywords_from_multi_summary(summary, client=self.client))
105+
summary_keywords = list(extract_keywords_from_multi_summary(summary, client=self.client,model=self.llm_model))
104106

105107
processed_details = []
106108
for page_data in details:
@@ -132,7 +134,7 @@ def add_session(self, summary, details):
132134
else:
133135
print(f"MidTermMemory: Computing new keywords for page {page_id}")
134136
full_text = f"User: {page_data.get('user_input','')} Assistant: {page_data.get('agent_response','')}"
135-
page_keywords = list(extract_keywords_from_multi_summary(full_text, client=self.client))
137+
page_keywords = list(extract_keywords_from_multi_summary(full_text, client=self.client,model=self.llm_model))
136138

137139
processed_page = {
138140
**page_data, # Carry over existing fields like user_input, agent_response, timestamp
@@ -249,7 +251,7 @@ def insert_pages_into_session(self, summary_for_new_pages, keywords_for_new_page
249251

250252
if "page_keywords" not in page_data or not page_data["page_keywords"]:
251253
full_text = f"User: {page_data.get('user_input','')} Assistant: {page_data.get('agent_response','')}"
252-
page_data["page_keywords"] = list(extract_keywords_from_multi_summary(full_text, client=self.client))
254+
page_data["page_keywords"] = list(extract_keywords_from_multi_summary(full_text, client=self.client,model=self.llm_model))
253255

254256
processed_new_pages.append({**page_data, "page_id": page_id})
255257

@@ -285,7 +287,7 @@ def search_sessions(self, query_text, segment_similarity_threshold=0.1, page_sim
285287
**self.embedding_model_kwargs
286288
)
287289
query_vec = normalize_vector(query_vec)
288-
query_keywords = set(extract_keywords_from_multi_summary(query_text, client=self.client))
290+
query_keywords = set(extract_keywords_from_multi_summary(query_text, client=self.client,model=self.llm_model))
289291

290292
# Search sessions using ChromaDB
291293
similar_sessions = self.storage.search_mid_term_sessions(query_vec.tolist(), top_k=top_k_sessions)

memoryos-chromadb/updater.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ def _process_page_embedding_and_keywords(self, page_data):
5353
tasks.append(('embedding', lambda: get_embedding(full_text)))
5454

5555
if not ("page_keywords" in page_data and page_data["page_keywords"]):
56-
tasks.append(('keywords', lambda: extract_keywords_from_multi_summary(full_text, client=self.client)))
56+
tasks.append(('keywords', lambda: extract_keywords_from_multi_summary(full_text, client=self.client,model=self.llm_model)))
5757

5858
if tasks:
5959
with ThreadPoolExecutor(max_workers=2) as executor:

0 commit comments

Comments (0)