 Example demonstrating how to use MOSProduct for multi-user scenarios.
 """
 
+import os
+
 from memos.configs.mem_cube import GeneralMemCubeConfig
 from memos.configs.mem_os import MOSConfig
 from memos.mem_cube.general import GeneralMemCube
@@ -16,28 +18,53 @@ def get_config(user_name):
         "top_p": 0.9,
         "top_k": 50,
         "remove_think_prefix": True,
-        "api_key": "your-api-key-here",
-        "api_base": "https://api.openai.com/v1",
+        "api_key": os.getenv("OPENAI_API_KEY"),
+        "api_base": os.getenv("OPENAI_API_BASE"),
     }
     # Create a default configuration
     default_config = MOSConfig(
         user_id="root",
         chat_model={"backend": "openai", "config": openapi_config},
         mem_reader={
-            "backend": "naive",
+            "backend": "simple_struct",
            "config": {
                "llm": {
                    "backend": "openai",
                    "config": openapi_config,
                },
                "embedder": {
-                    "backend": "ollama",
+                    "backend": "universal_api",
+                    "config": {
+                        "provider": os.getenv("MOS_EMBEDDER_PROVIDER", "openai"),
+                        "api_key": os.getenv("MOS_EMBEDDER_API_KEY", "sk-xxxx"),
+                        "model_name_or_path": os.getenv(
+                            "MOS_EMBEDDER_MODEL", "text-embedding-3-large"
+                        ),
+                        "base_url": os.getenv("MOS_EMBEDDER_API_BASE", "http://openai.com"),
+                    },
+                },
+                "chunker": {
+                    "backend": "sentence",
                    "config": {
-                        "model_name_or_path": "nomic-embed-text:latest",
+                        "tokenizer_or_token_counter": "gpt2",
+                        "chunk_size": 512,
+                        "chunk_overlap": 128,
+                        "min_sentences_per_chunk": 1,
                    },
                },
            },
        },
+        user_manager={
+            "backend": "mysql",
+            "config": {
+                "host": os.getenv("MYSQL_HOST", "localhost"),
+                "port": int(os.getenv("MYSQL_PORT", "3306")),
+                "username": os.getenv("MYSQL_USERNAME", "root"),
+                "password": os.getenv("MYSQL_PASSWORD", "12345678"),
+                "database": os.getenv("MYSQL_DATABASE", "memos_users"),
+                "charset": os.getenv("MYSQL_CHARSET", "utf8mb4"),
+            },
+        },
         enable_textual_memory=True,
         enable_activation_memory=False,
         top_k=5,
@@ -55,17 +82,27 @@ def get_config(user_name):
             "graph_db": {
                 "backend": "neo4j",
                 "config": {
-                    "uri": "bolt://localhost:7687",
-                    "user": "neo4j",
-                    "password": "12345678",
-                    "db_name": user_name,
+                    "uri": os.getenv("NEO4J_URI", "bolt://localhost:7687"),
+                    "user": os.getenv("NEO4J_USER", "neo4j"),
+                    "password": os.getenv("NEO4J_PASSWORD", "12345678"),
+                    "db_name": os.getenv(
+                        "NEO4J_DB_NAME", "shared-tree-textual-memory-test"
+                    ),
+                    "user_name": f"memos{user_name.replace('-', '')}",
+                    "embedding_dimension": int(os.getenv("EMBEDDING_DIMENSION", 768)),
+                    "use_multi_db": False,
                     "auto_create": True,
                },
            },
            "embedder": {
-                "backend": "ollama",
+                "backend": "universal_api",
                "config": {
-                    "model_name_or_path": "nomic-embed-text:latest",
+                    "provider": os.getenv("MOS_EMBEDDER_PROVIDER", "openai"),
+                    "api_key": os.getenv("MOS_EMBEDDER_API_KEY", "sk-xxxx"),
+                    "model_name_or_path": os.getenv(
+                        "MOS_EMBEDDER_MODEL", "text-embedding-3-large"
+                    ),
+                    "base_url": os.getenv("MOS_EMBEDDER_API_BASE", "http://openai.com"),
                },
            },
        },
@@ -109,7 +146,7 @@ def main():
     print(f"\nSearch result for Alice: {search_result}")
 
     # Search memories for Alice
-    search_result = mos_product.get_all(query="conference", user_id="alice", memory_type="text_mem")
+    search_result = mos_product.get_all(user_id="alice", memory_type="text_mem")
     print(f"\nSearch result for Alice: {search_result}")
 
     # List all users
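
After this change the example reads every credential and endpoint from environment variables instead of hard-coded values. The sketch below shows one way to provide them before running the script; the placeholder values and the use of os.environ are illustrative assumptions, not part of the commit.

# Minimal sketch: export these in your shell, or set them before running the example.
# All values below are placeholders; substitute real credentials.
import os

os.environ.setdefault("OPENAI_API_KEY", "sk-...")            # chat model and mem_reader LLM
os.environ.setdefault("OPENAI_API_BASE", "https://api.openai.com/v1")

os.environ.setdefault("MOS_EMBEDDER_PROVIDER", "openai")     # universal_api embedder
os.environ.setdefault("MOS_EMBEDDER_API_KEY", "sk-...")
os.environ.setdefault("MOS_EMBEDDER_MODEL", "text-embedding-3-large")
os.environ.setdefault("MOS_EMBEDDER_API_BASE", "https://api.openai.com/v1")
os.environ.setdefault("EMBEDDING_DIMENSION", "768")          # commit default; match your embedding model's output size

os.environ.setdefault("MYSQL_HOST", "localhost")             # user_manager backend
os.environ.setdefault("MYSQL_PORT", "3306")
os.environ.setdefault("MYSQL_USERNAME", "root")
os.environ.setdefault("MYSQL_PASSWORD", "change-me")
os.environ.setdefault("MYSQL_DATABASE", "memos_users")
os.environ.setdefault("MYSQL_CHARSET", "utf8mb4")

os.environ.setdefault("NEO4J_URI", "bolt://localhost:7687")  # graph store used by the textual memory
os.environ.setdefault("NEO4J_USER", "neo4j")
os.environ.setdefault("NEO4J_PASSWORD", "change-me")
os.environ.setdefault("NEO4J_DB_NAME", "shared-tree-textual-memory-test")

Note that OPENAI_API_KEY and OPENAI_API_BASE have no fallback in the diff, so they must be set; the remaining variables fall back to the defaults visible above.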