Update Llama Stack to 0.2.22 #43
base: main
Changes from all commits
```diff
@@ -12,6 +12,7 @@
 import yaml
 from llama_index.core import Settings, load_index_from_storage
 from llama_index.core.llms.utils import resolve_llm
+from llama_index.core.schema import NodeWithScore, TextNode
 from llama_index.core.storage.storage_context import StorageContext
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 from llama_index.vector_stores.faiss import FaissVectorStore
```
```diff
@@ -35,20 +36,34 @@ def _llama_index_query(args: argparse.Namespace) -> None:
 
     if args.node is not None:
         node = storage_context.docstore.get_node(args.node)
-        result = {
-            "query": args.query,
-            "type": "single_node",
-            "node_id": args.node,
-            "node": {
-                "id": node.node_id,
-                "text": node.text,
-                "metadata": node.metadata if hasattr(node, "metadata") else {},
-            },
-        }
-        if args.json:
-            print(json.dumps(result, indent=2))
-        else:
-            print(node)
+        if isinstance(node, TextNode):
+            result = {
+                "query": args.query,
+                "type": "single_node",
+                "node_id": args.node,
+                "node": {
+                    "id": node.node_id,
+                    "text": node.text,
+                    "metadata": node.metadata if hasattr(node, "metadata") else {},
+                },
+            }
+            if args.json:
+                print(json.dumps(result, indent=2))
+            else:
+                print(node)
+        else:
+            logging.warning(
+                f"Node {args.node} is not a TextNode, type: {type(node).__name__}"
+            )
+            if args.json:
+                result = {
+                    "query": args.query,
+                    "type": "single_node",
+                    "node_id": args.node,
+                    "error": f"Node is not a TextNode (type: {type(node).__name__})",
+                }
+                print(json.dumps(result, indent=2))
+            exit(1)
     else:
         retriever = vector_index.as_retriever(similarity_top_k=args.top_k)
         nodes = retriever.retrieve(args.query)
```
Comment on lines 37 to +43: Handle “node not found” explicitly and avoid the ambiguous type message; also prefer `sys.exit`.

```diff
-    if args.node is not None:
-        node = storage_context.docstore.get_node(args.node)
+    if args.node is not None:
+        try:
+            node = storage_context.docstore.get_node(args.node)
+        except KeyError:
+            msg = f"Node {args.node} not found"
+            logging.warning(msg)
+            if args.json:
+                print(json.dumps({
+                    "query": args.query,
+                    "type": "single_node",
+                    "node_id": args.node,
+                    "error": msg,
+                }, indent=2))
+            sys.exit(1)
         if isinstance(node, TextNode):
             ...
-        else:
+        else:
             logging.warning(
                 f"Node {args.node} is not a TextNode, type: {type(node).__name__}"
             )
             if args.json:
                 result = {
                     "query": args.query,
                     "type": "single_node",
                     "node_id": args.node,
                     "error": f"Node is not a TextNode (type: {type(node).__name__})",
                 }
                 print(json.dumps(result, indent=2))
-            exit(1)
+            sys.exit(1)
```

Also applies to: 55-66
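For context, here is a minimal, self-contained sketch of the lookup-and-report pattern the comment suggests. The helper name `fetch_node_or_exit` is hypothetical, and the handler hedges between `KeyError` and `ValueError`, since which exception the docstore raises for a missing id can vary by llama-index version:

```python
import json
import logging
import sys

from llama_index.core.storage.storage_context import StorageContext


def fetch_node_or_exit(storage_context: StorageContext, node_id: str,
                       query: str, as_json: bool):
    """Return the node for node_id, or report the miss and exit with status 1."""
    try:
        return storage_context.docstore.get_node(node_id)
    except (KeyError, ValueError):  # assumption: a missing id raises one of these
        msg = f"Node {node_id} not found"
        logging.warning(msg)
        if as_json:
            print(json.dumps(
                {"query": query, "type": "single_node",
                 "node_id": node_id, "error": msg},
                indent=2,
            ))
        sys.exit(1)  # sys.exit, per the review, rather than the exit() builtin
```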
```diff
@@ -88,13 +103,18 @@ def _llama_index_query(args: argparse.Namespace) -> None:
             "nodes": [],
         }
         for node in nodes:  # type: ignore
-            node_data = {
-                "id": node.node_id,
-                "score": node.score,
-                "text": node.text,
-                "metadata": node.metadata if hasattr(node, "metadata") else {},
-            }
-            result["nodes"].append(node_data)
+            if isinstance(node, NodeWithScore):
+                node_data = {
+                    "id": node.node_id,
+                    "score": node.score,
+                    "text": node.text,
+                    "metadata": node.metadata if hasattr(node, "metadata") else {},
+                }
+                result["nodes"].append(node_data)
+            else:
+                logging.debug(
+                    f"Skipping node of type {type(node).__name__}, expected NodeWithScore"
+                )
 
         if args.json:
             print(json.dumps(result, indent=2))
```

syedriko marked this conversation as resolved.
Comment on lines 105 to +117: Access fields via `NodeWithScore.node` to avoid `AttributeError` across versions; drop the loop’s `type: ignore`. In llama-index 0.12, the text and metadata live on the wrapped node, so reading them off the wrapper directly can raise `AttributeError`.

```diff
-        for node in nodes:  # type: ignore
-            if isinstance(node, NodeWithScore):
-                node_data = {
-                    "id": node.node_id,
-                    "score": node.score,
-                    "text": node.text,
-                    "metadata": node.metadata if hasattr(node, "metadata") else {},
-                }
-                result["nodes"].append(node_data)
+        from typing import Sequence, cast
+        retrieved = cast(Sequence[NodeWithScore], nodes)
+        for nw in retrieved:
+            if isinstance(nw, NodeWithScore):
+                base = nw.node
+                # Prefer get_content() to work for non-TextNode nodes too
+                text = getattr(base, "text", None)
+                if text is None and hasattr(base, "get_content"):
+                    text = base.get_content() or ""
+                node_data = {
+                    "id": getattr(base, "node_id", getattr(base, "id_", "")),
+                    "score": nw.score,
+                    "text": text,
+                    "metadata": getattr(base, "metadata", {}) or {},
+                }
+                result["nodes"].append(node_data)
             else:
                 logging.debug(
                     f"Skipping node of type {type(node).__name__}, expected NodeWithScore"
                 )
```
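A standalone sketch of the version-tolerant extraction the comment describes; `to_node_data` is a hypothetical helper name, and the fallback to `get_content()` assumes the retriever can hand back nodes other than `TextNode`:

```python
from llama_index.core.schema import NodeWithScore


def to_node_data(nw: NodeWithScore) -> dict:
    """Flatten a retrieval hit into a plain dict, reading fields off the
    wrapped BaseNode rather than the NodeWithScore wrapper."""
    base = nw.node  # the underlying BaseNode
    text = getattr(base, "text", None)  # present on TextNode
    if text is None:
        text = base.get_content() or ""  # generic accessor for other node types
    return {
        "id": base.node_id,
        "score": nw.score,
        "text": text,
        "metadata": getattr(base, "metadata", {}) or {},
    }
```

With a helper like this, the loop body reduces to `result["nodes"].append(to_node_data(nw))` for each `NodeWithScore` hit.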
```diff
@@ -134,7 +154,7 @@ def _llama_stack_query(args: argparse.Namespace) -> None:
     yaml.safe_dump(cfg, open(cfg_file, "w", encoding="utf-8"))
 
     stack_lib = importlib.import_module("llama_stack")
-    client = stack_lib.distribution.library_client.LlamaStackAsLibraryClient(cfg_file)
+    client = stack_lib.core.library_client.LlamaStackAsLibraryClient(cfg_file)
     client.initialize()
 
     # No need to register the DB as it's defined in llama-stack.yaml
```
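This hunk only moves the client class from `llama_stack.distribution.library_client` to its new home in `llama_stack.core.library_client` for the 0.2.22 upgrade. A minimal sketch of a fallback import that tolerates both layouts might look like the following; the helper name is hypothetical:

```python
import importlib


def load_library_client_class():
    """Resolve LlamaStackAsLibraryClient across llama-stack releases."""
    for mod_path in (
        "llama_stack.core.library_client",          # 0.2.22 layout (this PR)
        "llama_stack.distribution.library_client",  # pre-upgrade layout
    ):
        try:
            return importlib.import_module(mod_path).LlamaStackAsLibraryClient
        except (ImportError, AttributeError):
            continue
    raise ImportError("LlamaStackAsLibraryClient not found in llama_stack")


# Usage, mirroring the diff:
# client = load_library_client_class()(cfg_file)
# client.initialize()
```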