
Commit d1698bf

13: Refactor completion parameters
1 parent 160d9ab commit d1698bf

7 files changed (+76 additions, -44 deletions)


lib/ruby_llm/chat.rb

Lines changed: 6 additions & 4 deletions
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require_relative 'completion_params'
+
 module RubyLLM
   # Represents a conversation with an AI model. Handles message history,
   # streaming responses, and tool integration with a simple, conversational API.
@@ -100,15 +102,15 @@ def cache_prompts(system: false, user: false, tools: false)
 
     def complete(&)
       @on[:new_message]&.call
-      response = @provider.complete(
-        messages,
+      params = CompletionParams.new(
+        messages: messages,
         tools: @tools,
        temperature: @temperature,
         model: @model.id,
         cache_prompts: @cache_prompts.dup,
-        connection: @connection,
-        &
+        connection: @connection
       )
+      response = @provider.complete(params, &)
       @on[:end_message]&.call(response)
 
       add_message response
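The caller-facing behaviour is unchanged: the block is still forwarded, now via `@provider.complete(params, &)`. A minimal sketch of the streaming path, assuming the gem's usual `RubyLLM.chat` / `ask` entry points and chunk objects with a `content` reader (none of which are touched by this commit):

# Minimal sketch (entry points and model id assumed, not part of this diff):
# streaming still works by passing a block, which Chat#complete forwards
# unchanged into provider.complete(params, &).
chat = RubyLLM.chat(model: 'gpt-4o-mini')
chat.ask('Summarise this commit') do |chunk|
  print chunk.content
end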

lib/ruby_llm/completion_params.rb

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module RubyLLM
+  # Parameter object for LLM completion requests.
+  # Encapsulates all the parameters needed for chat completion to avoid
+  # long parameter lists and provide better maintainability.
+  class CompletionParams
+    attr_reader :messages, :tools, :temperature, :model, :connection, :cache_prompts, :stream
+
+    def initialize(options = {})
+      @messages = options[:messages]
+      @tools = options[:tools]
+      @temperature = options[:temperature]
+      @model = options[:model]
+      @connection = options[:connection]
+      @cache_prompts = options[:cache_prompts] || { system: false, user: false, tools: false }
+      @stream = options[:stream] || false
+    end
+
+    def streaming?
+      @stream
+    end
+  end
+end
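A quick usage sketch of the new parameter object, grounded in the code above: unspecified options fall back to the documented defaults, and `streaming?` mirrors the `stream` flag. All values below are hypothetical.

# Usage sketch for CompletionParams (values hypothetical).
params = RubyLLM::CompletionParams.new(
  messages: [],
  tools: {},
  temperature: 0.7,
  model: 'gpt-4o-mini'
)
params.cache_prompts  # => { system: false, user: false, tools: false } (default)
params.streaming?     # => false (stream defaults to false)

streaming = RubyLLM::CompletionParams.new(messages: [], stream: true)
streaming.streaming?  # => true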

lib/ruby_llm/provider.rb

Lines changed: 17 additions & 11 deletions
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require_relative 'completion_params'
+
 module RubyLLM
   # Base interface for LLM providers like OpenAI and Anthropic.
   # Handles the complexities of API communication, streaming responses,
@@ -10,21 +12,25 @@ module Provider
   module Methods
     extend Streaming
 
-    def complete(messages, tools:, temperature:, model:, connection:,
-                 cache_prompts: { system: false, user: false, tools: false }, &)
-      normalized_temperature = maybe_normalize_temperature(temperature, model)
+    def complete(params, &)
+      normalized_temperature = maybe_normalize_temperature(params.temperature, params.model)
+
+      completion_params = CompletionParams.new(
+        messages: params.messages,
+        tools: params.tools,
+        temperature: normalized_temperature,
+        model: params.model,
+        connection: params.connection,
+        cache_prompts: params.cache_prompts,
+        stream: block_given?
+      )
 
-      payload = render_payload(messages,
-                               tools: tools,
-                               temperature: normalized_temperature,
-                               model: model,
-                               cache_prompts: cache_prompts,
-                               stream: block_given?)
+      payload = render_payload(completion_params)
 
       if block_given?
-        stream_response connection, payload, &
+        stream_response(completion_params.connection, payload, &)
       else
-        sync_response connection, payload
+        sync_response completion_params.connection, payload
       end
     end
 
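With the interface reduced to `complete(params, &)` and `render_payload(params)`, a provider's payload builder only has to read from the parameter object. A hypothetical, stripped-down provider chat module under that contract; `EchoProvider` is purely illustrative, and the real helpers (`sync_response`, `stream_response`, `maybe_normalize_temperature`) still come from `Provider::Methods` and `Streaming` rather than being reimplemented here.

# Hypothetical payload builder following the new single-argument contract.
# Assumes message objects respond to #role and #content, as the gem's do.
module RubyLLM
  module Providers
    module EchoProvider
      module Chat
        module_function

        def completion_url
          '/v1/echo'
        end

        # Everything needed to build the request now arrives on one object.
        def render_payload(params)
          {
            model: params.model,
            temperature: params.temperature,
            stream: params.stream,
            messages: params.messages.map { |m| { role: m.role, content: m.content } }
          }
        end
      end
    end
  end
end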

lib/ruby_llm/providers/anthropic/chat.rb

Lines changed: 8 additions & 8 deletions
@@ -11,14 +11,14 @@ def completion_url
           '/v1/messages'
         end
 
-        def render_payload(messages, tools:, temperature:, model:, stream: false,
-                           cache_prompts: { system: false, user: false, tools: false })
-          system_messages, chat_messages = separate_messages(messages)
-          system_content = build_system_content(system_messages, cache: cache_prompts[:system])
-
-          build_base_payload(chat_messages, temperature, model, stream, cache: cache_prompts[:user]).tap do |payload|
-            add_optional_fields(payload, system_content: system_content, tools: tools,
-                                cache_tools: cache_prompts[:tools])
+        def render_payload(params)
+          system_messages, chat_messages = separate_messages(params.messages)
+          system_content = build_system_content(system_messages, cache: params.cache_prompts[:system])
+
+          build_base_payload(chat_messages, params.temperature, params.model, params.stream,
+                             cache: params.cache_prompts[:user]).tap do |payload|
+            add_optional_fields(payload, system_content: system_content, tools: params.tools,
+                                cache_tools: params.cache_prompts[:tools])
           end
         end
 
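For orientation, the three cache flags on `params.cache_prompts` are routed to separate call sites in the method above: `:system` to `build_system_content`, `:user` to `build_base_payload`, and `:tools` to `add_optional_fields` (as `cache_tools:`). A hedged sketch of a params object that caches only the system prompt; the model id is hypothetical.

# Hypothetical params enabling prompt caching for the system prompt only;
# the :user and :tools flags stay false and flow to build_base_payload /
# add_optional_fields respectively.
params = RubyLLM::CompletionParams.new(
  messages: [],
  tools: {},
  temperature: 0.3,
  model: 'claude-3-5-haiku',
  cache_prompts: { system: true, user: false, tools: false }
)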

lib/ruby_llm/providers/bedrock/chat.rb

Lines changed: 8 additions & 8 deletions
@@ -39,20 +39,20 @@ def completion_url
           "model/#{@model_id}/invoke"
         end
 
-        def render_payload(messages, tools:, temperature:, model:, stream: false, # rubocop:disable Lint/UnusedMethodArgument
-                           cache_prompts: { system: false, user: false, tools: false })
+        def render_payload(params)
           # Hold model_id in instance variable for use in completion_url and stream_url
-          @model_id = model
+          @model_id = params.model
 
-          system_messages, chat_messages = Anthropic::Chat.separate_messages(messages)
-          system_content = Anthropic::Chat.build_system_content(system_messages, cache: cache_prompts[:system])
+          system_messages, chat_messages = Anthropic::Chat.separate_messages(params.messages)
+          system_content = Anthropic::Chat.build_system_content(system_messages, cache: params.cache_prompts[:system])
 
-          build_base_payload(chat_messages, temperature, model, cache: cache_prompts[:user]).tap do |payload|
+          build_base_payload(chat_messages, params.temperature, params.model,
+                             cache: params.cache_prompts[:user]).tap do |payload|
             Anthropic::Chat.add_optional_fields(
               payload,
               system_content: system_content,
-              tools: tools,
-              cache_tools: cache_prompts[:tools]
+              tools: params.tools,
+              cache_tools: params.cache_prompts[:tools]
            )
           end
         end
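As the inline comment notes, the model id is held in `@model_id` so that `completion_url` (shown at the top of this hunk) can interpolate it into the invoke path. A small illustrative sketch with a hypothetical Bedrock model id:

# Illustrative only: the model carried on the params object ends up in the
# invoke path via @model_id once render_payload(params) has run.
params = RubyLLM::CompletionParams.new(
  messages: [],
  tools: {},
  temperature: 0.0,
  model: 'anthropic.claude-3-5-sonnet-20240620-v1:0',  # hypothetical id
  cache_prompts: { system: false, user: false, tools: false }
)
# completion_url would then return:
#   "model/anthropic.claude-3-5-sonnet-20240620-v1:0/invoke"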

lib/ruby_llm/providers/gemini/chat.rb

Lines changed: 5 additions & 5 deletions
@@ -11,15 +11,15 @@ def completion_url
           "models/#{@model}:generateContent"
         end
 
-        def render_payload(messages, tools:, temperature:, model:, stream: false, cache_prompts: {}) # rubocop:disable Lint/UnusedMethodArgument
-          @model = model # Store model for completion_url/stream_url
+        def render_payload(params)
+          @model = params.model # Store model for completion_url/stream_url
           payload = {
-            contents: format_messages(messages),
+            contents: format_messages(params.messages),
             generationConfig: {
-              temperature: temperature
+              temperature: params.temperature
             }
           }
-          payload[:tools] = format_tools(tools) if tools.any?
+          payload[:tools] = format_tools(params.tools) if params.tools.any?
           payload
         end
 
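The returned structure is unchanged; only the source of each value moved onto `params`. A sketch of the hash this method yields for a tool-free request, with the output of `format_messages` abbreviated since its internals are outside this diff:

# Illustrative shape only (contents abbreviated); :tools is added solely when
# params.tools.any?.
example_payload = {
  contents: [],                             # format_messages(params.messages)
  generationConfig: { temperature: 0.2 }    # params.temperature
}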

lib/ruby_llm/providers/openai/chat.rb

Lines changed: 8 additions & 8 deletions
@@ -11,18 +11,18 @@ def completion_url
 
         module_function
 
-        def render_payload(messages, tools:, temperature:, model:, stream: false, cache_prompts: {}) # rubocop:disable Lint/UnusedMethodArgument
+        def render_payload(params)
           {
-            model: model,
-            messages: format_messages(messages),
-            temperature: temperature,
-            stream: stream
+            model: params.model,
+            messages: format_messages(params.messages),
+            temperature: params.temperature,
+            stream: params.stream
           }.tap do |payload|
-            if tools.any?
-              payload[:tools] = tools.map { |_, tool| tool_for(tool) }
+            if params.tools.any?
+              payload[:tools] = params.tools.map { |_, tool| tool_for(tool) }
               payload[:tool_choice] = 'auto'
             end
-            payload[:stream_options] = { include_usage: true } if stream
+            payload[:stream_options] = { include_usage: true } if params.stream
           end
         end
 
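A worked sketch of what the method above produces when the params object carries `stream: true` and no tools; only `stream` and `stream_options` differ from the non-streaming case. The model id is hypothetical and the message formatting is elided, since `format_messages` is outside this diff.

# Illustrative only: params and expected payload for a streaming, tool-free call.
params = RubyLLM::CompletionParams.new(
  messages: [], tools: {}, temperature: 0.7,
  model: 'gpt-4o-mini',
  stream: true
)
# render_payload(params) would yield:
# {
#   model: 'gpt-4o-mini',
#   messages: [],                             # format_messages([]) assumed empty
#   temperature: 0.7,
#   stream: true,
#   stream_options: { include_usage: true }   # added because params.stream is true
# }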
