Skip to content

Commit f809fb2

Browse files
committed
Using ruby_llm with a small patch for json_object. crmne/ruby_llm#11
1 parent bec0e98 commit f809fb2

File tree

5 files changed

+61
-22
lines changed

5 files changed

+61
-22
lines changed

Gemfile

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,12 @@ gem "canonical-rails"
4141

4242
gem "sitemap_generator"
4343

44-
# bundle config local.wechat /Users/guochunzhong/git/oss/wechat/
44+
# bundle config local.wechat /Users/$(whoami)/git/oss/wechat/
4545
gem "wechat", git: "https://git.thape.com.cn/Eric-Guo/wechat.git", branch: :main
4646
gem "rake"
4747

48-
gem "ruby-openai"
48+
# bundle config local.ruby_llm /Users/$(whoami)/git/oss/ruby_llm/
49+
gem "ruby_llm", git: "https://git.thape.com.cn/rails/ruby_llm.git", branch: :main
4950
gem "uri"
5051
gem "faraday-net_http"
5152
gem "net-http", "~> 0.4.1"

Gemfile.lock

Lines changed: 17 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,20 @@ GIT
1212
thor
1313
zeitwerk (~> 2.4)
1414

15+
GIT
16+
remote: https://git.thape.com.cn/rails/ruby_llm.git
17+
revision: 32e350578c9e05699fdde67e11e28b01645981ba
18+
branch: main
19+
specs:
20+
ruby_llm (1.1.2)
21+
base64
22+
event_stream_parser (~> 1)
23+
faraday (~> 2)
24+
faraday-multipart (~> 1)
25+
faraday-net_http (~> 3)
26+
faraday-retry (~> 2)
27+
zeitwerk (~> 2)
28+
1529
GEM
1630
remote: https://rubygems.org/
1731
specs:
@@ -176,6 +190,8 @@ GEM
176190
multipart-post (~> 2.0)
177191
faraday-net_http (3.3.0)
178192
net-http
193+
faraday-retry (2.3.1)
194+
faraday (~> 2.0)
179195
ffi (1.16.3)
180196
ffi-compiler (1.3.2)
181197
ffi (>= 1.15.5)
@@ -388,10 +404,6 @@ GEM
388404
sorbet-runtime (>= 0.5.10782)
389405
ruby-lsp-rails (0.4.0)
390406
ruby-lsp (>= 0.23.0, < 0.24.0)
391-
ruby-openai (8.1.0)
392-
event_stream_parser (>= 0.3.0, < 2.0.0)
393-
faraday (>= 1)
394-
faraday-multipart (>= 1)
395407
ruby-progressbar (1.13.0)
396408
ruby-vips (2.2.3)
397409
ffi (~> 1.12)
@@ -509,7 +521,7 @@ DEPENDENCIES
509521
rexml
510522
ruby-lsp
511523
ruby-lsp-rails
512-
ruby-openai
524+
ruby_llm!
513525
securerandom (~> 0.3.2)
514526
selenium-webdriver (= 4.26.0)
515527
shakapacker (~> 8.2)

app/models/guest_message.rb

Lines changed: 4 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -59,15 +59,9 @@ def ai_spam_score
5959
}
6060
EOS_PROMPT
6161

62-
response = OpenAI::Client.new.chat(
63-
parameters: {
64-
model: "deepseek-chat",
65-
response_format: {type: "json_object"},
66-
messages: [{role: "system", content: system_prompt},
67-
{role: "user", content: "#{name} from company #{company} (#{contact_details}) leaving message:\n\n#{message}"}],
68-
temperature: 0.7
69-
}
70-
)
71-
response&.dig("choices", 0, "message", "content")
62+
chat = RubyLLM.chat
63+
chat.with_instructions system_prompt
64+
response = chat.ask "#{name} from company #{company} (#{contact_details}) leaving message:\n\n#{message}"
65+
response.content
7266
end
7367
end

config/initializers/openai.rb

Lines changed: 0 additions & 5 deletions
This file was deleted.

config/initializers/ruby_llm.rb

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
require "ruby_llm"
2+
3+
RubyLLM.configure do |config|
4+
# --- Provider API Keys ---
5+
# Provide keys ONLY for the providers you intend to use.
6+
# Using environment variables (ENV.fetch) is highly recommended.
7+
config.openai_api_key = ENV.fetch("OPENAI_API_KEY", nil)
8+
config.anthropic_api_key = ENV.fetch("ANTHROPIC_API_KEY", nil)
9+
config.gemini_api_key = ENV.fetch("GEMINI_API_KEY", nil)
10+
config.deepseek_api_key = ENV.fetch("DEEPSEEK_API_KEY", Rails.application.credentials.deepseek_access_token)
11+
12+
# --- AWS Bedrock Credentials ---
13+
# Uses standard AWS credential chain (environment, shared config, IAM role)
14+
# if these specific keys aren't set. Region is required if using Bedrock.
15+
config.bedrock_api_key = ENV.fetch("AWS_ACCESS_KEY_ID", nil)
16+
config.bedrock_secret_key = ENV.fetch("AWS_SECRET_ACCESS_KEY", nil)
17+
config.bedrock_region = ENV.fetch("AWS_REGION", nil) # e.g., 'us-west-2'
18+
config.bedrock_session_token = ENV.fetch("AWS_SESSION_TOKEN", nil) # For temporary credentials
19+
20+
# --- Custom OpenAI Endpoint --- New in v1.2.0
21+
# Use this for Azure OpenAI, proxies, or self-hosted models via OpenAI-compatible APIs.
22+
# See the "Working with Models" guide for details.
23+
config.openai_api_base = ENV.fetch("OPENAI_API_BASE", nil) # e.g., "https://your-azure.openai.azure.com"
24+
25+
# --- Default Models ---
26+
# Used by RubyLLM.chat, RubyLLM.embed, RubyLLM.paint if no model is specified.
27+
config.default_model = "deepseek-chat" # Default: 'gpt-4.1-nano'
28+
config.default_embedding_model = "text-embedding-3-small" # Default: 'text-embedding-3-small'
29+
config.default_image_model = "dall-e-3" # Default: 'dall-e-3'
30+
31+
# --- Connection Settings ---
32+
config.request_timeout = 60 # Request timeout in seconds (default: 120)
33+
config.max_retries = 2 # Max retries on transient network errors (default: 3)
34+
config.retry_interval = 0.1 # Initial delay in seconds (default: 0.1)
35+
config.retry_backoff_factor = 2 # Multiplier for subsequent retries (default: 2)
36+
config.retry_interval_randomness = 0.5 # Jitter factor (default: 0.5)
37+
end

0 commit comments

Comments (0)