Files
assistant-storefront/spec/lib/integrations/openai/processor_service_spec.rb
Liang XJ 092fb2e083
Some checks failed
Lock Threads / action (push) Has been cancelled
Mark stale issues and pull requests / stale (push) Has been cancelled
Publish Chatwoot EE docker images / build (linux/amd64, ubuntu-latest) (push) Has been cancelled
Publish Chatwoot EE docker images / build (linux/arm64, ubuntu-22.04-arm) (push) Has been cancelled
Publish Chatwoot EE docker images / merge (push) Has been cancelled
Publish Chatwoot CE docker images / build (linux/amd64, ubuntu-latest) (push) Has been cancelled
Publish Chatwoot CE docker images / build (linux/arm64, ubuntu-22.04-arm) (push) Has been cancelled
Publish Chatwoot CE docker images / merge (push) Has been cancelled
Run Chatwoot CE spec / lint-backend (push) Has been cancelled
Run Chatwoot CE spec / lint-frontend (push) Has been cancelled
Run Chatwoot CE spec / frontend-tests (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (0, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (1, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (10, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (11, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (12, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (13, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (14, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (15, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (2, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (3, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (4, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (5, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (6, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (7, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (8, 16) (push) Has been cancelled
Run Chatwoot CE spec / backend-tests (9, 16) (push) Has been cancelled
Run Linux nightly installer / nightly (push) Has been cancelled
Initial commit: Add logistics and order_detail message types
- Add Logistics component with progress tracking
- Add OrderDetail component for order information
- Support data-driven steps and actions
- Add blue color scale to widget SCSS
- Fix node overflow and progress bar rendering issues
- Add English translations for dashboard components

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
2026-01-26 11:16:56 +08:00

202 lines
7.0 KiB
Ruby

# frozen_string_literal: true

require 'rails_helper'
# OpenStruct is no longer a default gem from Ruby 3.5 (deprecation warnings start
# in 3.4) — require it explicitly instead of relying on transitive loading.
require 'ostruct'

# Specs for the OpenAI processor service. All LLM traffic is stubbed out via
# RubyLLM doubles, so no network calls are made.
RSpec.describe Integrations::Openai::ProcessorService do
  subject(:service) { described_class.new(hook: hook, event: event) }

  let(:account) { create(:account) }
  let(:hook) { create(:integrations_hook, :openai, account: account) }

  # Mock RubyLLM objects
  let(:mock_chat) { instance_double(RubyLLM::Chat) }
  let(:mock_context) { instance_double(RubyLLM::Context) }
  # OpenStruct accepts arbitrary attribute writes, mimicking RubyLLM's config object.
  let(:mock_config) { OpenStruct.new }
  let(:mock_response) do
    instance_double(
      RubyLLM::Message,
      content: 'This is a reply from openai.',
      input_tokens: nil,
      output_tokens: nil
    )
  end
  let(:mock_response_with_usage) do
    instance_double(
      RubyLLM::Message,
      content: 'This is a reply from openai.',
      input_tokens: 50,
      output_tokens: 20
    )
  end

  before do
    allow(RubyLLM).to receive(:context).and_yield(mock_config).and_return(mock_context)
    allow(mock_context).to receive(:chat).and_return(mock_chat)
    allow(mock_chat).to receive(:with_instructions).and_return(mock_chat)
    allow(mock_chat).to receive(:add_message).and_return(mock_chat)
    allow(mock_chat).to receive(:ask).and_return(mock_response)
  end

  describe '#perform' do
    describe 'text transformation operations' do
      shared_examples 'text transformation operation' do |event_name|
        let(:event) { { 'name' => event_name, 'data' => { 'content' => 'This is a test' } } }

        it 'returns the transformed text' do
          result = service.perform
          expect(result[:message]).to eq('This is a reply from openai.')
        end

        it 'sends the user content to the LLM' do
          service.perform
          expect(mock_chat).to have_received(:ask).with('This is a test')
        end

        it 'sets system instructions' do
          service.perform
          expect(mock_chat).to have_received(:with_instructions).with(a_string_including('You are a helpful support agent'))
        end
      end

      it_behaves_like 'text transformation operation', 'rephrase'
      it_behaves_like 'text transformation operation', 'fix_spelling_grammar'
      it_behaves_like 'text transformation operation', 'shorten'
      it_behaves_like 'text transformation operation', 'expand'
      it_behaves_like 'text transformation operation', 'make_friendly'
      it_behaves_like 'text transformation operation', 'make_formal'
      it_behaves_like 'text transformation operation', 'simplify'
    end

    describe 'conversation-based operations' do
      let!(:conversation) { create(:conversation, account: account) }

      before do
        create(:message, account: account, conversation: conversation, message_type: :incoming, content: 'hello agent')
        create(:message, account: account, conversation: conversation, message_type: :outgoing, content: 'hello customer')
      end

      context 'with reply_suggestion event' do
        let(:event) { { 'name' => 'reply_suggestion', 'data' => { 'conversation_display_id' => conversation.display_id } } }

        it 'returns the suggested reply' do
          result = service.perform
          expect(result[:message]).to eq('This is a reply from openai.')
        end

        it 'adds conversation history before asking' do
          service.perform
          # Should add the first message as history, then ask with the last message
          expect(mock_chat).to have_received(:add_message).with(role: :user, content: 'hello agent')
          expect(mock_chat).to have_received(:ask).with('hello customer')
        end
      end

      context 'with summarize event' do
        let(:event) { { 'name' => 'summarize', 'data' => { 'conversation_display_id' => conversation.display_id } } }

        it 'returns the summary' do
          result = service.perform
          expect(result[:message]).to eq('This is a reply from openai.')
        end

        it 'sends formatted conversation as a single message' do
          service.perform
          # Summarize sends conversation as a formatted string in one user message
          expect(mock_chat).to have_received(:ask).with(a_string_matching(/Customer.*hello agent.*Agent.*hello customer/m))
        end
      end

      context 'with label_suggestion event and no labels' do
        let(:event) { { 'name' => 'label_suggestion', 'data' => { 'conversation_display_id' => conversation.display_id } } }

        it 'returns nil' do
          expect(service.perform).to be_nil
        end
      end
    end

    describe 'edge cases' do
      context 'with unknown event name' do
        let(:event) { { 'name' => 'unknown', 'data' => {} } }

        it 'returns nil' do
          expect(service.perform).to be_nil
        end
      end
    end

    describe 'response structure' do
      let(:event) { { 'name' => 'rephrase', 'data' => { 'content' => 'test message' } } }

      context 'when response includes usage data' do
        before do
          allow(mock_chat).to receive(:ask).and_return(mock_response_with_usage)
        end

        it 'returns message with usage data' do
          result = service.perform
          expect(result[:message]).to eq('This is a reply from openai.')
          expect(result[:usage]['prompt_tokens']).to eq(50)
          expect(result[:usage]['completion_tokens']).to eq(20)
          expect(result[:usage]['total_tokens']).to eq(70)
        end

        it 'includes request_messages in response' do
          result = service.perform
          expect(result[:request_messages]).to be_an(Array)
          expect(result[:request_messages].length).to eq(2)
        end
      end

      context 'when response does not include usage data' do
        it 'returns message with zero total tokens' do
          result = service.perform
          expect(result[:message]).to eq('This is a reply from openai.')
          expect(result[:usage]['total_tokens']).to eq(0)
        end

        it 'includes request_messages in response' do
          result = service.perform
          expect(result[:request_messages]).to be_an(Array)
        end
      end
    end

    describe 'endpoint configuration' do
      let(:event) { { 'name' => 'rephrase', 'data' => { 'content' => 'test message' } } }

      context 'without CAPTAIN_OPEN_AI_ENDPOINT configured' do
        before { InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_ENDPOINT')&.destroy }

        it 'uses default OpenAI endpoint' do
          expect(Llm::Config).to receive(:with_api_key).with(
            hook.settings['api_key'],
            api_base: 'https://api.openai.com/v1'
          ).and_call_original
          service.perform
        end
      end

      context 'with CAPTAIN_OPEN_AI_ENDPOINT configured' do
        before do
          # Destroy first so the create below is deterministic even if a config
          # row already exists from seeds or a previous example.
          InstallationConfig.find_by(name: 'CAPTAIN_OPEN_AI_ENDPOINT')&.destroy
          create(:installation_config, name: 'CAPTAIN_OPEN_AI_ENDPOINT', value: 'https://custom.azure.com/')
        end

        it 'uses custom endpoint' do
          expect(Llm::Config).to receive(:with_api_key).with(
            hook.settings['api_key'],
            api_base: 'https://custom.azure.com/v1'
          ).and_call_original
          service.perform
        end
      end
    end
  end
end