1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
|
# frozen_string_literal: true
require 'maritaca-ai'
require_relative 'base'
require_relative '../../logic/providers/maritaca/tokens'
require_relative '../../logic/helpers/hash'
require_relative '../../logic/cartridge/default'
module NanoBot
module Components
module Providers
# Provider adapter that connects NanoBot to the Maritaca AI chat API
# (https://chat.maritaca.ai) through the `maritaca-ai` gem.
class Maritaca < Base
  attr_reader :settings

  # Settings forwarded verbatim from the cartridge into the chat payload.
  CHAT_SETTINGS = %i[
    max_tokens model do_sample temperature top_p repetition_penalty stopping_tokens
  ].freeze

  # @param options      [Hash, nil] provider options (kebab-case keys allowed).
  # @param settings     [Hash]      chat settings (see CHAT_SETTINGS).
  # @param credentials  [Hash]      API credentials (kebab-case keys allowed).
  # @param _environment [Object]    unused, kept for interface parity with other providers.
  def initialize(options, settings, credentials, _environment)
    @settings = settings

    # Normalize kebab-case option keys ('max-tokens') into snake_case symbols.
    maritaca_options = if options
                         options.transform_keys { |key| key.to_s.gsub('-', '_').to_sym }
                       else
                         {}
                       end

    # Fall back to the default cartridge's stream preference when unset.
    unless maritaca_options.key?(:stream)
      maritaca_options[:stream] = Logic::Helpers::Hash.fetch(
        Logic::Cartridge::Default.instance.values, %i[provider options stream]
      )
    end

    # The maritaca-ai client names this flag `server_sent_events`, not `stream`.
    maritaca_options[:server_sent_events] = maritaca_options.delete(:stream)

    @client = ::Maritaca.new(
      credentials: credentials.transform_keys { |key| key.to_s.gsub('-', '_').to_sym },
      options: maritaca_options
    )
  end

  # Runs one chat exchange against the Maritaca API.
  #
  # @param input     [Hash]    :history, :behavior, and optional :tools.
  # @param streaming [Boolean] stream partial answers through `feedback` when true.
  # @param cartridge [Object]  cartridge used for token-policy enforcement.
  # @param feedback  [Proc]    receives { should_be_stored:, interaction:, finished: } hashes.
  # @raise [RuntimeError] if tools are requested (unsupported by Maritaca).
  def evaluate(input, streaming, cartridge, &feedback)
    messages = input[:history].map do |event|
      { role: event[:who] == 'user' ? 'user' : 'assistant',
        content: event[:message],
        _meta: { at: event[:at] } }
    end

    # TODO: Does Maritaca have system messages? Until confirmed, backdrop and
    # directive are prepended as regular user messages.
    %i[backdrop directive].each do |key|
      next unless input[:behavior][key]

      messages.prepend(
        { role: 'user',
          content: input[:behavior][key],
          _meta: { at: Time.now } }
      )
    end

    payload = { chat_mode: true, messages: }

    # Copy cartridge settings into the payload without clobbering existing keys.
    CHAT_SETTINGS.each do |key|
      payload[key] = @settings[key] if @settings.key?(key) && !payload.key?(key)
    end

    raise 'Maritaca does not support tools.' if input[:tools]

    if streaming
      content = ''

      stream_call_back = proc do |event, _raw|
        partial_content = event['answer']

        if partial_content
          content += partial_content
          feedback.call(
            { should_be_stored: false,
              interaction: { who: 'AI', message: partial_content } }
          )
        end
      end

      @client.chat_inference(
        Logic::Maritaca::Tokens.apply_policies!(cartridge, payload),
        server_sent_events: true, &stream_call_back
      )

      feedback.call(
        { should_be_stored: !blank?(content),
          interaction: blank?(content) ? nil : { who: 'AI', message: content },
          finished: true }
      )
    else
      result = @client.chat_inference(
        Logic::Maritaca::Tokens.apply_policies!(cartridge, payload),
        server_sent_events: false
      )

      content = result['answer']

      feedback.call(
        { should_be_stored: !blank?(content),
          interaction: blank?(content) ? nil : { who: 'AI', message: content },
          finished: true }
      )
    end
  end

  private

  # True when the answer is nil, empty, or whitespace-only; such answers are
  # neither stored nor surfaced as an interaction. Used by both the streaming
  # and non-streaming paths so they agree on what counts as "no answer".
  def blank?(content)
    content.nil? || content.to_s.strip == ''
  end
end
end
end
end
|