# frozen_string_literal: true

require 'ollama-ai'

require_relative 'base'

require_relative '../../logic/providers/ollama/tokens'
require_relative '../../logic/helpers/hash'
require_relative '../../logic/cartridge/default'

module NanoBot
  module Components
    module Providers
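      # Provider adapter that talks to an Ollama server through the ollama-ai gem's client.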
      class Ollama < Base
        attr_reader :settings
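
        # Top-level chat parameters copied verbatim from the cartridge settings
        # into the request payload.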
        CHAT_SETTINGS = %i[
          model template stream
        ].freeze
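
        # Model and runtime tuning knobs forwarded under the payload's nested `options` key.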
        CHAT_OPTIONS = %i[
          mirostat mirostat_eta mirostat_tau num_ctx num_gqa num_gpu num_thread repeat_last_n
          repeat_penalty temperature seed stop tfs_z num_predict top_k top_p
        ].freeze
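
        # A minimal sketch of a cartridge entry this provider could consume; the exact
        # YAML layout is an assumption, but the keys mirror the settings read below:
        #
        #   provider:
        #     id: ollama
        #     credentials:
        #       address: http://localhost:11434
        #     settings:
        #       model: llama2
        #       stream: true
        #       options:
        #         temperature: 0.7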
        def initialize(options, settings, credentials, _environment)
          @settings = settings

          ollama_options = if options
                             options.transform_keys { |key| key.to_s.gsub('-', '_').to_sym }
                           else
                             {}
                           end
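
          # No stream preference in the cartridge: deep-copy the settings (the original
          # may be frozen or shared) and fall back to the default cartridge's value.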
          unless @settings.key?(:stream)
            @settings = Marshal.load(Marshal.dump(@settings))
            @settings[:stream] = Logic::Helpers::Hash.fetch(
              Logic::Cartridge::Default.instance.values, %i[provider settings stream]
            )
          end

          ollama_options[:server_sent_events] = @settings[:stream]

          credentials ||= {}

          @client = ::Ollama.new(
            credentials: credentials.transform_keys { |key| key.to_s.gsub('-', '_').to_sym },
            options: ollama_options
          )
        end
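
        # Builds the chat payload from the conversation history and behaviors, sends it
        # to Ollama, and reports results through the feedback callback.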
        def evaluate(input, streaming, cartridge, &feedback)
          messages = input[:history].map do |event|
            { role: event[:who] == 'user' ? 'user' : 'assistant',
              content: event[:message],
              _meta: { at: event[:at] } }
          end
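
          # Prepend the directive as a system message and the backdrop as a user message
          # so they come before the conversation history.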
          %i[backdrop directive].each do |key|
            next unless input[:behavior][key]

            messages.prepend(
              { role: key == :directive ? 'system' : 'user',
                content: input[:behavior][key],
                _meta: { at: Time.now } }
            )
          end
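
          # Assemble the payload: whitelisted settings go at the top level, tuning knobs
          # into the nested `options` hash.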
          payload = { messages: }

          CHAT_SETTINGS.each do |key|
            payload[key] = @settings[key] unless payload.key?(key) || !@settings.key?(key)
          end

          if @settings.key?(:options)
            options = {}

            CHAT_OPTIONS.each do |key|
              options[key] = @settings[:options][key] unless options.key?(key) || !@settings[:options].key?(key)
            end

            payload[:options] = options unless options.empty?
          end

          raise 'Ollama does not support tools.' if input[:tools]
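
          # With streaming enabled, partial chunks are relayed to the caller as they
          # arrive; otherwise the full response is collected and delivered once.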
          if streaming
            content = ''

            stream_call_back = proc do |event, _raw|
              partial_content = event.dig('message', 'content')

              if partial_content
                content += partial_content
                feedback.call(
                  { should_be_stored: false,
                    interaction: { who: 'AI', message: partial_content } }
                )
              end

              if event['done']
                feedback.call(
                  { should_be_stored: !(content.nil? || content == ''),
                    interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
                    finished: true }
                )
              end
            end

            @client.chat(
              Logic::Ollama::Tokens.apply_policies!(cartridge, payload),
              server_sent_events: true, &stream_call_back
            )
          else
            result = @client.chat(
              Logic::Ollama::Tokens.apply_policies!(cartridge, payload),
              server_sent_events: false
            )
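
            # A non-streaming call still returns a list of events; join their content
            # into a single message.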
            content = result.map { |event| event.dig('message', 'content') }.join

            feedback.call(
              { should_be_stored: !(content.nil? || content.to_s.strip == ''),
                interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
                finished: true }
            )
          end
        end
      end
    end
  end
end