summaryrefslogtreecommitdiff
path: root/components/providers
diff options
context:
space:
mode:
authoricebaker <113217272+icebaker@users.noreply.github.com>2023-12-15 08:35:09 -0300
committerGitHub <noreply@github.com>2023-12-15 08:35:09 -0300
commitf3200fe0448044ebf43fb52f40a47bc648082c56 (patch)
tree6a5db83f9210a15b4e57516791d23f353b27f7a3 /components/providers
parentfb96658a1ca4b6e3e0505e7a39f660e1a05b3c6e (diff)
parent639fcc2da50333da00fe50a0970fe28b4e5e9908 (diff)
Merge pull request #9 from icebaker/ib-gemini
Adding support for Google Gemini
Diffstat (limited to 'components/providers')
-rw-r--r--components/providers/google.rb202
-rw-r--r--components/providers/openai.rb37
-rw-r--r--components/providers/openai/tools.rb101
-rw-r--r--components/providers/tools.rb99
4 files changed, 325 insertions, 114 deletions
diff --git a/components/providers/google.rb b/components/providers/google.rb
new file mode 100644
index 0000000..f847677
--- /dev/null
+++ b/components/providers/google.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+
+require 'gemini-ai'
+
+require_relative 'base'
+
+require_relative '../../logic/providers/google/tools'
+require_relative '../../logic/providers/google/tokens'
+
+require_relative 'tools'
+
+module NanoBot
+ module Components
+ module Providers
+ class Google < Base
+ SETTINGS = {
+ generationConfig: %i[
+ temperature topP topK candidateCount maxOutputTokens stopSequences
+ ].freeze
+ }.freeze
+
+ SAFETY_SETTINGS = %i[category threshold].freeze
+
+ attr_reader :settings
+
+ def initialize(options, settings, credentials, _environment)
+ @settings = settings
+
+ @client = Gemini.new(
+ credentials: {
+ file_path: credentials[:'file-path'],
+ project_id: credentials[:'project-id'],
+ region: credentials[:region]
+ },
+ settings: { model: options[:model], stream: options[:stream] }
+ )
+ end
+
+ def evaluate(input, streaming, cartridge, &feedback)
+ messages = input[:history].map do |event|
+ if event[:message].nil? && event[:meta] && event[:meta][:tool_calls]
+ { role: 'model',
+ parts: event[:meta][:tool_calls],
+ _meta: { at: event[:at] } }
+ elsif event[:who] == 'tool'
+ { role: 'function',
+ parts: [
+ { functionResponse: {
+ name: event[:meta][:name],
+ response: { name: event[:meta][:name], content: event[:message].to_s }
+ } }
+ ],
+ _meta: { at: event[:at] } }
+ else
+ { role: event[:who] == 'user' ? 'user' : 'model',
+ parts: { text: event[:message] },
+ _meta: { at: event[:at] } }
+ end
+ end
+
+ %i[backdrop directive].each do |key|
+ next unless input[:behavior][key]
+
+ # TODO: Does Gemini have system messages?
+ messages.prepend(
+            { role: 'user', # Gemini has no system role; :directive and :backdrop both map to 'user'
+ parts: { text: input[:behavior][key] },
+ _meta: { at: Time.now } }
+ )
+ end
+
+ payload = { contents: messages, generationConfig: { candidateCount: 1 } }
+
+ if @settings
+ SETTINGS.each_key do |key|
+ SETTINGS[key].each do |sub_key|
+ if @settings.key?(key) && @settings[key].key?(sub_key)
+ payload[key] = {} unless payload.key?(key)
+ payload[key][sub_key] = @settings[key][sub_key]
+ end
+ end
+ end
+
+ if @settings[:safetySettings].is_a?(Array)
+            payload[:safetySettings] = [] # key never pre-exists: payload holds only :contents and :generationConfig here
+
+ @settings[:safetySettings].each do |safety_setting|
+ setting = {}
+ SAFETY_SETTINGS.each { |key| setting[key] = safety_setting[key] }
+ payload[:safetySettings] << setting
+ end
+ end
+ end
+
+ if input[:tools]
+ payload[:tools] = {
+ function_declarations: input[:tools].map { |raw| Logic::Google::Tools.adapt(raw) }
+ }
+ end
+
+ if streaming
+ content = ''
+ tools = []
+
+ stream_call_back = proc do |event, _parsed, _raw|
+ partial_content = event.dig('candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('text')
+ end.map { |part| part['text'] }.join
+
+ partial_tools = event.dig('candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('functionCall')
+ end
+
+ tools.concat(partial_tools) if partial_tools.size.positive?
+
+ if partial_content
+ content += partial_content
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: partial_content } }
+ )
+ end
+
+ if event.dig('candidates', 0, 'finishReason')
+              unless tools.empty? # tools is always an Array (built via filter/concat); safe navigation was redundant
+ feedback.call(
+ { should_be_stored: true,
+ needs_another_round: true,
+ interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
+ )
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::Google::Tools
+ ).each do |interaction|
+ feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
+ end
+ end
+
+ feedback.call(
+ { should_be_stored: !(content.nil? || content == ''),
+ interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+ finished: true }
+ )
+ end
+ end
+
+ begin
+ @client.stream_generate_content(
+ Logic::Google::Tokens.apply_policies!(cartridge, payload),
+ stream: true, &stream_call_back
+ )
+ rescue StandardError => e
+ raise e.class, e.response[:body] if e.response && e.response[:body]
+
+ raise e
+ end
+ else
+ begin
+ result = @client.stream_generate_content(
+ Logic::Google::Tokens.apply_policies!(cartridge, payload),
+ stream: false
+ )
+ rescue StandardError => e
+ raise e.class, e.response[:body] if e.response && e.response[:body]
+
+ raise e
+ end
+
+ tools = result.dig(0, 'candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('functionCall')
+ end
+
+          unless tools.empty? # tools comes from Array#filter and is never nil
+ feedback.call(
+ { should_be_stored: true,
+ needs_another_round: true,
+ interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
+ )
+
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::Google::Tools
+ ).each do |interaction|
+ feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
+ end
+ end
+
+ content = result.map do |answer|
+ answer.dig('candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('text')
+ end.map { |part| part['text'] }.join
+ end.join
+
+ feedback.call(
+ { should_be_stored: !(content.nil? || content.to_s.strip == ''),
+ interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+ finished: true }
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/components/providers/openai.rb b/components/providers/openai.rb
index 6384181..f6eafd4 100644
--- a/components/providers/openai.rb
+++ b/components/providers/openai.rb
@@ -6,9 +6,9 @@ require_relative 'base'
require_relative '../crypto'
require_relative '../../logic/providers/openai/tools'
-require_relative '../../controllers/interfaces/tools'
+require_relative '../../logic/providers/openai/tokens'
-require_relative 'openai/tools'
+require_relative 'tools'
module NanoBot
module Components
@@ -18,7 +18,7 @@ module NanoBot
CHAT_SETTINGS = %i[
model stream temperature top_p n stop max_tokens
- presence_penalty frequency_penalty logit_bias
+ presence_penalty frequency_penalty logit_bias seed response_format
].freeze
attr_reader :settings
@@ -40,12 +40,18 @@ module NanoBot
def evaluate(input, streaming, cartridge, &feedback)
messages = input[:history].map do |event|
if event[:message].nil? && event[:meta] && event[:meta][:tool_calls]
- { role: 'assistant', content: nil, tool_calls: event[:meta][:tool_calls] }
+ { role: 'assistant', content: nil,
+ tool_calls: event[:meta][:tool_calls],
+ _meta: { at: event[:at] } }
elsif event[:who] == 'tool'
{ role: event[:who], content: event[:message].to_s,
- tool_call_id: event[:meta][:id], name: event[:meta][:name] }
+ tool_call_id: event[:meta][:id],
+ name: event[:meta][:name],
+ _meta: { at: event[:at] } }
else
- { role: event[:who] == 'user' ? 'user' : 'assistant', content: event[:message] }
+ { role: event[:who] == 'user' ? 'user' : 'assistant',
+ content: event[:message],
+ _meta: { at: event[:at] } }
end
end
@@ -54,7 +60,8 @@ module NanoBot
messages.prepend(
{ role: key == :directive ? 'system' : 'user',
- content: input[:behavior][key] }
+ content: input[:behavior][key],
+ _meta: { at: Time.now } }
)
end
@@ -66,7 +73,7 @@ module NanoBot
payload.delete(:logit_bias) if payload.key?(:logit_bias) && payload[:logit_bias].nil?
- payload[:tools] = input[:tools].map { |raw| NanoBot::Logic::OpenAI::Tools.adapt(raw) } if input[:tools]
+ payload[:tools] = input[:tools].map { |raw| Logic::OpenAI::Tools.adapt(raw) } if input[:tools]
if streaming
content = ''
@@ -114,13 +121,15 @@ module NanoBot
needs_another_round: true,
interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
)
- Tools.apply(cartridge, input[:tools], tools, feedback).each do |interaction|
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::OpenAI::Tools
+ ).each do |interaction|
feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
end
end
feedback.call(
- { should_be_stored: !(content.nil? || content == ''),
+ { should_be_stored: !(content.nil? || content.to_s.strip == ''),
interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
finished: true }
)
@@ -128,7 +137,7 @@ module NanoBot
end
begin
- @client.chat(parameters: payload)
+ @client.chat(parameters: Logic::OpenAI::Tokens.apply_policies!(cartridge, payload))
rescue StandardError => e
raise e.class, e.response[:body] if e.response && e.response[:body]
@@ -136,7 +145,7 @@ module NanoBot
end
else
begin
- result = @client.chat(parameters: payload)
+ result = @client.chat(parameters: Logic::OpenAI::Tokens.apply_policies!(cartridge, payload))
rescue StandardError => e
raise e.class, e.response[:body] if e.response && e.response[:body]
@@ -153,7 +162,9 @@ module NanoBot
needs_another_round: true,
interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
)
- Tools.apply(cartridge, input[:tools], tools, feedback).each do |interaction|
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::OpenAI::Tools
+ ).each do |interaction|
feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
end
end
diff --git a/components/providers/openai/tools.rb b/components/providers/openai/tools.rb
deleted file mode 100644
index cd35e80..0000000
--- a/components/providers/openai/tools.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require_relative '../../embedding'
-require_relative '../../../logic/cartridge/safety'
-
-require 'concurrent'
-
-module NanoBot
- module Components
- module Providers
- class OpenAI < Base
- module Tools
- def self.confirming(tool, feedback)
- feedback.call(
- { should_be_stored: false,
- interaction: { who: 'AI', message: nil, meta: {
- tool: { action: 'confirming', id: tool[:id], name: tool[:name], parameters: tool[:parameters] }
- } } }
- )
- end
-
- def self.apply(cartridge, function_cartridge, tools, feedback)
- prepared_tools = NanoBot::Logic::OpenAI::Tools.prepare(function_cartridge, tools)
-
- if Logic::Cartridge::Safety.confirmable?(cartridge)
- prepared_tools.each { |tool| tool[:allowed] = confirming(tool, feedback) }
- else
- prepared_tools.each { |tool| tool[:allowed] = true }
- end
-
- futures = prepared_tools.map do |tool|
- Concurrent::Promises.future do
- if tool[:allowed]
- process!(tool, feedback, function_cartridge, cartridge)
- else
- tool[:output] =
- "We asked the user you're chatting with for permission, but the user did not allow you to run this tool or function."
- tool
- end
- end
- end
-
- results = Concurrent::Promises.zip(*futures).value!
-
- results.map do |applied_tool|
- {
- who: 'tool',
- message: applied_tool[:output],
- meta: { id: applied_tool[:id], name: applied_tool[:name] }
- }
- end
- end
-
- def self.process!(tool, feedback, _function_cartridge, cartridge)
- feedback.call(
- { should_be_stored: false,
- interaction: { who: 'AI', message: nil, meta: {
- tool: { action: 'executing', id: tool[:id], name: tool[:name], parameters: tool[:parameters] }
- } } }
- )
-
- call = {
- parameters: %w[parameters],
- values: [tool[:parameters]],
- safety: { sandboxed: Logic::Cartridge::Safety.sandboxed?(cartridge) }
- }
-
- if %i[fennel lua clojure].count { |key| !tool[:source][key].nil? } > 1
- raise StandardError, 'conflicting tools'
- end
-
- if !tool[:source][:fennel].nil?
- call[:source] = tool[:source][:fennel]
- tool[:output] = Components::Embedding.fennel(**call)
- elsif !tool[:source][:clojure].nil?
- call[:source] = tool[:source][:clojure]
- tool[:output] = Components::Embedding.clojure(**call)
- elsif !tool[:source][:lua].nil?
- call[:source] = tool[:source][:lua]
- tool[:output] = Components::Embedding.lua(**call)
- else
- raise 'missing source code'
- end
-
- feedback.call(
- { should_be_stored: false,
- interaction: { who: 'AI', message: nil, meta: {
- tool: {
- action: 'responding', id: tool[:id], name: tool[:name],
- parameters: tool[:parameters], output: tool[:output]
- }
- } } }
- )
-
- tool
- end
- end
- end
- end
- end
-end
diff --git a/components/providers/tools.rb b/components/providers/tools.rb
new file mode 100644
index 0000000..122bc14
--- /dev/null
+++ b/components/providers/tools.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require_relative '../embedding'
+require_relative '../../logic/cartridge/safety'
+
+require 'concurrent'
+
+module NanoBot
+ module Components
+ module Providers
+ module Tools
+ def self.confirming(tool, feedback)
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: nil, meta: {
+ tool: { action: 'confirming', id: tool[:id], name: tool[:label], parameters: tool[:parameters] }
+ } } }
+ )
+ end
+
+ def self.apply(cartridge, function_cartridge, tools, feedback, tools_logic)
+ prepared_tools = tools_logic.prepare(function_cartridge, tools)
+
+ if Logic::Cartridge::Safety.confirmable?(cartridge)
+ prepared_tools.each { |tool| tool[:allowed] = confirming(tool, feedback) }
+ else
+ prepared_tools.each { |tool| tool[:allowed] = true }
+ end
+
+ futures = prepared_tools.map do |tool|
+ Concurrent::Promises.future do
+ if tool[:allowed]
+ process!(tool, feedback, function_cartridge, cartridge)
+ else
+ tool[:output] =
+ "We asked the user you're chatting with for permission, but the user did not allow you to run this tool or function."
+ tool
+ end
+ end
+ end
+
+ results = Concurrent::Promises.zip(*futures).value!
+
+ results.map do |applied_tool|
+ {
+ who: 'tool',
+ message: applied_tool[:output],
+ meta: { id: applied_tool[:id], name: applied_tool[:name] }
+ }
+ end
+ end
+
+ def self.process!(tool, feedback, _function_cartridge, cartridge)
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: nil, meta: {
+ tool: { action: 'executing', id: tool[:id], name: tool[:label], parameters: tool[:parameters] }
+ } } }
+ )
+
+ call = {
+ parameters: %w[parameters],
+ values: [tool[:parameters]],
+ safety: { sandboxed: Logic::Cartridge::Safety.sandboxed?(cartridge) }
+ }
+
+ if %i[fennel lua clojure].count { |key| !tool[:source][key].nil? } > 1
+ raise StandardError, 'conflicting tools'
+ end
+
+ if !tool[:source][:fennel].nil?
+ call[:source] = tool[:source][:fennel]
+ tool[:output] = Components::Embedding.fennel(**call)
+ elsif !tool[:source][:clojure].nil?
+ call[:source] = tool[:source][:clojure]
+ tool[:output] = Components::Embedding.clojure(**call)
+ elsif !tool[:source][:lua].nil?
+ call[:source] = tool[:source][:lua]
+ tool[:output] = Components::Embedding.lua(**call)
+ else
+ raise 'missing source code'
+ end
+
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: nil, meta: {
+ tool: {
+ action: 'responding', id: tool[:id], name: tool[:label],
+ parameters: tool[:parameters], output: tool[:output]
+ }
+ } } }
+ )
+
+ tool
+ end
+ end
+ end
+ end
+end