summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authoricebaker <113217272+icebaker@users.noreply.github.com>2023-12-15 08:35:09 -0300
committerGitHub <noreply@github.com>2023-12-15 08:35:09 -0300
commitf3200fe0448044ebf43fb52f40a47bc648082c56 (patch)
tree6a5db83f9210a15b4e57516791d23f353b27f7a3
parentfb96658a1ca4b6e3e0505e7a39f660e1a05b3c6e (diff)
parent639fcc2da50333da00fe50a0970fe28b4e5e9908 (diff)
Merge pull request #9 from icebaker/ib-gemini
Adding support for Google Gemini
-rw-r--r--Gemfile2
-rw-r--r--Gemfile.lock30
-rw-r--r--README.md152
-rw-r--r--components/provider.rb5
-rw-r--r--components/providers/google.rb202
-rw-r--r--components/providers/openai.rb37
-rw-r--r--components/providers/openai/tools.rb101
-rw-r--r--components/providers/tools.rb99
-rw-r--r--controllers/session.rb7
-rw-r--r--logic/cartridge/streaming.rb9
-rw-r--r--logic/providers/google/tokens.rb16
-rw-r--r--logic/providers/google/tools.rb60
-rw-r--r--logic/providers/openai/tokens.rb16
-rw-r--r--logic/providers/openai/tools.rb7
-rw-r--r--nano-bots.gemspec2
-rw-r--r--spec/data/cartridges/streaming.yml1
-rw-r--r--spec/data/providers/google/tools.yml9
-rw-r--r--spec/logic/cartridge/streaming_spec.rb19
-rw-r--r--spec/logic/providers/google/tools_spec.rb78
-rw-r--r--spec/logic/providers/openai/tools_spec.rb7
-rw-r--r--static/cartridges/default.yml2
-rw-r--r--static/gem.rb4
22 files changed, 700 insertions, 165 deletions
diff --git a/Gemfile b/Gemfile
index a47c817..cd8f178 100644
--- a/Gemfile
+++ b/Gemfile
@@ -7,6 +7,6 @@ gemspec
group :test, :development do
gem 'pry-byebug', '~> 3.10', '>= 3.10.1'
gem 'rspec', '~> 3.12'
- gem 'rubocop', '~> 1.58'
+ gem 'rubocop', '~> 1.59'
gem 'rubocop-rspec', '~> 2.25'
end
diff --git a/Gemfile.lock b/Gemfile.lock
index 94c465a..988d478 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -5,7 +5,7 @@ PATH
babosa (~> 2.0)
concurrent-ruby (~> 1.2, >= 1.2.2)
dotenv (~> 2.8, >= 2.8.1)
- faraday (~> 2.7, >= 2.7.12)
+ gemini-ai (~> 1.0)
pry (~> 0.14.2)
rainbow (~> 3.1, >= 3.1.1)
rbnacl (~> 7.1, >= 7.1.1)
@@ -15,6 +15,8 @@ PATH
GEM
remote: https://rubygems.org/
specs:
+ addressable (2.8.6)
+ public_suffix (>= 2.0.2, < 6.0)
ast (2.4.2)
babosa (2.0.0)
base64 (0.2.0)
@@ -32,10 +34,26 @@ GEM
multipart-post (~> 2)
faraday-net_http (3.0.2)
ffi (1.16.3)
+ gemini-ai (1.0.0)
+ event_stream_parser (~> 1.0)
+ faraday (~> 2.7, >= 2.7.12)
+ googleauth (~> 1.9, >= 1.9.1)
+ google-cloud-env (2.1.0)
+ faraday (>= 1.0, < 3.a)
+ googleauth (1.9.1)
+ faraday (>= 1.0, < 3.a)
+ google-cloud-env (~> 2.1)
+ jwt (>= 1.4, < 3.0)
+ multi_json (~> 1.11)
+ os (>= 0.9, < 2.0)
+ signet (>= 0.16, < 2.a)
json (2.7.1)
+ jwt (2.7.1)
language_server-protocol (3.17.0.3)
method_source (1.0.0)
+ multi_json (1.15.0)
multipart-post (2.3.0)
+ os (1.1.4)
parallel (1.23.0)
parser (3.2.2.4)
ast (~> 2.4.1)
@@ -46,6 +64,7 @@ GEM
pry-byebug (3.10.1)
byebug (~> 11.0)
pry (>= 0.13, < 0.15)
+ public_suffix (5.0.4)
racc (1.7.3)
rainbow (3.1.1)
rbnacl (7.1.1)
@@ -65,7 +84,7 @@ GEM
diff-lcs (>= 1.2.0, < 2.0)
rspec-support (~> 3.12.0)
rspec-support (3.12.1)
- rubocop (1.58.0)
+ rubocop (1.59.0)
json (~> 2.3)
language_server-protocol (>= 3.17.0)
parallel (~> 1.10)
@@ -92,6 +111,11 @@ GEM
faraday-multipart (>= 1)
ruby-progressbar (1.13.0)
ruby2_keywords (0.0.5)
+ signet (0.18.0)
+ addressable (~> 2.8)
+ faraday (>= 0.17.5, < 3.a)
+ jwt (>= 1.5, < 3.0)
+ multi_json (~> 1.10)
sweet-moon (0.0.7)
ffi (~> 1.15, >= 1.15.5)
unicode-display_width (2.5.0)
@@ -103,7 +127,7 @@ DEPENDENCIES
nano-bots!
pry-byebug (~> 3.10, >= 3.10.1)
rspec (~> 3.12)
- rubocop (~> 1.58)
+ rubocop (~> 1.59)
rubocop-rspec (~> 2.25)
BUNDLED WITH
diff --git a/README.md b/README.md
index 6e125e8..2fb8700 100644
--- a/README.md
+++ b/README.md
@@ -8,22 +8,28 @@ _Image artificially created by Midjourney through a prompt generated by a Nano B
https://user-images.githubusercontent.com/113217272/238141567-c58a240c-7b67-4b3b-864a-0f49bbf6e22f.mp4
- [Setup](#setup)
- - [Docker](#docker)
+ - [OpenAI ChatGPT](#openai-chatgpt)
+ - [Google Gemini](#google-gemini)
+ - [Docker](#docker)
+ - [OpenAI ChatGPT](#openai-chatgpt-1)
+ - [Google Gemini](#google-gemini-1)
- [Usage](#usage)
- - [Command Line](#command-line)
- - [Library](#library)
+ - [Command Line](#command-line)
+ - [Library](#library)
- [Cartridges](#cartridges)
- - [Tools (Functions)](#tools-functions)
- - [Experimental Clojure Support](#experimental-clojure-support)
- - [Marketplace](#marketplace)
+ - [OpenAI ChatGPT](#openai-chatgpt-2)
+ - [Google Gemini](#google-gemini-2)
+ - [Tools (Functions)](#tools-functions)
+ - [Experimental Clojure Support](#experimental-clojure-support)
+ - [Marketplace](#marketplace)
- [Security and Privacy](#security-and-privacy)
- - [Cryptography](#cryptography)
- - [End-user IDs](#end-user-ids)
- - [Decrypting](#decrypting)
+ - [Cryptography](#cryptography)
+ - [End-user IDs](#end-user-ids)
+ - [Decrypting](#decrypting)
- [Providers](#providers)
- [Debugging](#debugging)
- [Development](#development)
- - [Publish to RubyGems](#publish-to-rubygems)
+ - [Publish to RubyGems](#publish-to-rubygems)
## Setup
@@ -45,6 +51,8 @@ bundle install
For credentials and configurations, relevant environment variables can be set in your `.bashrc`, `.zshrc`, or equivalent files, as well as in your Docker Container or System Environment. Example:
+### OpenAI ChatGPT
+
```sh
export OPENAI_API_ADDRESS=https://api.openai.com
export OPENAI_API_KEY=your-access-token
@@ -69,6 +77,36 @@ NANO_BOTS_END_USER=your-user
# NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
```
+### Google Gemini
+
+Click [here](https://github.com/gbaptista/gemini-ai#credentials) to learn how to obtain your credentials.
+
+```sh
+export GOOGLE_CREDENTIALS_FILE_PATH=google-credentials.json
+export GOOGLE_PROJECT_ID=your-project-id
+export GOOGLE_REGION=us-east4
+
+export NANO_BOTS_ENCRYPTION_PASSWORD=UNSAFE
+export NANO_BOTS_END_USER=your-user
+
+# export NANO_BOTS_STATE_DIRECTORY=/home/user/.local/state/nano-bots
+# export NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
+```
+
+Alternatively, if your current directory has a `.env` file with the environment variables, they will be automatically loaded:
+
+```sh
+GOOGLE_CREDENTIALS_FILE_PATH=google-credentials.json
+GOOGLE_PROJECT_ID=your-project-id
+GOOGLE_REGION=us-east4
+
+NANO_BOTS_ENCRYPTION_PASSWORD=UNSAFE
+NANO_BOTS_END_USER=your-user
+
+# NANO_BOTS_STATE_DIRECTORY=/home/user/.local/state/nano-bots
+# NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
+```
+
## Docker
Clone the repository and copy the Docker Compose template:
@@ -81,6 +119,8 @@ cp docker-compose.example.yml docker-compose.yml
Set your provider credentials and choose your desired directory for the cartridges files:
+### OpenAI ChatGPT
+
```yaml
---
services:
@@ -97,6 +137,28 @@ services:
- ./your-state-path:/root/.local/state/nano-bots
```
+### Google Gemini
+
+```yaml
+---
+services:
+ nano-bots:
+ image: ruby:3.2.2-slim-bookworm
+ command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 1.1.2 && bash"
+ environment:
+ GOOGLE_CREDENTIALS_FILE_PATH: /root/.config/google-credentials.json
+ GOOGLE_PROJECT_ID: your-project-id
+ GOOGLE_REGION: us-east4
+ NANO_BOTS_ENCRYPTION_PASSWORD: UNSAFE
+ NANO_BOTS_END_USER: your-user
+ volumes:
+ - ./google-credentials.json:/root/.config/google-credentials.json
+ - ./your-cartridges:/root/.local/share/nano-bots/cartridges
+ - ./your-state-path:/root/.local/state/nano-bots
+```
+
+### Container
+
Enter the container:
```sh
docker compose run nano-bots
@@ -246,8 +308,16 @@ end
## Cartridges
+Check the Nano Bots specification to learn more about [how to build cartridges](https://spec.nbots.io/#/README?id=cartridges).
+
+Try the [Nano Bots Clinic (Live Editor)](https://clinic.nbots.io) to learn about creating Cartridges.
+
Here's what a Nano Bot Cartridge looks like:
+### OpenAI ChatGPT
+
+Read the [full specification](https://spec.nbots.io/#/README?id=open-ai-chatgpt) for OpenAI ChatGPT.
+
```yaml
---
meta:
@@ -269,12 +339,36 @@ provider:
access-token: ENV/OPENAI_API_KEY
settings:
user: ENV/NANO_BOTS_END_USER
- model: gpt-3.5-turbo
+ model: gpt-4-1106-preview
```
-Check the Nano Bots specification to learn more about [how to build cartridges](https://spec.nbots.io/#/README?id=cartridges).
+### Google Gemini
-Try the [Nano Bots Clinic (Live Editor)](https://clinic.nbots.io) to learn about creating Cartridges.
+Read the [full specification](https://spec.nbots.io/#/README?id=google-gemini) for Google Gemini.
+
+```yaml
+---
+meta:
+ symbol: 🤖
+ name: Nano Bot Name
+ author: Your Name
+ version: 1.0.0
+ license: CC0-1.0
+ description: A helpful assistant.
+
+behaviors:
+ interaction:
+ directive: You are a helpful assistant.
+
+provider:
+ id: google
+ credentials:
+ project-id: ENV/GOOGLE_PROJECT_ID
+ file-path: ENV/GOOGLE_CREDENTIALS_FILE_PATH
+ region: ENV/GOOGLE_REGION
+ options:
+ model: gemini-pro
+```
### Tools (Functions)
@@ -301,22 +395,9 @@ The randomly generated number is 59.
🤖> |
```
+To successfully use Tools (Functions), you need to specify a provider and a model that support them. As of the writing of this README, the providers that support them are [OpenAI](https://platform.openai.com/docs/models), with models `gpt-3.5-turbo-1106` and `gpt-4-1106-preview`, and [Google](https://cloud.google.com/vertex-ai/docs/generative-ai/multimodal/function-calling#supported_models), with the model `gemini-pro`.
-To successfully use Tools (Functions), you need to specify a provider and a model that support them. As of the writing of this README, the provider that supports them is [OpenAI](https://platform.openai.com/docs/models), with models `gpt-3.5-turbo-1106` and `gpt-4-1106-preview`:
-
-```yaml
----
-provider:
- id: openai
- credentials:
- address: ENV/OPENAI_API_ADDRESS
- access-token: ENV/OPENAI_API_KEY
- settings:
- user: ENV/NANO_BOTS_END_USER
- model: gpt-4-1106-preview
-```
-
-Check the [Nano Bots specification](https://spec.nbots.io/#/README?id=tools-functions-2) to learn more about them.
+Check the [Nano Bots specification](https://spec.nbots.io/#/README?id=tools-functions-2) to learn more about Tools (Functions).
#### Experimental Clojure Support
@@ -469,13 +550,16 @@ If you lose your password, you lose your data. It is not possible to recover it
Currently supported providers:
-- [x] [FastChat (Vicuna)](https://github.com/lm-sys/FastChat)
-- [x] [Open AI](https://platform.openai.com/docs/api-reference)
-- [ ] [Google PaLM](https://developers.generativeai.google/)
-- [ ] [Alpaca](https://github.com/tatsu-lab/stanford_alpaca)
-- [ ] [LLaMA](https://github.com/facebookresearch/llama)
+- [x] [Open AI ChatGPT](https://platform.openai.com/docs/api-reference)
+- [x] [Google Gemini](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini)
+- [ ] [Anthropic Claude](https://www.anthropic.com)
+- [ ] [Cohere Command](https://cohere.com)
+- [ ] [Meta Llama](https://ai.meta.com/llama/)
+- [ ] [01.AI Yi](https://01.ai)
+- [ ] [WizardLM](https://wizardlm.github.io)
+- [ ] [LMSYS Org FastChat Vicuna](https://github.com/lm-sys/FastChat)
-Although only OpenAI has been officially tested, some of the open-source providers offer APIs that are compatible with OpenAI, such as [FastChat](https://github.com/lm-sys/FastChat#openai-compatible-restful-apis--sdk). Therefore, it is highly probable that they will work just fine.
+Although only OpenAI ChatGPT and Google Gemini have been officially tested, some alternative providers — such as [FastChat](https://github.com/lm-sys/FastChat#openai-compatible-restful-apis--sdk) — offer APIs that are compatible with OpenAI's. Therefore, it is highly probable that they will work just fine.
## Development
diff --git a/components/provider.rb b/components/provider.rb
index 3414009..d83319f 100644
--- a/components/provider.rb
+++ b/components/provider.rb
@@ -1,8 +1,7 @@
# frozen_string_literal: true
-require 'openai'
-
require_relative 'providers/openai'
+require_relative 'providers/google'
module NanoBot
module Components
@@ -11,6 +10,8 @@ module NanoBot
case provider[:id]
when 'openai'
Providers::OpenAI.new(provider[:settings], provider[:credentials], environment:)
+ when 'google'
+ Providers::Google.new(provider[:options], provider[:settings], provider[:credentials], environment:)
else
raise "Unsupported provider \"#{provider[:id]}\""
end
diff --git a/components/providers/google.rb b/components/providers/google.rb
new file mode 100644
index 0000000..f847677
--- /dev/null
+++ b/components/providers/google.rb
@@ -0,0 +1,202 @@
+# frozen_string_literal: true
+
+require 'gemini-ai'
+
+require_relative 'base'
+
+require_relative '../../logic/providers/google/tools'
+require_relative '../../logic/providers/google/tokens'
+
+require_relative 'tools'
+
+module NanoBot
+ module Components
+ module Providers
+ class Google < Base
+ SETTINGS = {
+ generationConfig: %i[
+ temperature topP topK candidateCount maxOutputTokens stopSequences
+ ].freeze
+ }.freeze
+
+ SAFETY_SETTINGS = %i[category threshold].freeze
+
+ attr_reader :settings
+
+ def initialize(options, settings, credentials, _environment)
+ @settings = settings
+
+ @client = Gemini.new(
+ credentials: {
+ file_path: credentials[:'file-path'],
+ project_id: credentials[:'project-id'],
+ region: credentials[:region]
+ },
+ settings: { model: options[:model], stream: options[:stream] }
+ )
+ end
+
+ def evaluate(input, streaming, cartridge, &feedback)
+ messages = input[:history].map do |event|
+ if event[:message].nil? && event[:meta] && event[:meta][:tool_calls]
+ { role: 'model',
+ parts: event[:meta][:tool_calls],
+ _meta: { at: event[:at] } }
+ elsif event[:who] == 'tool'
+ { role: 'function',
+ parts: [
+ { functionResponse: {
+ name: event[:meta][:name],
+ response: { name: event[:meta][:name], content: event[:message].to_s }
+ } }
+ ],
+ _meta: { at: event[:at] } }
+ else
+ { role: event[:who] == 'user' ? 'user' : 'model',
+ parts: { text: event[:message] },
+ _meta: { at: event[:at] } }
+ end
+ end
+
+ %i[backdrop directive].each do |key|
+ next unless input[:behavior][key]
+
+ # TODO: Does Gemini have system messages?
+ messages.prepend(
+ { role: key == :directive ? 'user' : 'user',
+ parts: { text: input[:behavior][key] },
+ _meta: { at: Time.now } }
+ )
+ end
+
+ payload = { contents: messages, generationConfig: { candidateCount: 1 } }
+
+ if @settings
+ SETTINGS.each_key do |key|
+ SETTINGS[key].each do |sub_key|
+ if @settings.key?(key) && @settings[key].key?(sub_key)
+ payload[key] = {} unless payload.key?(key)
+ payload[key][sub_key] = @settings[key][sub_key]
+ end
+ end
+ end
+
+ if @settings[:safetySettings].is_a?(Array)
+ payload[:safetySettings] = [] unless payload.key?(:safetySettings)
+
+ @settings[:safetySettings].each do |safety_setting|
+ setting = {}
+ SAFETY_SETTINGS.each { |key| setting[key] = safety_setting[key] }
+ payload[:safetySettings] << setting
+ end
+ end
+ end
+
+ if input[:tools]
+ payload[:tools] = {
+ function_declarations: input[:tools].map { |raw| Logic::Google::Tools.adapt(raw) }
+ }
+ end
+
+ if streaming
+ content = ''
+ tools = []
+
+ stream_call_back = proc do |event, _parsed, _raw|
+ partial_content = event.dig('candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('text')
+ end.map { |part| part['text'] }.join
+
+ partial_tools = event.dig('candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('functionCall')
+ end
+
+ tools.concat(partial_tools) if partial_tools.size.positive?
+
+ if partial_content
+ content += partial_content
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: partial_content } }
+ )
+ end
+
+ if event.dig('candidates', 0, 'finishReason')
+ if tools&.size&.positive?
+ feedback.call(
+ { should_be_stored: true,
+ needs_another_round: true,
+ interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
+ )
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::Google::Tools
+ ).each do |interaction|
+ feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
+ end
+ end
+
+ feedback.call(
+ { should_be_stored: !(content.nil? || content == ''),
+ interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+ finished: true }
+ )
+ end
+ end
+
+ begin
+ @client.stream_generate_content(
+ Logic::Google::Tokens.apply_policies!(cartridge, payload),
+ stream: true, &stream_call_back
+ )
+ rescue StandardError => e
+ raise e.class, e.response[:body] if e.response && e.response[:body]
+
+ raise e
+ end
+ else
+ begin
+ result = @client.stream_generate_content(
+ Logic::Google::Tokens.apply_policies!(cartridge, payload),
+ stream: false
+ )
+ rescue StandardError => e
+ raise e.class, e.response[:body] if e.response && e.response[:body]
+
+ raise e
+ end
+
+ tools = result.dig(0, 'candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('functionCall')
+ end
+
+ if tools&.size&.positive?
+ feedback.call(
+ { should_be_stored: true,
+ needs_another_round: true,
+ interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
+ )
+
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::Google::Tools
+ ).each do |interaction|
+ feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
+ end
+ end
+
+ content = result.map do |answer|
+ answer.dig('candidates', 0, 'content', 'parts').filter do |part|
+ part.key?('text')
+ end.map { |part| part['text'] }.join
+ end.join
+
+ feedback.call(
+ { should_be_stored: !(content.nil? || content.to_s.strip == ''),
+ interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+ finished: true }
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/components/providers/openai.rb b/components/providers/openai.rb
index 6384181..f6eafd4 100644
--- a/components/providers/openai.rb
+++ b/components/providers/openai.rb
@@ -6,9 +6,9 @@ require_relative 'base'
require_relative '../crypto'
require_relative '../../logic/providers/openai/tools'
-require_relative '../../controllers/interfaces/tools'
+require_relative '../../logic/providers/openai/tokens'
-require_relative 'openai/tools'
+require_relative 'tools'
module NanoBot
module Components
@@ -18,7 +18,7 @@ module NanoBot
CHAT_SETTINGS = %i[
model stream temperature top_p n stop max_tokens
- presence_penalty frequency_penalty logit_bias
+ presence_penalty frequency_penalty logit_bias seed response_format
].freeze
attr_reader :settings
@@ -40,12 +40,18 @@ module NanoBot
def evaluate(input, streaming, cartridge, &feedback)
messages = input[:history].map do |event|
if event[:message].nil? && event[:meta] && event[:meta][:tool_calls]
- { role: 'assistant', content: nil, tool_calls: event[:meta][:tool_calls] }
+ { role: 'assistant', content: nil,
+ tool_calls: event[:meta][:tool_calls],
+ _meta: { at: event[:at] } }
elsif event[:who] == 'tool'
{ role: event[:who], content: event[:message].to_s,
- tool_call_id: event[:meta][:id], name: event[:meta][:name] }
+ tool_call_id: event[:meta][:id],
+ name: event[:meta][:name],
+ _meta: { at: event[:at] } }
else
- { role: event[:who] == 'user' ? 'user' : 'assistant', content: event[:message] }
+ { role: event[:who] == 'user' ? 'user' : 'assistant',
+ content: event[:message],
+ _meta: { at: event[:at] } }
end
end
@@ -54,7 +60,8 @@ module NanoBot
messages.prepend(
{ role: key == :directive ? 'system' : 'user',
- content: input[:behavior][key] }
+ content: input[:behavior][key],
+ _meta: { at: Time.now } }
)
end
@@ -66,7 +73,7 @@ module NanoBot
payload.delete(:logit_bias) if payload.key?(:logit_bias) && payload[:logit_bias].nil?
- payload[:tools] = input[:tools].map { |raw| NanoBot::Logic::OpenAI::Tools.adapt(raw) } if input[:tools]
+ payload[:tools] = input[:tools].map { |raw| Logic::OpenAI::Tools.adapt(raw) } if input[:tools]
if streaming
content = ''
@@ -114,13 +121,15 @@ module NanoBot
needs_another_round: true,
interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
)
- Tools.apply(cartridge, input[:tools], tools, feedback).each do |interaction|
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::OpenAI::Tools
+ ).each do |interaction|
feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
end
end
feedback.call(
- { should_be_stored: !(content.nil? || content == ''),
+ { should_be_stored: !(content.nil? || content.to_s.strip == ''),
interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
finished: true }
)
@@ -128,7 +137,7 @@ module NanoBot
end
begin
- @client.chat(parameters: payload)
+ @client.chat(parameters: Logic::OpenAI::Tokens.apply_policies!(cartridge, payload))
rescue StandardError => e
raise e.class, e.response[:body] if e.response && e.response[:body]
@@ -136,7 +145,7 @@ module NanoBot
end
else
begin
- result = @client.chat(parameters: payload)
+ result = @client.chat(parameters: Logic::OpenAI::Tokens.apply_policies!(cartridge, payload))
rescue StandardError => e
raise e.class, e.response[:body] if e.response && e.response[:body]
@@ -153,7 +162,9 @@ module NanoBot
needs_another_round: true,
interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
)
- Tools.apply(cartridge, input[:tools], tools, feedback).each do |interaction|
+ Tools.apply(
+ cartridge, input[:tools], tools, feedback, Logic::OpenAI::Tools
+ ).each do |interaction|
feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
end
end
diff --git a/components/providers/openai/tools.rb b/components/providers/openai/tools.rb
deleted file mode 100644
index cd35e80..0000000
--- a/components/providers/openai/tools.rb
+++ /dev/null
@@ -1,101 +0,0 @@
-# frozen_string_literal: true
-
-require_relative '../../embedding'
-require_relative '../../../logic/cartridge/safety'
-
-require 'concurrent'
-
-module NanoBot
- module Components
- module Providers
- class OpenAI < Base
- module Tools
- def self.confirming(tool, feedback)
- feedback.call(
- { should_be_stored: false,
- interaction: { who: 'AI', message: nil, meta: {
- tool: { action: 'confirming', id: tool[:id], name: tool[:name], parameters: tool[:parameters] }
- } } }
- )
- end
-
- def self.apply(cartridge, function_cartridge, tools, feedback)
- prepared_tools = NanoBot::Logic::OpenAI::Tools.prepare(function_cartridge, tools)
-
- if Logic::Cartridge::Safety.confirmable?(cartridge)
- prepared_tools.each { |tool| tool[:allowed] = confirming(tool, feedback) }
- else
- prepared_tools.each { |tool| tool[:allowed] = true }
- end
-
- futures = prepared_tools.map do |tool|
- Concurrent::Promises.future do
- if tool[:allowed]
- process!(tool, feedback, function_cartridge, cartridge)
- else
- tool[:output] =
- "We asked the user you're chatting with for permission, but the user did not allow you to run this tool or function."
- tool
- end
- end
- end
-
- results = Concurrent::Promises.zip(*futures).value!
-
- results.map do |applied_tool|
- {
- who: 'tool',
- message: applied_tool[:output],
- meta: { id: applied_tool[:id], name: applied_tool[:name] }
- }
- end
- end
-
- def self.process!(tool, feedback, _function_cartridge, cartridge)
- feedback.call(
- { should_be_stored: false,
- interaction: { who: 'AI', message: nil, meta: {
- tool: { action: 'executing', id: tool[:id], name: tool[:name], parameters: tool[:parameters] }
- } } }
- )
-
- call = {
- parameters: %w[parameters],
- values: [tool[:parameters]],
- safety: { sandboxed: Logic::Cartridge::Safety.sandboxed?(cartridge) }
- }
-
- if %i[fennel lua clojure].count { |key| !tool[:source][key].nil? } > 1
- raise StandardError, 'conflicting tools'
- end
-
- if !tool[:source][:fennel].nil?
- call[:source] = tool[:source][:fennel]
- tool[:output] = Components::Embedding.fennel(**call)
- elsif !tool[:source][:clojure].nil?
- call[:source] = tool[:source][:clojure]
- tool[:output] = Components::Embedding.clojure(**call)
- elsif !tool[:source][:lua].nil?
- call[:source] = tool[:source][:lua]
- tool[:output] = Components::Embedding.lua(**call)
- else
- raise 'missing source code'
- end
-
- feedback.call(
- { should_be_stored: false,
- interaction: { who: 'AI', message: nil, meta: {
- tool: {
- action: 'responding', id: tool[:id], name: tool[:name],
- parameters: tool[:parameters], output: tool[:output]
- }
- } } }
- )
-
- tool
- end
- end
- end
- end
- end
-end
diff --git a/components/providers/tools.rb b/components/providers/tools.rb
new file mode 100644
index 0000000..122bc14
--- /dev/null
+++ b/components/providers/tools.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+require_relative '../embedding'
+require_relative '../../logic/cartridge/safety'
+
+require 'concurrent'
+
+module NanoBot
+ module Components
+ module Providers
+ module Tools
+ def self.confirming(tool, feedback)
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: nil, meta: {
+ tool: { action: 'confirming', id: tool[:id], name: tool[:label], parameters: tool[:parameters] }
+ } } }
+ )
+ end
+
+ def self.apply(cartridge, function_cartridge, tools, feedback, tools_logic)
+ prepared_tools = tools_logic.prepare(function_cartridge, tools)
+
+ if Logic::Cartridge::Safety.confirmable?(cartridge)
+ prepared_tools.each { |tool| tool[:allowed] = confirming(tool, feedback) }
+ else
+ prepared_tools.each { |tool| tool[:allowed] = true }
+ end
+
+ futures = prepared_tools.map do |tool|
+ Concurrent::Promises.future do
+ if tool[:allowed]
+ process!(tool, feedback, function_cartridge, cartridge)
+ else
+ tool[:output] =
+ "We asked the user you're chatting with for permission, but the user did not allow you to run this tool or function."
+ tool
+ end
+ end
+ end
+
+ results = Concurrent::Promises.zip(*futures).value!
+
+ results.map do |applied_tool|
+ {
+ who: 'tool',
+ message: applied_tool[:output],
+ meta: { id: applied_tool[:id], name: applied_tool[:name] }
+ }
+ end
+ end
+
+ def self.process!(tool, feedback, _function_cartridge, cartridge)
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: nil, meta: {
+ tool: { action: 'executing', id: tool[:id], name: tool[:label], parameters: tool[:parameters] }
+ } } }
+ )
+
+ call = {
+ parameters: %w[parameters],
+ values: [tool[:parameters]],
+ safety: { sandboxed: Logic::Cartridge::Safety.sandboxed?(cartridge) }
+ }
+
+ if %i[fennel lua clojure].count { |key| !tool[:source][key].nil? } > 1
+ raise StandardError, 'conflicting tools'
+ end
+
+ if !tool[:source][:fennel].nil?
+ call[:source] = tool[:source][:fennel]
+ tool[:output] = Components::Embedding.fennel(**call)
+ elsif !tool[:source][:clojure].nil?
+ call[:source] = tool[:source][:clojure]
+ tool[:output] = Components::Embedding.clojure(**call)
+ elsif !tool[:source][:lua].nil?
+ call[:source] = tool[:source][:lua]
+ tool[:output] = Components::Embedding.lua(**call)
+ else
+ raise 'missing source code'
+ end
+
+ feedback.call(
+ { should_be_stored: false,
+ interaction: { who: 'AI', message: nil, meta: {
+ tool: {
+ action: 'responding', id: tool[:id], name: tool[:label],
+ parameters: tool[:parameters], output: tool[:output]
+ }
+ } } }
+ )
+
+ tool
+ end
+ end
+ end
+ end
+end
diff --git a/controllers/session.rb b/controllers/session.rb
index aabcb41..e12ab86 100644
--- a/controllers/session.rb
+++ b/controllers/session.rb
@@ -63,6 +63,7 @@ module NanoBot
behavior = Logic::Helpers::Hash.fetch(@cartridge, %i[behaviors boot]) || {}
@state[:history] << {
+ at: Time.now,
who: 'user',
mode: mode.to_s,
input: instruction,
@@ -78,6 +79,7 @@ module NanoBot
behavior = Logic::Helpers::Hash.fetch(@cartridge, %i[behaviors interaction]) || {}
@state[:history] << {
+ at: Time.now,
who: 'user',
mode: mode.to_s,
input: message,
@@ -159,7 +161,10 @@ module NanoBot
end
end
- @state[:history] << event if feedback[:should_be_stored]
+ if feedback[:should_be_stored]
+ event[:at] = Time.now
+ @state[:history] << event
+ end
if event[:output] && ((!feedback[:finished] && streaming) || (!streaming && feedback[:finished]))
self.print(color ? Rainbow(event[:output]).send(color) : event[:output])
diff --git a/logic/cartridge/streaming.rb b/logic/cartridge/streaming.rb
index a0f8700..6949b3a 100644
--- a/logic/cartridge/streaming.rb
+++ b/logic/cartridge/streaming.rb
@@ -7,7 +7,14 @@ module NanoBot
module Cartridge
module Streaming
def self.enabled?(cartridge, interface)
- return false if Helpers::Hash.fetch(cartridge, %i[provider settings stream]) == false
+ provider_stream = case Helpers::Hash.fetch(cartridge, %i[provider id])
+ when 'openai'
+ Helpers::Hash.fetch(cartridge, %i[provider settings stream])
+ when 'google'
+ Helpers::Hash.fetch(cartridge, %i[provider options stream])
+ end
+
+ return false if provider_stream == false
specific_interface = Helpers::Hash.fetch(cartridge, [:interfaces, interface, :output, :stream])
diff --git a/logic/providers/google/tokens.rb b/logic/providers/google/tokens.rb
new file mode 100644
index 0000000..3d5492f
--- /dev/null
+++ b/logic/providers/google/tokens.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'openai'
+
+module NanoBot
+ module Logic
+ module Google
+ module Tokens
+ def self.apply_policies!(_cartridge, payload)
+ payload[:contents] = payload[:contents].map { |message| message.except(:_meta) }
+ payload
+ end
+ end
+ end
+ end
+end
diff --git a/logic/providers/google/tools.rb b/logic/providers/google/tools.rb
new file mode 100644
index 0000000..e1396d6
--- /dev/null
+++ b/logic/providers/google/tools.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'json'
+require 'babosa'
+
+require_relative '../../helpers/hash'
+
+module NanoBot
+ module Logic
+ module Google
+ module Tools
+ def self.prepare(cartridge, tools)
+ applies = []
+
+ tools = Marshal.load(Marshal.dump(tools))
+
+ tools.each do |tool|
+ tool = Helpers::Hash.symbolize_keys(tool)
+
+ cartridge.each do |candidate|
+ candidate_key = candidate[:name].to_slug.normalize.gsub('-', '_')
+ tool_key = tool[:functionCall][:name].to_slug.normalize.gsub('-', '_')
+
+ next unless candidate_key == tool_key
+
+ source = {}
+
+ source[:clojure] = candidate[:clojure] if candidate[:clojure]
+ source[:fennel] = candidate[:fennel] if candidate[:fennel]
+ source[:lua] = candidate[:lua] if candidate[:lua]
+
+ applies << {
+ label: candidate[:name],
+ name: tool[:functionCall][:name],
+ type: 'function',
+ parameters: tool[:functionCall][:args],
+ source:
+ }
+ end
+ end
+
+ raise 'missing tool' if applies.size != tools.size
+
+ applies
+ end
+
+ def self.adapt(cartridge)
+ output = {
+ name: cartridge[:name],
+ description: cartridge[:description]
+ }
+
+ output[:parameters] = (cartridge[:parameters] || { type: 'object', properties: {} })
+
+ output
+ end
+ end
+ end
+ end
+end
diff --git a/logic/providers/openai/tokens.rb b/logic/providers/openai/tokens.rb
new file mode 100644
index 0000000..60efa60
--- /dev/null
+++ b/logic/providers/openai/tokens.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'openai'
+
+module NanoBot
+ module Logic
+ module OpenAI
+ module Tokens
+ def self.apply_policies!(_cartridge, payload)
+ payload[:messages] = payload[:messages].map { |message| message.except(:_meta) }
+ payload
+ end
+ end
+ end
+ end
+end
diff --git a/logic/providers/openai/tools.rb b/logic/providers/openai/tools.rb
index 1b2882a..f00176c 100644
--- a/logic/providers/openai/tools.rb
+++ b/logic/providers/openai/tools.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
require 'json'
+require 'babosa'
require_relative '../../helpers/hash'
@@ -17,7 +18,10 @@ module NanoBot
tool = Helpers::Hash.symbolize_keys(tool)
cartridge.each do |candidate|
- next unless tool[:function][:name] == candidate[:name]
+ candidate_key = candidate[:name].to_slug.normalize.gsub('-', '_')
+ tool_key = tool[:function][:name].to_slug.normalize.gsub('-', '_')
+
+ next unless candidate_key == tool_key
source = {}
@@ -27,6 +31,7 @@ module NanoBot
applies << {
id: tool[:id],
+ label: candidate[:name],
name: tool[:function][:name],
type: 'function',
parameters: JSON.parse(tool[:function][:arguments]),
diff --git a/nano-bots.gemspec b/nano-bots.gemspec
index 8cd70d3..f11fd58 100644
--- a/nano-bots.gemspec
+++ b/nano-bots.gemspec
@@ -34,7 +34,7 @@ Gem::Specification.new do |spec|
spec.add_dependency 'babosa', '~> 2.0'
spec.add_dependency 'concurrent-ruby', '~> 1.2', '>= 1.2.2'
spec.add_dependency 'dotenv', '~> 2.8', '>= 2.8.1'
- spec.add_dependency 'faraday', '~> 2.7', '>= 2.7.12'
+ spec.add_dependency 'gemini-ai', '~> 1.0'
spec.add_dependency 'pry', '~> 0.14.2'
spec.add_dependency 'rainbow', '~> 3.1', '>= 3.1.1'
spec.add_dependency 'rbnacl', '~> 7.1', '>= 7.1.1'
diff --git a/spec/data/cartridges/streaming.yml b/spec/data/cartridges/streaming.yml
index 8234d34..e004110 100644
--- a/spec/data/cartridges/streaming.yml
+++ b/spec/data/cartridges/streaming.yml
@@ -10,5 +10,6 @@ interfaces:
stream: true
provider:
+ id: openai
settings:
stream: true
diff --git a/spec/data/providers/google/tools.yml b/spec/data/providers/google/tools.yml
new file mode 100644
index 0000000..a5c53af
--- /dev/null
+++ b/spec/data/providers/google/tools.yml
@@ -0,0 +1,9 @@
+---
+- functionCall:
+ name: get_current_weather
+ args:
+ location: Tokyo, Japan
+- functionCall:
+ name: what_time_is_it
+ args:
+ timezone: local
diff --git a/spec/logic/cartridge/streaming_spec.rb b/spec/logic/cartridge/streaming_spec.rb
index 466dd0b..4b71dfd 100644
--- a/spec/logic/cartridge/streaming_spec.rb
+++ b/spec/logic/cartridge/streaming_spec.rb
@@ -7,11 +7,22 @@ require_relative '../../../logic/cartridge/streaming'
RSpec.describe NanoBot::Logic::Cartridge::Streaming do
context 'interfaces override' do
context 'defaults' do
- let(:cartridge) { {} }
+ context 'openai' do
+ let(:cartridge) { { provider: { id: 'openai' } } }
- it 'uses default values when appropriate' do
- expect(described_class.enabled?(cartridge, :repl)).to be(true)
- expect(described_class.enabled?(cartridge, :eval)).to be(true)
+ it 'uses default values when appropriate' do
+ expect(described_class.enabled?(cartridge, :repl)).to be(true)
+ expect(described_class.enabled?(cartridge, :eval)).to be(true)
+ end
+ end
+
+ context 'google' do
+ let(:cartridge) { { provider: { id: 'google' } } }
+
+ it 'uses default values when appropriate' do
+ expect(described_class.enabled?(cartridge, :repl)).to be(true)
+ expect(described_class.enabled?(cartridge, :eval)).to be(true)
+ end
end
end
diff --git a/spec/logic/providers/google/tools_spec.rb b/spec/logic/providers/google/tools_spec.rb
new file mode 100644
index 0000000..149a5c4
--- /dev/null
+++ b/spec/logic/providers/google/tools_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'yaml'
+
+require_relative '../../../../logic/providers/google/tools'
+
+RSpec.describe NanoBot::Logic::Google::Tools do
+ context 'tools' do
+ let(:cartridge) { load_symbolized('cartridges/tools.yml') }
+
+ context 'adapt' do
+ it 'adapts to Google expected format' do
+ expect(described_class.adapt(cartridge[:tools][0])).to eq(
+ { name: 'what-time-is-it',
+ description: 'Returns the current date and time for a given timezone.',
+ parameters: {
+ type: 'object',
+ properties: {
+ timezone: {
+ type: 'string',
+ description: 'A string representing the timezone that should be used to provide a datetime, following the IANA (Internet Assigned Numbers Authority) Time Zone Database. Examples are "Asia/Tokyo" and "Europe/Paris".'
+ }
+ },
+ required: ['timezone']
+ } }
+ )
+
+ expect(described_class.adapt(cartridge[:tools][1])).to eq(
+ { name: 'get-current-weather',
+ description: 'Get the current weather in a given location.',
+ parameters: {
+ type: 'object',
+ properties: { location: { type: 'string' }, unit: { type: 'string' } }
+ } }
+ )
+
+ expect(described_class.adapt(cartridge[:tools][2])).to eq(
+ { name: 'sh',
+ description: "It has access to computer users' data and can be used to run shell commands, similar to those in a Linux terminal, to extract information. Please be mindful and careful to avoid running dangerous commands on users' computers.",
+ parameters: {
+ type: 'object',
+ properties: {
+ command: {
+ type: 'array',
+ description: 'An array of strings that represents a shell command along with its arguments or options. For instance, `["df", "-h"]` executes the `df -h` command, where each array element specifies either the command itself or an associated argument/option.',
+ items: { type: 'string' }
+ }
+ }
+ } }
+ )
+
+ expect(described_class.adapt(cartridge[:tools][3])).to eq(
+ { name: 'clock',
+ description: 'Returns the current date and time.',
+ parameters: { type: 'object', properties: {} } }
+ )
+ end
+ end
+
+ context 'prepare' do
+ let(:tools) { load_symbolized('providers/google/tools.yml') }
+
+ it 'prepare tools to be executed' do
+ expect(described_class.prepare(cartridge[:tools], tools)).to eq(
+ [{ name: 'get_current_weather',
+ label: 'get-current-weather',
+ type: 'function',
+ parameters: { location: 'Tokyo, Japan' },
+ source: { fennel: "(let [{:location location :unit unit} parameters]\n (.. \"Here is the weather in \" location \", in \" unit \": 35.8°C.\"))\n" } },
+ { name: 'what_time_is_it',
+ label: 'what-time-is-it',
+ type: 'function', parameters: { timezone: 'local' },
+ source: { fennel: "(os.date)\n" } }]
+ )
+ end
+ end
+ end
+end
diff --git a/spec/logic/providers/openai/tools_spec.rb b/spec/logic/providers/openai/tools_spec.rb
index 949d097..9757c17 100644
--- a/spec/logic/providers/openai/tools_spec.rb
+++ b/spec/logic/providers/openai/tools_spec.rb
@@ -78,10 +78,15 @@ RSpec.describe NanoBot::Logic::OpenAI::Tools do
expect(described_class.prepare(cartridge[:tools], tools)).to eq(
[{ id: 'call_XYZ',
name: 'get-current-weather',
+ label: 'get-current-weather',
type: 'function',
parameters: { 'location' => 'Tokyo, Japan' },
source: { fennel: "(let [{:location location :unit unit} parameters]\n (.. \"Here is the weather in \" location \", in \" unit \": 35.8°C.\"))\n" } },
- { id: 'call_ZYX', name: 'what-time-is-it', type: 'function', parameters: {},
+ { id: 'call_ZYX',
+ name: 'what-time-is-it',
+ label: 'what-time-is-it',
+ type: 'function',
+ parameters: {},
source: { fennel: "(os.date)\n" } }]
)
end
diff --git a/static/cartridges/default.yml b/static/cartridges/default.yml
index 98dd47b..fbf449b 100644
--- a/static/cartridges/default.yml
+++ b/static/cartridges/default.yml
@@ -30,5 +30,7 @@ interfaces:
feedback: true
provider:
+ options:
+ stream: true
settings:
stream: true
diff --git a/static/gem.rb b/static/gem.rb
index bd2218b..b927f4c 100644
--- a/static/gem.rb
+++ b/static/gem.rb
@@ -5,8 +5,8 @@ module NanoBot
name: 'nano-bots',
version: '1.1.2',
author: 'icebaker',
- summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots',
- description: 'Ruby Implementation of Nano Bots: small, AI-powered bots easily shared as a single file, designed to support multiple providers such as Vicuna, OpenAI ChatGPT, Google PaLM, Alpaca, and LLaMA.',
+ summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots for OpenAI ChatGPT and Google Gemini.',
+ description: 'Ruby Implementation of Nano Bots: small, AI-powered bots that can be easily shared as a single file, designed to support multiple providers such as OpenAI ChatGPT and Google Gemini, with support for calling Tools (Functions).',
github: 'https://github.com/icebaker/ruby-nano-bots',
gem_server: 'https://rubygems.org',
license: 'MIT',