Module: SlackSmartBot::AI::OpenAI

Defined in:
lib/slack/smart-bot/ai/open_ai/models.rb,
lib/slack/smart-bot/ai/open_ai/connect.rb,
lib/slack/smart-bot/ai/open_ai/send_gpt_chat.rb,
lib/slack/smart-bot/ai/open_ai/send_image_edit.rb,
lib/slack/smart-bot/ai/open_ai/whisper_transcribe.rb,
lib/slack/smart-bot/ai/open_ai/send_image_variation.rb,
lib/slack/smart-bot/ai/open_ai/send_image_generation.rb

Class Method Summary

  .connect(ai_open_ai, general_config, personal_settings, reconnect: false, service: :chat_gpt) ⇒ Object
  .models(open_ai_client, models_config, model = "", return_response: false) ⇒ Object
  .send_gpt_chat(open_ai_client, model, messages, chat_gpt_config) ⇒ Object
  .send_image_edit(open_api_client, image, message, size: "") ⇒ Object
  .send_image_generation(open_ai_client, message, image_size) ⇒ Object
  .send_image_variation(open_ai_client, image, variations, size: "") ⇒ Object
  .whisper_transcribe(open_ai_client, model, file) ⇒ Object

Class Method Details

.connect(ai_open_ai, general_config, personal_settings, reconnect: false, service: :chat_gpt) ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/connect.rb', line 4

def self.connect(ai_open_ai, general_config, personal_settings, reconnect: false, service: :chat_gpt)
  require "openai"
  require "nice_http"
  user = Thread.current[:user]
  team_id_user = Thread.current[:team_id_user]

  ai_open_ai = {} if ai_open_ai.nil?
  ai_open_ai_user = {}

  # ._ai is used to avoid calling the .ai method added by amazing_print
  ai_open_ai_user = {
    host: general_config._ai.open_ai.host,
    access_token: general_config._ai.open_ai.access_token,
    models: {
      client: nil,
      host: general_config._ai.open_ai.models.host,
      access_token: general_config._ai.open_ai.models.access_token,
      url: general_config._ai.open_ai.models.url,
      api_type: general_config._ai.open_ai.models.api_type,
      api_version: general_config._ai.open_ai.models.api_version,
    },
    chat_gpt: {
      client: nil,
      host: general_config._ai.open_ai.chat_gpt.host,
      access_token: general_config._ai.open_ai.chat_gpt.access_token,
      model: general_config._ai.open_ai.chat_gpt.model,
      smartbot_model: general_config._ai.open_ai.chat_gpt.smartbot_model,
      api_type: general_config._ai.open_ai.chat_gpt.api_type,
      api_version: general_config._ai.open_ai.chat_gpt.api_version,
      fixed_user: general_config._ai.open_ai.chat_gpt.fixed_user, #for testing purposes
    },
    dall_e: {
      client: nil,
      host: general_config._ai.open_ai.dall_e.host,
      access_token: general_config._ai.open_ai.dall_e.access_token,
      image_size: general_config._ai.open_ai.dall_e.image_size,
      model: general_config._ai.open_ai.dall_e.model,
    },
    whisper: {
      client: nil,
      host: general_config._ai.open_ai.whisper.host,
      access_token: general_config._ai.open_ai.whisper.access_token,
      model: general_config._ai.open_ai.whisper.model,
    },
  }
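
  # Per-user overrides: any non-empty "ai.open_ai.*" entry in this user's personal settings replaces the value taken from the general config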
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.host") and
     personal_settings[team_id_user]["ai.open_ai.host"] != ""
    ai_open_ai_user[:host] = personal_settings[team_id_user]["ai.open_ai.host"]
    ai_open_ai_user[:chat_gpt][:host] = ai_open_ai_user[:host]
    ai_open_ai_user[:dall_e][:host] = ai_open_ai_user[:host]
    ai_open_ai_user[:whisper][:host] = ai_open_ai_user[:host]
    ai_open_ai_user[:models][:host] = ai_open_ai_user[:host]
  end

  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.access_token") and
     personal_settings[team_id_user]["ai.open_ai.access_token"] != ""
    ai_open_ai_user[:access_token] = personal_settings[team_id_user]["ai.open_ai.access_token"]
    ai_open_ai_user[:chat_gpt][:access_token] = ai_open_ai_user[:access_token]
    ai_open_ai_user[:dall_e][:access_token] = ai_open_ai_user[:access_token]
    ai_open_ai_user[:whisper][:access_token] = ai_open_ai_user[:access_token]
    ai_open_ai_user[:models][:access_token] = ai_open_ai_user[:access_token]
  end

  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.chat_gpt.model") and
     personal_settings[team_id_user]["ai.open_ai.chat_gpt.model"] != ""
    ai_open_ai_user[:chat_gpt][:model] = personal_settings[team_id_user]["ai.open_ai.chat_gpt.model"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.chat_gpt.smartbot_model") and
     personal_settings[team_id_user]["ai.open_ai.chat_gpt.smartbot_model"] != ""
    ai_open_ai_user[:chat_gpt][:smartbot_model] = personal_settings[team_id_user]["ai.open_ai.chat_gpt.smartbot_model"]
  end

  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.whisper.model") and
     personal_settings[team_id_user]["ai.open_ai.whisper.model"] != ""
    ai_open_ai_user[:whisper][:model] = personal_settings[team_id_user]["ai.open_ai.whisper.model"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.dall_e.image_size") and
     personal_settings[team_id_user]["ai.open_ai.dall_e.image_size"] != ""
    ai_open_ai_user[:dall_e][:image_size] = personal_settings[team_id_user]["ai.open_ai.dall_e.image_size"]
  end

  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.chat_gpt.host") and
     personal_settings[team_id_user]["ai.open_ai.chat_gpt.host"] != ""
    ai_open_ai_user[:chat_gpt][:host] = personal_settings[team_id_user]["ai.open_ai.chat_gpt.host"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.dall_e.host") and
     personal_settings[team_id_user]["ai.open_ai.dall_e.host"] != ""
    ai_open_ai_user[:dall_e][:host] = personal_settings[team_id_user]["ai.open_ai.dall_e.host"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.whisper.host") and
     personal_settings[team_id_user]["ai.open_ai.whisper.host"] != ""
    ai_open_ai_user[:whisper][:host] = personal_settings[team_id_user]["ai.open_ai.whisper.host"]
  end

  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.chat_gpt.access_token") and
     personal_settings[team_id_user]["ai.open_ai.chat_gpt.access_token"] != ""
    ai_open_ai_user[:chat_gpt][:access_token] = personal_settings[team_id_user]["ai.open_ai.chat_gpt.access_token"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.dall_e.access_token") and
     personal_settings[team_id_user]["ai.open_ai.dall_e.access_token"] != ""
    ai_open_ai_user[:dall_e][:access_token] = personal_settings[team_id_user]["ai.open_ai.dall_e.access_token"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.whisper.access_token") and
     personal_settings[team_id_user]["ai.open_ai.whisper.access_token"] != ""
    ai_open_ai_user[:whisper][:access_token] = personal_settings[team_id_user]["ai.open_ai.whisper.access_token"]
  end

  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.chat_gpt.api_type") and
     personal_settings[team_id_user]["ai.open_ai.chat_gpt.api_type"] != ""
    ai_open_ai_user[:chat_gpt][:api_type] = personal_settings[team_id_user]["ai.open_ai.chat_gpt.api_type"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.chat_gpt.api_version") and
     personal_settings[team_id_user]["ai.open_ai.chat_gpt.api_version"] != ""
    ai_open_ai_user[:chat_gpt][:api_version] = personal_settings[team_id_user]["ai.open_ai.chat_gpt.api_version"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.models.host") and
      personal_settings[team_id_user]["ai.open_ai.models.host"] != ""
    ai_open_ai_user[:models][:host] = personal_settings[team_id_user]["ai.open_ai.models.host"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.models.access_token") and
      personal_settings[team_id_user]["ai.open_ai.models.access_token"] != ""
    ai_open_ai_user[:models][:access_token] = personal_settings[team_id_user]["ai.open_ai.models.access_token"]
  end
  if personal_settings.key?(team_id_user) and personal_settings[team_id_user].key?("ai.open_ai.models.url") and
      personal_settings[team_id_user]["ai.open_ai.models.url"] != ""
    ai_open_ai_user[:models][:url] = personal_settings[team_id_user]["ai.open_ai.models.url"]
  end

  host = ai_open_ai_user[service].host
  access_token = ai_open_ai_user[service].access_token

  ai_open_ai[team_id_user] ||= ai_open_ai_user.deep_copy

  if ai_open_ai.key?(team_id_user) and ai_open_ai[team_id_user] != nil and ai_open_ai[team_id_user][service].key?(:client) and
     ai_open_ai[team_id_user][service][:client] != nil and !reconnect
    # do nothing, we already have a client and we don't want to reconnect
  elsif access_token.to_s != ""
    ai_open_ai[team_id_user][service] = ai_open_ai_user[service].deep_copy
    if host == ""
      ai_open_ai[team_id_user][service][:client] = ::OpenAI::Client.new(uri_base: "https://api.openai.com/", access_token: access_token, request_timeout: 300)
    else
      if ai_open_ai_user[service].key?(:url) and ai_open_ai_user[service][:url] != ""
        ai_open_ai[team_id_user][service][:client] = NiceHttp.new(host: host, headers: { 'Authorization': "Bearer #{ai_open_ai_user[service][:access_token]}" }, ssl: true, timeout: (30))
      elsif ai_open_ai_user[service].key?(:api_type) and ai_open_ai_user[service][:api_type] == :openai_azure
        if general_config._ai.open_ai.key?(:testing) and general_config._ai.open_ai.testing #and !general_config.simulate #todo: check
          log = "#{general_config.path}/logs/chat_gpt_azure_#{team_id_user}.log"
        else
          log = :no
        end
        ai_open_ai[team_id_user][service][:client] = NiceHttp.new(host: host, headers: { 'api-key': access_token }, ssl: true, timeout: (30), log: log)
      else
        ai_open_ai[team_id_user][service][:client] = ::OpenAI::Client.new(uri_base: host, access_token: access_token, request_timeout: 300)
      end
    end
  else
    ai_open_ai[team_id_user] = nil
    message = ["You need to set the OpenAI access token in the config file or in the personal settings."]
    message << "You can get it from https://platform.openai.com/account/api-keys"
    message << "If you want to use your personal access token, you can set it on a DM with SmartBot in the personal settings:"
    message << "    `set personal settings ai.open_ai.#{service}.access_token ACCESS_TOKEN`"
    if service == :chat_gpt
      message << "By default we will be using the chatgpt model #{general_config._ai.open_ai.chat_gpt.model}. You can change it in the config file or in personal settings:"
      message << "    `set personal settings ai.open_ai.chat_gpt.model gpt-4-0314`"
      message << "For specifying the model for ChatGPT on REPLs: `set personal settings ai.open_ai.chat_gpt.smartbot_model gpt-4-0314`"
    elsif service == :whisper
      message << "By default we will be using the whisper model #{general_config._ai.open_ai.whisper.model}. You can change it in the config file or in personal settings:"
      message << "    `set personal settings ai.open_ai.whisper.model whisper-1`"
    elsif service == :dall_e
      message << "You can also change the image size in the config file or in personal settings:"
      message << "    `set personal settings ai.open_ai.dall_e.image_size 512x512`"
    end
    message << "In case you are a master admin, you can set it in the SmartBot config file:"
    message << "    `ai: { open_ai: { #{service}: { access_token: 'ACCESS_TOKEN'} } }`"
    return ai_open_ai, message.join("\n")
  end
  return ai_open_ai, ""
end
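
Usage sketch (hedged): slack_user, general_config and the team/user id below are hypothetical placeholders for illustration; general_config is assumed to be the SmartBot configuration object (responding to ._ai.open_ai...), and Thread.current is assumed to already carry the Slack user and "<team_id>_<user_id>" string, as the SmartBot sets them before dispatching a command.

Thread.current[:user] = slack_user                # Slack user object for the caller (placeholder)
Thread.current[:team_id_user] = "T01ABC_U01XYZ"   # "<team_id>_<user_id>" (placeholder)

personal_settings = {
  "T01ABC_U01XYZ" => { "ai.open_ai.access_token" => "sk-..." }   # illustrative token
}

ai_open_ai, error = SlackSmartBot::AI::OpenAI.connect({}, general_config, personal_settings, service: :chat_gpt)
if error == ""
  chat_client = ai_open_ai["T01ABC_U01XYZ"][:chat_gpt][:client]
else
  puts error   # instructions for setting the access token
end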

.models(open_ai_client, models_config, model = "", return_response: false) ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/models.rb', line 4

def self.models(open_ai_client, models_config, model = "", return_response: false)
  require "openai"
  require "amazing_print"
  user = Thread.current[:user]
  models = []
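  # An empty model (or "chatgpt") lists the available models; any other value retrieves the details of that single model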
  if model.empty? or model == "chatgpt"
    if open_ai_client.is_a?(NiceHttp) and models_config.url != ""
      resp = open_ai_client.get(models_config.url)
      #save resp to a file
      resp.body.json.data.each do |m|
        if model.empty? or (model == "chatgpt" and
                            (m[:model_name].to_s.include?("gpt-") or (m.key?(:model_info) and m[:model_info][:mode].to_s == "chat")))
          models << m[:model_name]
        end
      end
    elsif open_ai_client.is_a?(NiceHttp) #azure
      #todo: consider filtering by model mode
      resp = open_ai_client.get("/openai/deployments?api-version=#{models_config.api_version}")
      models = resp.body.json(:id)
      models.flatten!
      models.select! { |i| i.include?("gpt-") } if model == "chatgpt"
    else
      #todo: consider filtering by model mode
      response = open_ai_client.models.list
      models = []
      response.data.each do |m|
        models << m["id"]
      end
      models.select! { |i| i.include?("gpt-") } if model == "chatgpt"
    end
    if return_response
      return models.uniq.sort
    else
      return models.uniq.sort.join("\n")
    end
  else
    response_obj = {}
    if open_ai_client.is_a?(NiceHttp) and models_config.url != ""
      resp = open_ai_client.get(models_config.url)
      result = {}
      resp.data.json.data.each do |m|
        if m[:model_name].to_s == model
          result = m
          break
        end
      end
      if result.empty?
        response = { message: "Model not found" }
        response_obj = response
      else
        response = { message: "" }
        result[:model_info].each do |k, v|
          response.message += "#{k}: #{v}\n"
        end
        response_obj = result[:model_info]
      end
    elsif open_ai_client.is_a?(NiceHttp) #azure
      resp = open_ai_client.get("/openai/deployments/#{model}?api-version=#{models_config.api_version}")
      response = resp.body.json()
      response_obj = response
    else
      response = open_ai_client.models.retrieve(id: model)
      response_obj = response
    end
    result = response.ai
  end
  response = response.to_json
  if !response.json(:message).empty? and response.json(:content).empty?
    result = response.json(:message)
  end
  if return_response
    return response_obj
  else
    return result
  end
end
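
Usage sketch (hedged): ai_open_ai, general_config, personal_settings and team_id_user are assumed to come from a previous .connect call as in the example above; models_config is the :models sub-hash built by .connect, and nice_hash method access on it (models_config.url) is assumed to be available, as it is elsewhere in the SmartBot.

ai_open_ai, error = SlackSmartBot::AI::OpenAI.connect(ai_open_ai, general_config, personal_settings, service: :models)
models_config = ai_open_ai[team_id_user][:models]

# All ChatGPT-capable models, returned as an array
gpt_models = SlackSmartBot::AI::OpenAI.models(models_config[:client], models_config, "chatgpt", return_response: true)

# Details of a single model, returned as a formatted string (the model id is illustrative)
info = SlackSmartBot::AI::OpenAI.models(models_config[:client], models_config, "gpt-4")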

.send_gpt_chat(open_ai_client, model, messages, chat_gpt_config) ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/send_gpt_chat.rb', line 4

def self.send_gpt_chat(open_ai_client, model, messages, chat_gpt_config)
  require "openai"
  require "nice_http"
  user = Thread.current[:user]
  if user.key?(:sso_user_name)
    user_name = user.sso_user_name
  else
    user_name = user.name
  end
  if messages.is_a?(String)
    messages = [{ role: "user", content: messages }]
  end
  parameters = {
    model: model, # Required.
    messages: messages,
    temperature: 0.7,
    user: user_name,
  }

  parameters.user = chat_gpt_config.fixed_user if chat_gpt_config.fixed_user.to_s != ""
  if open_ai_client.is_a?(NiceHttp)
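    # Azure-style clients go through NiceHttp: post to the deployment's chat/completions endpoint and retry up to 10 times while no data comes back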
    begin
      response = {}
      tries = 0
      while (!response.key?(:data) or response.data.nil? or response.data.empty?) and tries < 10
        begin
          request = {
            path: "/openai/deployments/#{model}/chat/completions?api-version=#{chat_gpt_config.api_version}",
            data: parameters,
          }
          response = open_ai_client.post(request)
        rescue Exception => exception
          response = { message: exception.message }.to_json
        end
        tries += 1
        sleep 1 if !response.key?(:data) or response.data.nil? or response.data.empty? #wait a second before trying again
      end
      response.data = { message: "" }.to_json if !response.key?(:data) or response.data.nil? or response.data.empty?
      response = response.data
    rescue Exception => exception
      response = { message: exception.message }.to_json
    end
  else
    begin
      response = open_ai_client.chat(parameters: parameters)
      response = response.to_json
    rescue Exception => e
      response = e.response
      if !response.nil? and response.status == 403 and response.body.error.message.to_s.include?("You must pass a valid 'user'")
        response.body.error.message += "\nThe user on Slack is: #{user.name}\nYou have to go to your Profile Slack Account on a browser. Then go to Settings.\nNow go to Username and click on expand, change the name to your SSO name and click on Save"
      end
      if response.nil?
        response = { message: e.message }
      else
        response = response.to_json
      end
    end
  end
  if response.nil?
    result = "No response from the AI. Please contact the SmartBot administrator."
    return false, result
  elsif response.is_a?(Hash) and response.key?(:message) and !response.key?(:content)
    result = response[:message]
    return false, result
  elsif !response.json(:message).empty? and response.json(:content).empty?
    result = response.json(:message)
    return false, result
  elsif !response.json(:error).empty? and !response.json(:code).empty?
    result = response.json(:code)
    return false, result
  elsif !response.json(:error).empty?
    result = response.json(:error).to_s
    return false, result
  else
    result = response.json(:content)
    return true, result
  end
end
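
Usage sketch (hedged): chat_gpt_config is the :chat_gpt sub-hash returned by .connect (see above); Thread.current[:user] must already hold the Slack user, since the method reads it to set the user field; messages may also be a plain string.

chat_gpt_config = ai_open_ai[team_id_user][:chat_gpt]

messages = [
  { role: "system", content: "You are a concise assistant." },
  { role: "user",   content: "Explain what this module does in one sentence." }
]

success, answer = SlackSmartBot::AI::OpenAI.send_gpt_chat(
  chat_gpt_config[:client], chat_gpt_config[:model], messages, chat_gpt_config
)
puts(success ? answer : "ChatGPT error: #{answer}")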

.send_image_edit(open_api_client, image, message, size: "") ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/send_image_edit.rb', line 4

def self.send_image_edit(open_api_client, image, message, size: "")
  #todo: add size personal settings
  require "openai"
  user = Thread.current[:user]

  if size == ""
    response = open_api_client.images.edit(parameters: { image: image, prompt: message })
  else
    response = open_api_client.images.edit(parameters: { image: image, prompt: message, size: size })
  end
  response = response.to_json
  if !response.json(:message).empty?
    return false, "*OpenAI*: #{response.json(:message)}"
  else
    urls = [response.json(:url)].flatten
    return true, urls
  end
end
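
Usage sketch (hedged): the PNG path and prompt are illustrative; the image is forwarded as-is to the ruby-openai images.edit endpoint, so any value that endpoint accepts (for example a local file path) should work. ai_open_ai and team_id_user are assumed to come from a previous .connect call.

dall_e = ai_open_ai[team_id_user][:dall_e]

success, result = SlackSmartBot::AI::OpenAI.send_image_edit(
  dall_e[:client], "/tmp/logo.png", "add a red party hat", size: "512x512"
)
if success
  result.each { |url| puts url }   # array of image URLs
else
  puts result                      # OpenAI error message
end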

.send_image_generation(open_ai_client, message, image_size) ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/send_image_generation.rb', line 4

def self.send_image_generation(open_ai_client, message, image_size)
  require "openai"
  user = Thread.current[:user]
  response = open_ai_client.images.generate(parameters: { prompt: message, size: image_size })
  response = response.to_json
  if !response.json(:message).empty?
    return false, "*OpenAI*: #{response.json(:message)}"
  else
    urls = response.json(:url)
    return true, urls
  end
end
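
Usage sketch (hedged), reusing the :dall_e client and image size resolved by a previous .connect call:

dall_e = ai_open_ai[team_id_user][:dall_e]

success, urls = SlackSmartBot::AI::OpenAI.send_image_generation(
  dall_e[:client], "a watercolor fox reading a book", dall_e[:image_size]
)
puts urls if success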

.send_image_variation(open_ai_client, image, variations, size: "") ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/send_image_variation.rb', line 4

def self.send_image_variation(open_ai_client, image, variations, size: "")
  #todo: add size personal settings
  require "openai"
  user = Thread.current[:user]
  if size == ""
    response = open_ai_client.images.variations(parameters: { image: image, n: variations })
  else
    response = open_ai_client.images.variations(parameters: { image: image, n: variations, size: size })
  end
  response = response.to_json
  if !response.json(:message).empty?
    return false, "*OpenAI*: #{response.json(:message)}"
  else
    urls = [response.json(:url)].flatten
    return true, urls
  end
end
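
Usage sketch (hedged): requests three variations of a local image (the path is illustrative); when size is left empty the API default is used. ai_open_ai and team_id_user are assumed to come from a previous .connect call.

dall_e = ai_open_ai[team_id_user][:dall_e]

success, urls = SlackSmartBot::AI::OpenAI.send_image_variation(
  dall_e[:client], "/tmp/avatar.png", 3, size: "256x256"
)
urls.each { |u| puts u } if success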

.whisper_transcribe(open_ai_client, model, file) ⇒ Object



# File 'lib/slack/smart-bot/ai/open_ai/whisper_transcribe.rb', line 4

def self.whisper_transcribe(open_ai_client, model, file)
  require "openai"
  user = Thread.current[:user]
  response = open_ai_client.transcribe(
    parameters: {
      model: model, # Required.
      file: File.open(file, "rb"),
    },
  )
  response = response.to_json
  if !response.json(:message).empty?
    return false, response.json(:message)
  else
    return true, response.json(:text)
  end
end
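
Usage sketch (hedged): the audio path is illustrative; the file is opened in binary mode and sent to the client's transcribe endpoint using the Whisper model resolved by a previous .connect call.

whisper = ai_open_ai[team_id_user][:whisper]

success, text = SlackSmartBot::AI::OpenAI.whisper_transcribe(
  whisper[:client], whisper[:model], "/tmp/voice_note.mp3"
)
puts(success ? text : "Transcription failed: #{text}")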