Module: PWN::AI::Grok

Defined in:
lib/pwn/ai/grok.rb

Overview

This plugin interacts with xAI's Grok API, following the same conventions as the other PWN::AI plugins. It provides methods to list models, generate completions, and chat. API documentation: docs.x.ai/docs. Obtain an API key from x.ai/api.
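A minimal usage sketch, assuming PWN::Env[:ai][:grok] is already populated with a valid xAI API key and a default model (the prompt is illustrative only):

models = PWN::AI::Grok.get_models
response = PWN::AI::Grok.chat(request: 'Summarize the OWASP Top 10 in one sentence.')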

Class Method Summary

Class Method Details

.authors ⇒ Object

Author(s)

0day Inc. <[email protected]>



# File 'lib/pwn/ai/grok.rb', line 230

public_class_method def self.authors
  "AUTHOR(S):
    0day Inc. <[email protected]>
  "
end

.chat(opts = {}) ⇒ Object

Supported Method Parameters

response = PWN::AI::Grok.chat(
  request: 'required - message to Grok',
  model: 'optional - model to use for text generation (defaults to PWN::Env[:ai][:grok][:model])',
  temp: 'optional - creative response float (defaults to PWN::Env[:ai][:grok][:temp])',
  system_role_content: 'optional - context to set up the model behavior for conversation (Default: PWN::Env[:ai][:grok][:system_role_content])',
  response_history: 'optional - pass response back in to have a conversation',
  speak_answer: 'optional - speak answer using PWN::Plugins::Voice.text_to_speech (Default: nil)',
  timeout: 'optional - timeout in seconds (defaults to 300)',
  spinner: 'optional - display spinner (defaults to false)'
)
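Because the returned hash carries the accumulated message history in :choices, it can be passed back in via response_history to continue a conversation. A hedged sketch (prompts are illustrative only):

response = PWN::AI::Grok.chat(request: 'List three common web vulnerabilities.')
follow_up = PWN::AI::Grok.chat(
  request: 'Explain the first one in more detail.',
  response_history: response
)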



# File 'lib/pwn/ai/grok.rb', line 145

public_class_method def self.chat(opts = {})
  engine = PWN::Env[:ai][:grok]
  request = opts[:request]
  max_prompt_length = engine[:max_prompt_length] ||= 256_000
  request_trunc_idx = ((max_prompt_length - 1) / 3.36).floor
  request = request[0..request_trunc_idx]

  model = opts[:model] ||= engine[:model]
  raise 'ERROR: Model is required.  Call #get_models method for details' if model.nil?

  temp = opts[:temp].to_f ||= engine[:temp].to_f
  temp = 1 if temp.zero?

  rest_call = 'chat/completions'

  response_history = opts[:response_history]

  max_tokens = response_history[:usage][:total_tokens] unless response_history.nil?

  system_role_content = opts[:system_role_content] ||= engine[:system_role_content]
  system_role_content = response_history[:choices].first[:content] if response_history

  system_role = {
    role: 'system',
    content: system_role_content
  }

  user_role = {
    role: 'user',
    content: request
  }

  response_history ||= { choices: [system_role] }
  choices_len = response_history[:choices].length

  http_body = {
    model: model,
    messages: [system_role],
    temperature: temp,
    stream: false
  }

  if response_history[:choices].length > 1
    response_history[:choices][1..-1].each do |message|
      http_body[:messages].push(message)
    end
  end

  http_body[:messages].push(user_role)

  timeout = opts[:timeout]
  spinner = opts[:spinner]

  response = grok_rest_call(
    http_method: :post,
    rest_call: rest_call,
    http_body: http_body,
    timeout: timeout,
    spinner: spinner
  )

  json_resp = JSON.parse(response, symbolize_names: true)
  assistant_resp = json_resp[:choices].first[:message]
  json_resp[:choices] = http_body[:messages]
  json_resp[:choices].push(assistant_resp)

  speak_answer = true if opts[:speak_answer]

  if speak_answer
    answer = assistant_resp[:content]
    text_path = "/tmp/#{SecureRandom.hex}.pwn_voice"
    # answer = json_resp[:choices].last[:text]
    # answer = json_resp[:choices].last[:content] if gpt
    File.write(text_path, answer)
    PWN::Plugins::Voice.text_to_speech(text_path: text_path)
    File.unlink(text_path)
  end

  json_resp
rescue StandardError => e
  raise e
end
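As the source above shows, the returned hash replaces :choices with the full message history and appends the assistant's reply as the last entry, so the answer can be read back like this (a sketch; the prompt is illustrative only):

response = PWN::AI::Grok.chat(request: 'What is a buffer overflow?')
answer = response[:choices].last[:content]
puts answer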

.get_models ⇒ Object

Supported Method Parameters

models = PWN::AI::Grok.get_models



# File 'lib/pwn/ai/grok.rb', line 125

public_class_method def self.get_models
  models = grok_rest_call(rest_call: 'models')

  JSON.parse(models, symbolize_names: true)[:data]
rescue StandardError => e
  raise e
end
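The parsed :data array follows the OpenAI-compatible model listing, so each entry is expected to expose an :id that can be passed to #chat. A sketch under that assumption:

models = PWN::AI::Grok.get_models
model_ids = models.map { |m| m[:id] }

response = PWN::AI::Grok.chat(
  request: 'Hello, Grok!',
  model: model_ids.first
)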

.help ⇒ Object

Display Usage for this Module



# File 'lib/pwn/ai/grok.rb', line 238

public_class_method def self.help
  puts "USAGE:
    models = #{self}.get_models

    response = #{self}.chat(
      request: 'required - message to Grok',
      model: 'optional - model to use for text generation (defaults to PWN::Env[:ai][:grok][:model])',
      temp: 'optional - creative response float (defaults to PWN::Env[:ai][:grok][:temp])',
      system_role_content: 'optional - context to set up the model behavior for conversation (Default: PWN::Env[:ai][:grok][:system_role_content])',
      response_history: 'optional - pass response back in to have a conversation',
      speak_answer: 'optional - speak answer using PWN::Plugins::Voice.text_to_speech (Default: nil)',
      timeout: 'optional - timeout in seconds (defaults to 300)',
      spinner: 'optional - display spinner (defaults to false)'
    )

    #{self}.authors
  "
end