Method: LlmMemory::Broca#respond_with_schema

Defined in:
lib/llm_memory/broca.rb

#respond_with_schema(context: {}, schema: {}) ⇒ Object

[View source]

46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
# File 'lib/llm_memory/broca.rb', line 46

# Re-formats the free-text output of #respond into a structured Hash by
# asking the model to "call" a function whose parameters match +schema+
# (OpenAI function-calling).
#
# @param context [Hash] template variables forwarded to #respond
# @param schema [Hash] JSON-Schema hash describing the desired output shape
# @param model [String] chat model used for the formatting pass
#   (defaults to the original hard-coded "gpt-3.5-turbo-0613")
# @return [Hash, nil] the parsed function-call arguments with symbolized
#   keys, or nil when the model does not call the expected function or
#   any error occurs (errors are logged, never raised to the caller)
def respond_with_schema(context: {}, schema: {}, model: "gpt-3.5-turbo-0613")
  response_content = respond(context)
  begin
    response = client.chat(
      parameters: {
        model: model,
        messages: [
          {
            role: "user",
            content: response_content
          }
        ],
        functions: [
          {
            name: "broca",
            description: "Formatting the content with the specified schema",
            parameters: schema
          }
        ]
      }
    )
    LlmMemory.logger.debug(response)
    message = response.dig("choices", 0, "message")
    # Guard against a missing message so we return nil instead of relying
    # on a rescued NoMethodError.
    if message && message["role"] == "assistant" && message["function_call"]
      function_name = message.dig("function_call", "name")
      args =
        JSON.parse(
          message.dig("function_call", "arguments"),
          {symbolize_names: true}
        )
      # Only trust arguments addressed to our declared function.
      args if function_name == "broca"
    end
  rescue => e
    # Error path: log at warn (was info) but preserve the nil-on-failure
    # contract callers depend on.
    LlmMemory.logger.warn(e.inspect)
    nil
  end
end