Class: Promptcraft::Llm

Inherits: Object

Defined in: lib/promptcraft/llm.rb

Constant Summary

DEFAULT_PROVIDER = "groq"

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Constructor Details

#initialize(provider: DEFAULT_PROVIDER, model: nil, temperature: nil, api_key: nil) ⇒ Llm

Returns a new instance of Llm.

# File 'lib/promptcraft/llm.rb', line 11

def initialize(provider: DEFAULT_PROVIDER, model: nil, temperature: nil, api_key: nil)
  @provider = provider
  @temperature = temperature
  @langchain = case provider
  when "groq"
    @model = model || "llama3-70b-8192"
    require "openai"
    Langchain::LLM::OpenAI.new(
      api_key: api_key || ENV.fetch("GROQ_API_KEY"),
      llm_options: {uri_base: "https://api.groq.com/openai/"},
      default_options: {
        temperature: temperature,
        chat_completion_model_name: @model
      }.compact
    )
  when "openai"
    @model = model || "gpt-3.5-turbo"
    require "openai"
    Langchain::LLM::OpenAI.new(
      api_key: api_key || ENV.fetch("OPENAI_API_KEY"),
      default_options: {
        temperature: temperature,
        chat_completion_model_name: @model
      }.compact
    )
  when "openrouter"
    @model = model || "meta-llama/llama-3-8b-instruct:free"
    require "openai"
    Langchain::LLM::OpenAI.new(
      api_key: api_key || ENV.fetch("OPENROUTER_API_KEY"),
      llm_options: {uri_base: "https://openrouter.ai/api/"},
      default_options: {
        temperature: temperature,
        chat_completion_model_name: @model
      }.compact
    )
  when "ollama"
    @model = model || "llama3"
    Langchain::LLM::Ollama.new(
      default_options: {
        completion_model_name: @model,
        chat_completion_model_name: @model
      }
    )
  end
end
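
A minimal usage sketch, assuming the gem is required as "promptcraft" and the relevant API key environment variable is set (the constructor calls ENV.fetch, which raises KeyError when the key is missing). The "gpt-4o" model name is only an illustrative argument, not a library default.

require "promptcraft"

# Default provider ("groq") with its default model "llama3-70b-8192";
# reads GROQ_API_KEY from the environment.
llm = Promptcraft::Llm.new

# Explicit provider, model, and temperature; reads OPENAI_API_KEY.
llm = Promptcraft::Llm.new(provider: "openai", model: "gpt-4o", temperature: 0.2)

llm.provider   # => "openai"
llm.model      # => "gpt-4o"
llm.langchain  # => the underlying Langchain::LLM::OpenAI client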

Instance Attribute Details

#langchain ⇒ Object (readonly)

Returns the value of attribute langchain.

# File 'lib/promptcraft/llm.rb', line 6

def langchain
  @langchain
end

#model ⇒ Object

Returns the value of attribute model.

# File 'lib/promptcraft/llm.rb', line 7

def model
  @model
end

#provider ⇒ Object

Returns the value of attribute provider.

# File 'lib/promptcraft/llm.rb', line 7

def provider
  @provider
end

#temperature ⇒ Object

Returns the value of attribute temperature.

# File 'lib/promptcraft/llm.rb', line 7

def temperature
  @temperature
end

Class Method Details

.from_h(hash) ⇒ Object

# File 'lib/promptcraft/llm.rb', line 62

def self.from_h(hash)
  new(provider: hash[:provider], model: hash[:model], temperature: hash[:temperature])
end
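
A short sketch of building an instance from a plain hash, for example one loaded from YAML or JSON. Note that from_h expects symbol keys and only passes provider, model, and temperature to the constructor (api_key is not read from the hash). Assumes OPENAI_API_KEY is set in the environment.

config = {provider: "openai", model: "gpt-3.5-turbo", temperature: 0.7}
llm = Promptcraft::Llm.from_h(config)
llm.model        # => "gpt-3.5-turbo"
llm.temperature  # => 0.7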

Instance Method Details

#to_h ⇒ Object

# File 'lib/promptcraft/llm.rb', line 58

def to_h
  {provider:, model:, temperature:}.compact
end