Class: RbsGoose::Configuration

Inherits: Object
Extended by:
Forwardable
Defined in:
lib/rbs_goose/configuration.rb

Defined Under Namespace

Classes: LLMConfig, TemplateConfig

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize ⇒ Configuration

Returns a new instance of Configuration.



19
20
21
22
23
# File 'lib/rbs_goose/configuration.rb', line 19

# Builds a configuration with the default infer/fix-error templates and,
# when a block is supplied, evaluates it in the context of this instance
# (DSL-style: `Configuration.new { use_open_ai(...) }`).
def initialize(&)
  self.infer_template = default_infer_template
  self.fix_error_template = default_fix_error_template
  return unless block_given?

  instance_eval(&)
end

Instance Attribute Details

#fix_error_template ⇒ Object

Returns the value of attribute fix_error_template.



25
26
27
# File 'lib/rbs_goose/configuration.rb', line 25

# Reader for the fix-error template (value of @fix_error_template).
def fix_error_template = @fix_error_template

#infer_template ⇒ Object

Returns the value of attribute infer_template.



25
26
27
# File 'lib/rbs_goose/configuration.rb', line 25

# Reader for the inference template (value of @infer_template).
def infer_template = @infer_template

#llm ⇒ Object

Returns the value of attribute llm.



25
26
27
# File 'lib/rbs_goose/configuration.rb', line 25

# Reader for the configured LLM (value of @llm).
def llm = @llm

Instance Method Details

#use_anthropic(access_token, model_name: 'claude-3-haiku-20240307', mode: :chat, default_options: {}) ⇒ Object

rubocop:disable Metrics/MethodLength



44
45
46
47
48
49
50
51
52
53
54
55
56
# File 'lib/rbs_goose/configuration.rb', line 44

# Configures an Anthropic-backed LLM client.
# Caller-supplied default_options override the built-in model defaults
# (merge order: built-ins first, then default_options).
def use_anthropic(access_token, model_name: 'claude-3-haiku-20240307', mode: :chat, default_options: {}) # rubocop:disable Metrics/MethodLength
  model_defaults = {
    completion_model_name: model_name,
    chat_completion_model_name: model_name,
    max_tokens_to_sample: 4096
  }
  client = ::Langchain::LLM::Anthropic.new(
    api_key: access_token,
    default_options: model_defaults.merge(default_options)
  )
  @llm = LLMConfig.new(client:, mode:)
end

#use_ollama(url: 'http://localhost:11434', model_name: 'codegemma', mode: :complete, default_options: {}) ⇒ Object

rubocop:disable Metrics/MethodLength



58
59
60
61
62
63
64
65
66
67
68
69
70
# File 'lib/rbs_goose/configuration.rb', line 58

# Configures an Ollama-backed LLM client reachable at +url+.
# Caller-supplied default_options override the built-in model defaults
# (merge order: built-ins first, then default_options).
def use_ollama(url: 'http://localhost:11434', model_name: 'codegemma', mode: :complete, default_options: {}) # rubocop:disable Metrics/MethodLength
  model_defaults = {
    temperature: 0.0,
    completion_model_name: model_name,
    chat_completion_model_name: model_name
  }
  client = ::Langchain::LLM::Ollama.new(
    url:,
    default_options: model_defaults.merge(default_options)
  )
  @llm = LLMConfig.new(client:, mode:)
end

#use_open_ai(open_ai_access_token, model_name: 'gpt-3.5-turbo-1106', mode: :chat, llm_options: {}, default_options: {}) ⇒ Object

rubocop:disable Metrics/MethodLength



27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
# File 'lib/rbs_goose/configuration.rb', line 27

# Configures an OpenAI-backed LLM client.
# llm_options override the built-in 600s request timeout; default_options
# override the built-in model defaults (built-ins first in each merge).
def use_open_ai( # rubocop:disable Metrics/MethodLength
  open_ai_access_token, model_name: 'gpt-3.5-turbo-1106', mode: :chat,
  llm_options: {}, default_options: {}
)
  client_options = { request_timeout: 600 }.merge(llm_options)
  model_defaults = {
    completion_model_name: model_name,
    chat_completion_model_name: model_name
  }
  client = ::Langchain::LLM::OpenAI.new(
    api_key: open_ai_access_token,
    llm_options: client_options,
    default_options: model_defaults.merge(default_options)
  )
  @llm = LLMConfig.new(client:, mode:)
end