Class: BxBuilderChain::QuestionAskingService

Inherits:
Object
  • Object
show all
Defined in:
lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(question:, user_groups: ['public'], client_class_name: 'BxBuilderChain::Vectorsearch::Pgvector', llm_class_name: 'BxBuilderChain::Llm::OpenAi', context_results: 6, model: "gpt-3.5-turbo-16k") ⇒ QuestionAskingService

Returns a new instance of QuestionAskingService.



5
6
7
8
9
10
11
12
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 5

# Builds a service object that answers a single question against the
# vector store, scoped to the given user groups.
#
# @param question [String] the user's question
# @param user_groups [Array<String>] groups used to scope document visibility
# @param client_class_name [String] vector-search client class to instantiate
# @param llm_class_name [String] LLM client class to instantiate
# @param context_results [Integer] number of context documents to retrieve
# @param model [String] model identifier used for completion and token budgeting
def initialize(question:, user_groups: ['public'], client_class_name: 'BxBuilderChain::Vectorsearch::Pgvector', llm_class_name: 'BxBuilderChain::Llm::OpenAi', context_results: 6, model: "gpt-3.5-turbo-16k")
  @question          = question
  @user_groups       = user_groups
  @client_class_name = client_class_name
  @llm_class_name    = llm_class_name
  @context_results   = context_results
  @model             = model
end

Instance Attribute Details

#client_class_nameObject (readonly)

Returns the value of attribute client_class_name.



3
4
5
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 3

# Reader for the vector-search client class name supplied at construction.
def client_class_name
  instance_variable_get(:@client_class_name)
end

#context_resultsObject (readonly)

Returns the value of attribute context_results.



3
4
5
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 3

# Reader for the number of context documents to retrieve per query.
def context_results
  instance_variable_get(:@context_results)
end

#llm_class_nameObject (readonly)

Returns the value of attribute llm_class_name.



3
4
5
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 3

# Reader for the LLM client class name supplied at construction.
def llm_class_name
  instance_variable_get(:@llm_class_name)
end

#questionObject (readonly)

Returns the value of attribute question.



3
4
5
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 3

# Reader for the question this service was built to answer.
def question
  instance_variable_get(:@question)
end

#user_groupsObject (readonly)

Returns the value of attribute user_groups.



3
4
5
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 3

# Reader for the user groups that scope document visibility.
def user_groups
  instance_variable_get(:@user_groups)
end

Instance Method Details

#askObject



14
15
16
17
18
19
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 14

# Answers the stored question through the vector-search client.
#
# @return [Hash] { answer: ... } on success, or { error: ... } when no
#   question was supplied
def ask
  # Guard clause: bail out early when there is nothing to ask.
  # NOTE(review): `present?` is ActiveSupport — assumed available in the host Rails app.
  return { error: 'No question provided' } unless question.present?

  { answer: client.ask(question: question, context_results: context_results) }
end

#chat(history = []) ⇒ Object

history is an array of message hashes in the format:

[{ role: 'user', message: 'user message' }, { role: 'ai', message: 'some text' }, ...]


23
24
25
26
27
28
29
30
31
32
33
34
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 23

# Runs a contextual chat turn: retrieves documents similar to the stored
# question, builds a prompt from them, and sends it to the LLM together
# with a token-budgeted copy of the conversation history.
#
# @param history [Array<Hash>] prior messages, e.g.
#   [{ role: 'user', message: '...' }, { role: 'ai', message: '...' }]
# @return [Object] whatever the LLM client's #chat returns
def chat(history = [])
  # Gather the most relevant stored documents for the current question.
  documents = client.similarity_search(query: @question, k: @context_results)
  context = documents.map { |doc| doc.content.to_s }.join("\n---\n")

  prompt = client.generate_prompt(question: @question, context: context, prompt_template: nil)

  llm.chat(prompt: prompt, messages: reduce_history(history))
end

#reduce_history(history) ⇒ Object



36
37
38
39
40
41
42
43
44
45
# File 'lib/generators/bx_builder_chain/templates/app/services/bx_builder_chain/question_asking_service.rb', line 36

def reduce_history(history)
  current_length = 0
  limit = tokenizer.class::TOKEN_LIMITS["gpt-3.5-turbo-16k"] * 0.75
  history_overflow, @reduced_history = @history.partition do |msg|
    current_length += tokenizer.token_length(msg[:message], @current_model)
    current_length > limit
  end

  reduced_history
end