Class: AIPrompt

Inherits:
Object
  • Object
show all
Defined in:
lib/cl/magic/common/ai_prompt.rb

Constant Summary collapse

API_COMPLETIONS_PATH =
"/openai/v1/chat/completions"
API_EMBEDDINGS_PATH =
"/openai/v1/embeddings"
MAX_THREADS =

Set to 1 to debug without concurrency.

10

Instance Method Summary collapse

Constructor Details

#initialize(logger, cache_dir, max_chunk_size = 10000, temperature = 1) ⇒ AIPrompt

Returns a new instance of AIPrompt.



19
20
21
22
23
24
25
26
27
28
29
30
31
# File 'lib/cl/magic/common/ai_prompt.rb', line 19

# Set up an AIPrompt helper: caching location, text splitter, and a
# bounded worker pool for concurrent API calls.
#
# @param logger [Object] diagnostic logger, also handed to the splitter
# @param cache_dir [String] directory used for cached responses
# @param max_chunk_size [Integer] largest chunk handed to the splitter
# @param temperature [Numeric] sampling temperature used for completions
def initialize(logger, cache_dir, max_chunk_size=10000, temperature=1)
  @logger         = logger
  @cache_dir      = cache_dir
  @temperature    = temperature
  @max_chunk_size = max_chunk_size

  # The splitter is configured once with the chunk limit and logger.
  @ai_text_splitter = AITextSplitter.new(@max_chunk_size, @logger)

  # Worker pool capped at MAX_THREADS; with :caller_runs, rejected tasks
  # execute on the submitting thread instead of raising.
  @thread_pool = Concurrent::ThreadPoolExecutor.new(
    min_threads: 0,
    max_threads: MAX_THREADS,
    max_queue: 0,
    fallback_policy: :caller_runs
  )
end

Instance Method Details

#clear_cache ⇒ Object



56
57
58
59
60
# File 'lib/cl/magic/common/ai_prompt.rb', line 56

# Delete every cached JSON file under the cache path.
# Non-JSON files in the directory are left untouched.
def clear_cache
  cached_files = Dir.glob(File.join(get_cache_path, '*.json'))
  cached_files.each { |path| File.delete(path) }
end

#gen_embeddings(input) ⇒ Object



33
34
35
36
37
38
39
40
# File 'lib/cl/magic/common/ai_prompt.rb', line 33

# Ask the embeddings endpoint for a vector representing +input+.
#
# @param input [String] text to embed
# @return [Array] the embedding of the first (and only requested) result
def gen_embeddings(input)
  payload = {
    model: "text-embedding-ada-002",
    input: input,
  }.to_json
  response = post_open_ai(API_EMBEDDINGS_PATH, payload)
  response["data"][0]["embedding"]
end

#prompt(raw_data, prompt, split_as_markdown = false, separator) ⇒ Object



42
43
44
45
46
47
48
49
50
51
52
53
54
# File 'lib/cl/magic/common/ai_prompt.rb', line 42

# Split +raw_data+ on +separator+, summarize each chunk against +prompt+,
# and return one string per API response (all choice contents joined
# with newlines).
#
# @param raw_data [String] the text to split and summarize
# @param prompt [String] instruction applied to each chunk
# @param split_as_markdown [Boolean] forwarded to the splitter
# @param separator [String] chunk boundary marker
# @return [Array<String>] joined completion text, one entry per response
def prompt(raw_data, prompt, split_as_markdown=false, separator)
  chunks = @ai_text_splitter.split(raw_data, split_as_markdown, separator)

  responses = summarize_split_text(chunks, prompt, split_as_markdown)

  responses.map do |response|
    contents = response["choices"].map { |choice| choice["message"]["content"] }
    contents.join("\n")
  end
end