Class: Promptcraft::Cli::RunCommand

Inherits:
Object
  • Object
show all
Includes:
TTY::Option
Defined in:
lib/promptcraft/cli/run_command.rb

Overview

Pick an LLM provider + model:

promptcraft --provider groq
promptcraft --provider openai --model gpt-3.5-turbo

Pass in a prompt via CLI (-p, --prompt expects a string, or a filename)

promptcraft -c tmp/maths/start/basic.yml -p "I'm terrible at maths. If I'm asked a maths question, I reply with a question."
promptcraft -c tmp/maths/start/basic.yml -p <(echo "I'm terrible at maths. If I'm asked a maths question, I reply with a question.")

The prompt file can also be YAML with system_prompt: key.

Instance Method Summary collapse

Instance Method Details

#dump_conversation(conversation, format:) ⇒ Object

Currently we support only streaming YAML and JSON objects, so we can immediately dump each one to STDOUT as it is produced.



175
176
177
178
179
180
181
# File 'lib/promptcraft/cli/run_command.rb', line 175

# Serialize a single conversation to STDOUT.
#
# Only streaming-friendly formats are supported (JSON and YAML), so each
# conversation can be written out immediately as it is produced.
#
# @param conversation [#to_json, #to_yaml] the conversation to emit
# @param format [String] "json" for JSON output; anything else yields YAML
# @return [nil]
def dump_conversation(conversation, format:)
  serialized = (format == "json") ? conversation.to_json : conversation.to_yaml
  puts serialized
end

#io_ready?(io) ⇒ Boolean

Returns:

  • (Boolean)


183
184
185
186
# File 'lib/promptcraft/cli/run_command.rb', line 183

# Reports whether +io+ has data available to read within +timeout+ seconds.
#
# Fix: this is a predicate method (documented as returning Boolean), but it
# previously leaked the raw +IO.select+ result (an Array when ready, nil on
# timeout). It now returns a strict true/false. The previously hard-coded
# 5-second wait is kept as the default but can be overridden by callers/tests.
#
# @param io [IO, nil] stream to poll (e.g. a piped STDIN); nil is never ready
# @param timeout [Numeric] seconds to wait for readability
# @return [Boolean] true when the stream becomes readable before the timeout
def io_ready?(io, timeout = 5)
  return false unless io
  !IO.select([io], nil, nil, timeout).nil?
end

#run(stdin: nil, threads: nil) ⇒ Object

Arguments are for the benefit of test suite



83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
# File 'lib/promptcraft/cli/run_command.rb', line 83

# CLI entry point. Keyword arguments exist for the benefit of the test suite
# (inject a fake stdin stream or thread count).
#
# Workflow:
#   1. Load conversations from each --conversation argument concurrently
#      (treating a non-existent filename as an inline YAML string).
#   2. Append any conversations streamed in via STDIN.
#   3. Resolve the replacement system prompt from --prompt: a literal string,
#      a filename, or a YAML document containing a system_prompt: key.
#   4. Re-chat every conversation concurrently, streaming each updated
#      conversation to STDOUT as YAML or JSON.
#
# @param stdin [IO, nil] stream of YAML conversations (test injection point)
# @param threads [Integer, nil] pool size override; defaults to --threads
# @return [void]
def run(stdin: nil, threads: nil)
  if params[:help]
    warn help
  elsif params.errors.any?
    warn params.errors.summary
  else
    # Load conversation files in a fixed-size thread pool.
    threads ||= params[:threads]
    pool = Concurrent::FixedThreadPool.new(threads)
    conversations = Concurrent::Array.new
    (params[:conversation] || []).each do |filename|
      pool.post do
        # --conversation may name an actual file; otherwise treat the value
        # itself as inline YAML and parse it through an IO wrapper.
        if File.exist?(filename)
          conversations.push(*Promptcraft::Conversation.load_from_file(filename))
        else
          conversations.push(*Promptcraft::Conversation.load_from_io(StringIO.new(filename)))
        end
      end
    end
    pool.shutdown
    pool.wait_for_termination

    # If STDIN was piped into the command, read a stream of YAML
    # conversations from it as well.
    if io_ready?(stdin)
      conversations.push(*Promptcraft::Conversation.load_from_io(stdin))
    end

    # With no inputs at all, seed a single default conversation.
    if conversations.empty?
      conversations << Promptcraft::Conversation.new(system_prompt: "You are helpful. If you're first, then ask a question. You like brevity.")
    end

    if (prompt = params[:prompt])
      # --prompt may be a filename (load its contents) or a literal string.
      new_system_prompt = if File.exist?(prompt)
        File.read(prompt)
      else
        prompt
      end

      # If the prompt parses as a YAML Hash, use its system_prompt: key.
      # BUG FIX: Psych's keyword is symbolize_names:, not symbolize_keys:.
      # The old keyword raised ArgumentError, which the bare rescue silently
      # swallowed — so YAML prompt files were used verbatim instead of
      # having their system_prompt: key extracted.
      begin
        obj = YAML.load(new_system_prompt, symbolize_names: true)
        if obj.is_a?(Hash) && obj[:system_prompt]
          new_system_prompt = obj[:system_prompt]
        end
      rescue StandardError
        # Not valid YAML: keep the prompt text as-is.
      end
    end

    # Process each conversation concurrently via a second thread pool.
    pool = Concurrent::FixedThreadPool.new(threads)
    mutex = Mutex.new

    updated_conversations = Concurrent::Array.new
    conversations.each do |conversation|
      pool.post do
        # Provider precedence: explicit --provider > the conversation's own
        # recorded LLM > library default.
        llm = if params[:provider]
          Promptcraft::Llm.new(provider: params[:provider], model: params[:model])
        elsif conversation.llm
          conversation.llm
        else
          Promptcraft::Llm.new
        end
        llm.model = params[:model] if params[:model]
        llm.temperature = params[:temperature] if params[:temperature]

        system_prompt = new_system_prompt || conversation.system_prompt

        cmd = Promptcraft::Command::RechatConversationCommand.new(system_prompt:, conversation:, llm:)
        cmd.execute
        updated_conversations << cmd.updated_conversation

        # Serialize output under a mutex so concurrent dumps don't interleave.
        mutex.synchronize do
          dump_conversation(cmd.updated_conversation, format: params[:format])
        end
      rescue => e
        mutex.synchronize do
          warn "Error: #{e.message}"
          warn "for conversation: #{conversation.inspect}"
        end
      end
    end
    pool.shutdown
    pool.wait_for_termination
  end
end