Class: Cohere::Client

Inherits: Object

Defined in: lib/cohere/client.rb

Constant Summary

ENDPOINT_URL = "https://api.cohere.ai/v1"

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(api_key:, timeout: nil) ⇒ Client

Returns a new instance of Client.



# File 'lib/cohere/client.rb', line 11

def initialize(api_key:, timeout: nil)
  @api_key = api_key
  @timeout = timeout
end
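
A minimal usage sketch; the environment variable name and timeout value are illustrative, not mandated by the library:

require "cohere"

client = Cohere::Client.new(
  api_key: ENV["COHERE_API_KEY"],
  timeout: 30 # seconds; optional, defaults to nil
)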

Instance Attribute Details

#api_key ⇒ Object (readonly)

Returns the value of attribute api_key.



# File 'lib/cohere/client.rb', line 7

def api_key
  @api_key
end

Instance Method Details

#chat(message: nil, model: nil, stream: false, preamble: nil, preamble_override: nil, chat_history: [], conversation_id: nil, prompt_truncation: nil, connectors: [], search_queries_only: false, documents: [], citation_quality: nil, temperature: nil, max_tokens: nil, k: nil, p: nil, seed: nil, frequency_penalty: nil, presence_penalty: nil, tools: [], &block) ⇒ Object



# File 'lib/cohere/client.rb', line 16

def chat(
  message: nil,
  model: nil,
  stream: false,
  preamble: nil,
  preamble_override: nil,
  chat_history: [],
  conversation_id: nil,
  prompt_truncation: nil,
  connectors: [],
  search_queries_only: false,
  documents: [],
  citation_quality: nil,
  temperature: nil,
  max_tokens: nil,
  k: nil,
  p: nil,
  seed: nil,
  frequency_penalty: nil,
  presence_penalty: nil,
  tools: [],
  &block
)
  response = connection.post("chat") do |req|
    req.body = {}

    req.body[:message] = message if message
    req.body[:model] = model if model
    if stream || block
      req.body[:stream] = true
      req.options.on_data = block if block
    end
    req.body[:preamble] = preamble if preamble
    req.body[:preamble_override] = preamble_override if preamble_override
    req.body[:chat_history] = chat_history if chat_history
    req.body[:conversation_id] = conversation_id if conversation_id
    req.body[:prompt_truncation] = prompt_truncation if prompt_truncation
    req.body[:connectors] = connectors if connectors
    req.body[:search_queries_only] = search_queries_only if search_queries_only
    req.body[:documents] = documents if documents
    req.body[:citation_quality] = citation_quality if citation_quality
    req.body[:temperature] = temperature if temperature
    req.body[:max_tokens] = max_tokens if max_tokens
    req.body[:k] = k if k
    req.body[:p] = p if p
    req.body[:seed] = seed if seed
    req.body[:frequency_penalty] = frequency_penalty if frequency_penalty
    req.body[:presence_penalty] = presence_penalty if presence_penalty
    req.body[:tools] = tools if tools
  end
  response.body
end
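
A hedged usage sketch (the model name, message, and response key are illustrative; responses are assumed to be parsed JSON hashes):

# One-shot chat.
result = client.chat(
  message: "Hey! How are you?",
  model: "command-r" # illustrative model name
)
puts result["text"]

# Streaming: the block is forwarded to Faraday's on_data callback
# and receives raw chunks as they arrive.
client.chat(message: "Tell me a story", stream: true) do |chunk, _bytes_received|
  print chunk
end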

#classify(inputs:, examples:, model: nil, present: nil, truncate: nil) ⇒ Object



# File 'lib/cohere/client.rb', line 145

def classify(
  inputs:,
  examples:,
  model: nil,
  present: nil,
  truncate: nil
)
  response = connection.post("classify") do |req|
    req.body = {
      inputs: inputs,
      examples: examples
    }
    req.body[:model] = model if model
    req.body[:present] = present if present
    req.body[:truncate] = truncate if truncate
  end
  response.body
end
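
For illustration, a small classification call; the texts and labels are made up, and the API may require a minimum number of examples per label:

examples = [
  {text: "I love this!", label: "positive"},
  {text: "What a great day", label: "positive"},
  {text: "This is terrible.", label: "negative"},
  {text: "I am not happy", label: "negative"}
]

result = client.classify(
  inputs: ["The service was wonderful"],
  examples: examples
)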

#detect_language(texts:) ⇒ Object



# File 'lib/cohere/client.rb', line 178

def detect_language(texts:)
  response = connection.post("detect-language") do |req|
    req.body = {texts: texts}
  end
  response.body
end
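
A short example; the input strings are arbitrary:

result = client.detect_language(texts: ["Hello", "Здравствуйте", "Bonjour"])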

#detokenize(tokens:, model: nil) ⇒ Object



# File 'lib/cohere/client.rb', line 171

def detokenize(tokens:, model: nil)
  response = connection.post("detokenize") do |req|
    req.body = model.nil? ? {tokens: tokens} : {tokens: tokens, model: model}
  end
  response.body
end
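
A sketch of a round trip through tokenize and detokenize, assuming parsed JSON responses; the model name is illustrative:

tokens = client.tokenize(text: "Hello, world!", model: "command")["tokens"]
text   = client.detokenize(tokens: tokens, model: "command")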

#embed(texts:, model: nil, input_type: nil, truncate: nil) ⇒ Object



# File 'lib/cohere/client.rb', line 107

def embed(
  texts:,
  model: nil,
  input_type: nil,
  truncate: nil
)
  response = connection.post("embed") do |req|
    req.body = {texts: texts}
    req.body[:model] = model if model
    req.body[:input_type] = input_type if input_type
    req.body[:truncate] = truncate if truncate
  end
  response.body
end
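
A hedged example; the model and input_type values are illustrative (newer embedding models may require input_type):

result = client.embed(
  texts: ["The quick brown fox jumped over the lazy dog"],
  model: "embed-english-v3.0",
  input_type: "search_document"
)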

#generate(prompt:, model: nil, num_generations: nil, max_tokens: nil, preset: nil, temperature: nil, k: nil, p: nil, frequency_penalty: nil, presence_penalty: nil, end_sequences: nil, stop_sequences: nil, return_likelihoods: nil, logit_bias: nil, truncate: nil) ⇒ Object

This endpoint generates realistic text conditioned on a given input.



# File 'lib/cohere/client.rb', line 70

def generate(
  prompt:,
  model: nil,
  num_generations: nil,
  max_tokens: nil,
  preset: nil,
  temperature: nil,
  k: nil,
  p: nil,
  frequency_penalty: nil,
  presence_penalty: nil,
  end_sequences: nil,
  stop_sequences: nil,
  return_likelihoods: nil,
  logit_bias: nil,
  truncate: nil
)
  response = connection.post("generate") do |req|
    req.body = {prompt: prompt}
    req.body[:model] = model if model
    req.body[:num_generations] = num_generations if num_generations
    req.body[:max_tokens] = max_tokens if max_tokens
    req.body[:preset] = preset if preset
    req.body[:temperature] = temperature if temperature
    req.body[:k] = k if k
    req.body[:p] = p if p
    req.body[:frequency_penalty] = frequency_penalty if frequency_penalty
    req.body[:presence_penalty] = presence_penalty if presence_penalty
    req.body[:end_sequences] = end_sequences if end_sequences
    req.body[:stop_sequences] = stop_sequences if stop_sequences
    req.body[:return_likelihoods] = return_likelihoods if return_likelihoods
    req.body[:logit_bias] = logit_bias if logit_bias
    req.body[:truncate] = truncate if truncate
  end
  response.body
end
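
A minimal sketch of a generation request; the prompt and parameter values are illustrative:

result = client.generate(
  prompt: "Once upon a time in a magical land called",
  max_tokens: 50,
  temperature: 0.8
)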

#rerank(query:, documents:, model: nil, top_n: nil, rank_fields: nil, return_documents: nil, max_chunks_per_doc: nil) ⇒ Object



# File 'lib/cohere/client.rb', line 122

def rerank(
  query:,
  documents:,
  model: nil,
  top_n: nil,
  rank_fields: nil,
  return_documents: nil,
  max_chunks_per_doc: nil
)
  response = connection.post("rerank") do |req|
    req.body = {
      query: query,
      documents: documents
    }
    req.body[:model] = model if model
    req.body[:top_n] = top_n if top_n
    req.body[:rank_fields] = rank_fields if rank_fields
    req.body[:return_documents] = return_documents if return_documents
    req.body[:max_chunks_per_doc] = max_chunks_per_doc if max_chunks_per_doc
  end
  response.body
end
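
An illustrative rerank call; the query, documents, and model name are made up:

docs = [
  "Carson City is the capital city of the American state of Nevada.",
  "Washington, D.C. is the capital of the United States.",
  "Capital punishment has existed in the United States since before it was a country."
]

result = client.rerank(
  query: "What is the capital of the United States?",
  documents: docs,
  top_n: 1,
  model: "rerank-english-v3.0"
)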

#summarize(text:, length: nil, format: nil, model: nil, extractiveness: nil, temperature: nil, additional_command: nil) ⇒ Object



# File 'lib/cohere/client.rb', line 185

def summarize(
  text:,
  length: nil,
  format: nil,
  model: nil,
  extractiveness: nil,
  temperature: nil,
  additional_command: nil
)
  response = connection.post("summarize") do |req|
    req.body = {text: text}
    req.body[:length] = length if length
    req.body[:format] = format if format
    req.body[:model] = model if model
    req.body[:extractiveness] = extractiveness if extractiveness
    req.body[:temperature] = temperature if temperature
    req.body[:additional_command] = additional_command if additional_command
  end
  response.body
end
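
A hedged example; the endpoint may reject very short inputs, so the placeholder below stands in for a longer passage:

long_text = "..." # replace with a sufficiently long passage

result = client.summarize(
  text: long_text,
  length: "short",
  format: "paragraph",
  extractiveness: "low"
)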

#tokenize(text:, model: nil) ⇒ Object



# File 'lib/cohere/client.rb', line 164

def tokenize(text:, model: nil)
  response = connection.post("tokenize") do |req|
    req.body = model.nil? ? {text: text} : {text: text, model: model}
  end
  response.body
end
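
A short example, assuming responses are parsed JSON hashes; the model name is illustrative:

result = client.tokenize(text: "Hello, world!", model: "command")
result["tokens"] # => an array of token ids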