Class: HuggingFace::InferenceApi
- Inherits: BaseApi
  (Object > BaseApi > HuggingFace::InferenceApi)
- Defined in: lib/hugging_face/inference_api.rb
Constant Summary
- HOST = "https://api-inference.huggingface.co"
- MAX_RETRY = 60
  Retry connecting to the model for 1 minute.

Default models that can be overridden by the 'model' param:
- QUESTION_ANSWERING_MODEL = "distilbert-base-cased-distilled-squad"
- SUMMARIZATION_MODEL = "sshleifer/distilbart-xsum-12-6"
- GENERATION_MODEL = "distilgpt2"
- EMBEDING_MODEL = "sentence-transformers/all-MiniLM-L6-v2"
- SENTIMENT_MODEL = "distilbert-base-uncased-finetuned-sst-2-english"
Constants inherited from BaseApi: BaseApi::HTTP_SERVICE_UNAVAILABLE, BaseApi::JSON_CONTENT_TYPE
Instance Method Summary
- #call(input:, model:) ⇒ Object
- #embedding(input:, model: EMBEDING_MODEL) ⇒ Object
- #question_answering(question:, context:, model: QUESTION_ANSWERING_MODEL) ⇒ Object
- #sentiment(input:, model: SENTIMENT_MODEL) ⇒ Object
- #summarization(input:, model: SUMMARIZATION_MODEL) ⇒ Object
- #text_generation(input:, model: GENERATION_MODEL) ⇒ Object
Methods inherited from BaseApi: #initialize
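All of the helpers documented below are instance methods, so a client has to be constructed first via the inherited #initialize. A minimal sketch, assuming #initialize accepts an api_token keyword (defined on BaseApi) and reading the token from a placeholder environment variable:

require 'hugging_face'

# Assumption: BaseApi#initialize takes an api_token: keyword; the env var name is only a placeholder.
api = HuggingFace::InferenceApi.new(api_token: ENV['HUGGING_FACE_API_TOKEN'])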
Instance Method Details
#call(input:, model:) ⇒ Object
# File 'lib/hugging_face/inference_api.rb', line 15

def call(input:, model:)
  request(connection: connection(model), input: input)
end
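Illustrative usage sketch: #call forwards the input hash to the API as-is, so the caller supplies the full request payload and an explicit model id. Both values below are examples only, using the api client constructed above.

# The payload shape and model id are illustrative assumptions, not fixed by this method.
result = api.call(input: { inputs: 'Ruby is a' }, model: 'distilgpt2')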
#embedding(input:, model: EMBEDING_MODEL) ⇒ Object
# File 'lib/hugging_face/inference_api.rb', line 33

def embedding(input:, model: EMBEDING_MODEL)
  request connection: connection(model), input: { inputs: input }
end
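A usage sketch with the api client from above: the text is wrapped into { inputs: ... } by the method and sent to EMBEDING_MODEL unless model: is overridden.

# Example input text; the returned object is whatever the Inference API responds with.
vector = api.embedding(input: 'Ruby is a dynamic, open source programming language')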
#question_answering(question:, context:, model: QUESTION_ANSWERING_MODEL) ⇒ Object
# File 'lib/hugging_face/inference_api.rb', line 19

def question_answering(question:, context:, model: QUESTION_ANSWERING_MODEL)
  input = { question: question, context: context }

  request connection: connection(model), input: input
end
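A usage sketch with the api client from above: question and context are packed into a hash by the method, and the default QUESTION_ANSWERING_MODEL is used unless model: is passed.

# Question and context strings are arbitrary examples.
answer = api.question_answering(
  question: 'Where do I live?',
  context: 'My name is Sarah and I live in London'
)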
#sentiment(input:, model: SENTIMENT_MODEL) ⇒ Object
# File 'lib/hugging_face/inference_api.rb', line 37

def sentiment(input:, model: SENTIMENT_MODEL)
  request connection: connection(model), input: { inputs: input }
end
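Usage sketch, again with the api client from above and the default SENTIMENT_MODEL:

# Example input text only.
sentiment = api.sentiment(input: 'I really enjoy working with this gem')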
#summarization(input:, model: SUMMARIZATION_MODEL) ⇒ Object
# File 'lib/hugging_face/inference_api.rb', line 25

def summarization(input:, model: SUMMARIZATION_MODEL)
  request connection: connection(model), input: { inputs: input }
end
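Usage sketch with the default SUMMARIZATION_MODEL; any longer passage of text is passed as input:

# The passage below is an arbitrary example to be summarized.
summary = api.summarization(
  input: 'The Inference API hosts many models behind a single HTTP endpoint, so applications ' \
         'can run summarization and other tasks without managing model servers themselves.'
)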
#text_generation(input:, model: GENERATION_MODEL) ⇒ Object
# File 'lib/hugging_face/inference_api.rb', line 29

def text_generation(input:, model: GENERATION_MODEL)
  request connection: connection(model), input: { inputs: input }
end
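Usage sketch with the default GENERATION_MODEL; the prompt is an arbitrary example:

# The model continues the given prompt text.
story = api.text_generation(input: 'Once upon a time')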