import os

# Ensure Hugging Face caches are set to a writable location inside the container
os.environ.setdefault("HF_HOME", "/app/.cache/huggingface")
os.environ.setdefault("TRANSFORMERS_CACHE", "/app/.cache/huggingface")
os.environ.setdefault("XDG_CACHE_HOME", "/app/.cache")

os.makedirs(os.environ["HF_HOME"], exist_ok=True)

from transformers import pipeline

# Create the pipeline directly with the model name
# This will handle tokenizer and model initialization internally
classifier = pipeline(
    task="zero-shot-classification",
    model="MoritzLaurer/DeBERTa-v3-base-mnli-fever-anli",
    device=-1  # Use CPU, change to 0 for GPU if available
)
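
# For reference, a zero-shot-classification pipeline returns a dict with
# "sequence", "labels", and "scores" keys, where "labels" is sorted by score
# (highest first). The scores below are illustrative, not real model output:
#
#   classifier("great product", ["good experience", "bad experience"])
#   -> {"sequence": "great product",
#       "labels": ["good experience", "bad experience"],
#       "scores": [0.97, 0.03]}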

def function_out(review):
    """Classify a review and map the top label to a 1-5 rating."""
    candidate_labels = [
        "worst experience",
        "bad experience",
        "average experience",
        "good experience",
        "excellent experience",
    ]

    # Map each label to its star rating on a 1-5 scale
    dict_candidate = {
        "worst experience": 1,
        "bad experience": 2,
        "average experience": 3,
        "good experience": 4,
        "excellent experience": 5,
    }

    result = classifier(review, candidate_labels, multi_label=False)
    # Labels come back sorted by score, so the first one is the best match
    top_label = result["labels"][0]
    if top_label in dict_candidate:
        return [dict_candidate[top_label], top_label]

# @app.post("/")
# async def output(review: str):
#   my_output = function_out(review)
#   return my_output


# print(function_out("got another gift from the product"))
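
# Sketch of how the commented-out endpoint above could be wired up with FastAPI
# (an assumption; FastAPI is not imported in this file). `review` would be read
# as a query parameter of the POST request:
#
#   from fastapi import FastAPI
#
#   app = FastAPI()
#
#   @app.post("/")
#   async def output(review: str):
#       return function_out(review)

# Minimal local smoke test: downloads the model weights on first run and
# prints a [rating, label] pair for the sample review.
if __name__ == "__main__":
    print(function_out("got another gift from the product"))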