Sana42 commited on
Commit
bea78a8
·
1 Parent(s): 88af59b

Initial FastAPI deployment

Browse files
Files changed (7) hide show
  1. .dockerignore +7 -0
  2. .gitignore +6 -0
  3. Dockerfile +23 -0
  4. README.md +2 -12
  5. download_model.py +14 -0
  6. main.py +24 -0
  7. requirements.txt +7 -0
.dockerignore ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ venv
2
+ __pycache__
3
+ *.pyc
4
+ *.pyo
5
+ *.pyd
6
+ .env
7
+ .model_cache
.gitignore ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ venv/
2
+ model/
3
+ __pycache__/
4
+ *.pyc
5
+ .DS_Store
6
+ .env
Dockerfile ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Dockerfile
FROM python:3.10-slim

WORKDIR /app

# Single source of truth for the model directory: download_model.py saves
# here at build time and main.py loads from here at runtime.
# FIX: main.py defaults to "./model" while the download script saved to
# "./models/xlm-roberta-large-xnli", so the API could never find the weights.
ENV MODEL_PATH=/app/models/xlm-roberta-large-xnli

# system deps
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential curl git && rm -rf /var/lib/apt/lists/*

# copy reqs and install packages
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# copy app and download model during build
COPY download_model.py .
COPY main.py .

# run download script to save model into the image
RUN python download_model.py

# expose and run
EXPOSE 8000
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]
README.md CHANGED
@@ -1,12 +1,2 @@
1
- ---
2
- title: Complaint Classification Ai
3
- emoji: 🏢
4
- colorFrom: indigo
5
- colorTo: pink
6
- sdk: gradio
7
- sdk_version: 5.49.1
8
- app_file: app.py
9
- pinned: false
10
- ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
1
+ # Complaint-Classification-AI-
2
+ This service was built for my FYP, the "Complaint Ecosystem App"; it classifies users' complaints into categories.
 
 
 
 
 
 
 
 
 
 
download_model.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# download_model.py
#
# Build-time helper: downloads the zero-shot classification model from the
# Hugging Face hub and saves it locally so the serving container can load
# it offline.
#
# BUGFIX: this script previously hard-coded "./models/xlm-roberta-large-xnli"
# as the save path while main.py loads from MODEL_PATH (default "./model"),
# so the API container could never find the weights. The save path is now
# read from the same MODEL_PATH variable to keep the two in sync.
import os

from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "joeddav/xlm-roberta-large-xnli"
save_path = os.getenv("MODEL_PATH", "./model")

print(f"Downloading model {model_name}...")
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

print(f"Saving model to {save_path} ...")
tokenizer.save_pretrained(save_path)
model.save_pretrained(save_path)
print("Model saved locally. You can now use it offline!")
main.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# main.py
"""FastAPI service exposing a zero-shot complaint classifier over HTTP."""
import os

from fastapi import FastAPI
from pydantic import BaseModel
from transformers import pipeline

app = FastAPI(title="Complaint Classifier")

# The model directory is baked into the image at build time (see
# download_model.py); override via the MODEL_PATH environment variable.
MODEL_PATH = os.getenv("MODEL_PATH", "./model")
print("Loading model from", MODEL_PATH)
classifier = pipeline("zero-shot-classification", model=MODEL_PATH)


class Request(BaseModel):
    # text: the complaint to classify; labels: candidate categories.
    text: str
    labels: list[str]


@app.post("/classify")
def classify(req: Request):
    """Classify `req.text` against `req.labels`; return best label, its
    score, and the full (label, score) ranking."""
    scores = classifier(req.text, candidate_labels=req.labels)
    ranked = list(zip(scores["labels"], (float(s) for s in scores["scores"])))
    return {
        "label": ranked[0][0],
        "score": ranked[0][1],
        "all": ranked,
    }
requirements.txt ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ fastapi
2
+ uvicorn[standard]
3
+ transformers
4
+ sentencepiece
5
+ protobuf
6
+ torch==2.2.2+cpu
7
+ --extra-index-url https://download.pytorch.org/whl/cpu