Noureddine-khaous committed
Commit 2bd2f62 · 1 Parent(s): a50ebec

Expose LLM API with FastAPI

Files changed (4)
  1. Dockerfile +16 -0
  2. README.md +5 -0
  3. main.py +20 -0
  4. requirements.txt +6 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
+ FROM python:3.9
+
+ RUN useradd -m -u 1000 user
+ WORKDIR /app
+
+ COPY --chown=user requirements.txt requirements.txt
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+ COPY --chown=user . /app
+
+ USER user
+
+ ENV HOME=/home/user \
+     PATH=/home/user/.local/bin:$PATH
+
+ CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
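The container's CMD starts the API with uvicorn on port 7860. The snippet below is only an illustrative local smoke test, not part of this commit; it assumes main.py sits in the working directory and the requirements.txt packages are installed:

# Illustrative only, not part of this commit: run the app locally without Docker.
# Assumes main.py is in the current directory and requirements.txt is installed.
import uvicorn

if __name__ == "__main__":
    # Mirrors the container CMD: serve main:app on 0.0.0.0:7860.
    uvicorn.run("main:app", host="0.0.0.0", port=7860)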
README.md CHANGED
@@ -5,6 +5,11 @@ colorFrom: pink
  colorTo: blue
  sdk: docker
  pinned: false
+ app_port: 7860
  ---

+
  Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+
+
+
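The added app_port entry tells the Docker Space which container port to route traffic to; it matches the port used by the uvicorn CMD in the Dockerfile.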
main.py ADDED
@@ -0,0 +1,20 @@
+ from fastapi import FastAPI
+ from pydantic import BaseModel
+ from transformers import pipeline
+
+ app = FastAPI()
+
+ pipe = pipeline(
+     "text2text-generation",
+     model="google/flan-t5-small"
+ )
+
+ class Prompt(BaseModel):
+     input: str
+
+ @app.post("/generate")
+ def generate(prompt: Prompt):
+     output = pipe(prompt.input)
+     return {
+         "output": output[0]["generated_text"]
+     }
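The /generate endpoint accepts a JSON body matching the Prompt model and returns the generated text. Below is an illustrative client sketch using the requests package pinned in requirements.txt; the base URL is a placeholder for the deployed Space (or http://localhost:7860 when running locally):

# Illustrative client, not part of this commit.
# BASE_URL is a placeholder; point it at the deployed Space or a local server.
import requests

BASE_URL = "http://localhost:7860"

resp = requests.post(
    f"{BASE_URL}/generate",
    json={"input": "Translate English to German: How old are you?"},
)
resp.raise_for_status()
print(resp.json()["output"])  # generated text from flan-t5-small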
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ fastapi==0.74.*
+ requests==2.27.*
+ sentencepiece==0.1.*
+ torch==1.11.*
+ transformers==4.*
+ uvicorn[standard]==0.17.*