First commit

Akil 2025-06-27 15:08:58 +03:00
commit 7eb3d18064
6 changed files with 6420 additions and 0 deletions

.gitignore vendored Normal file

@@ -0,0 +1,5 @@
venv/
.env
__pycache__/
*.pyc
*.pyo

Dockerfile Normal file

@@ -0,0 +1,29 @@
# Use an official Python runtime as a parent image
FROM python:3.9-slim

# Set environment variables (placeholder only; override at runtime, e.g. docker run -e GROQ_API_KEY=...)
ENV GROQ_API_KEY="add your GROQ API key here"

# Set work directory
WORKDIR /app

# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc \
    python3-dev \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements file
COPY ./app/requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy project
COPY ./app /app

# Expose the port the app runs on
EXPOSE 8000

# Command to run the application
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]

README.md Normal file

@@ -0,0 +1,61 @@
# 📖 Quran Tafsir API (Arabic)
This is a FastAPI-based project that provides Quranic verse explanations (tafsir) in **Arabic** using a Large Language Model (LLM) from Groq (`llama3-70b-8192`). You provide a verse key and the corresponding Arabic text, and the API returns a detailed tafsir.
---
## 🔧 Features
- 📖 Accepts verse key (e.g., `2:3`) and verse text in Arabic
- 🤖 Uses Groq's LLM to generate simple, accurate tafsir
- 🚀 Built with FastAPI for fast and easy API development
- 📦 Includes health check endpoint
- 🔐 API key management using `.env` (do **not** commit `.env` to GitHub!)
---
## 📁 Project Structure

quran-tafsir-api/
├── main.py              # FastAPI app
├── quran_arabic.csv     # CSV with Quran verse keys and Arabic text
├── requirements.txt     # Python dependencies
├── Dockerfile           # Container image for the API
├── .env                 # (excluded) Contains your GROQ_API_KEY
├── .gitignore           # Ignore venv, .env, etc.
└── README.md
## Install Dependencies

`pip install -r requirements.txt`
## Environment Variables

Create a `.env` file in the project root:

`GROQ_API_KEY=your-groq-api-key-here`

Make sure to add `.env` to your `.gitignore` to avoid pushing secrets.
## How to Use

### ▶️ Run the FastAPI App

`uvicorn main:app --reload`
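As an alternative to the CLI command above, the server can also be started programmatically. A minimal sketch, not part of this commit; the `run.py` filename is hypothetical and assumes it sits next to `main.py`:

```python
# run.py (hypothetical helper, not part of this commit)
# Starts the same app programmatically instead of via the uvicorn CLI.
import uvicorn

if __name__ == "__main__":
    # reload=True requires the app to be passed as an import string ("main:app").
    uvicorn.run("main:app", host="127.0.0.1", port=8000, reload=True)
```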
### 🔁 Endpoints

1. `POST /tafsir`

Returns Arabic tafsir for a given verse.
Request body:
{
"verse_key": "2:3",
"verse_text": "ٱلَّذِينَ يُؤْمِنُونَ بِٱلْغَيْبِ وَيُقِيمُونَ ٱلصَّلَوٰةَ"
}
Response:
{
"reference": "2:3",
"text": "ٱلَّذِينَ يُؤْمِنُونَ بِٱلْغَيْبِ وَيُقِيمُونَ ٱلصَّلَوٰةَ",
"tafsir": "تفسير الآية باللغة العربية الفصحى ..."
}
2. `GET /health`

A simple health check that also reports how many Quran verses were loaded:
{
"status": "ok",
"quran_verses_loaded": 6236
}
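### 🧪 Example Client (Python)

A minimal client-side sketch, assuming the API is running locally on port 8000 and that the `requests` package is installed (it is not part of `requirements.txt`):

```python
# Hypothetical client script; assumes the API is up at http://localhost:8000
# and that `requests` is installed separately (not in requirements.txt).
import requests

BASE_URL = "http://localhost:8000"

# Health check
print(requests.get(f"{BASE_URL}/health").json())

# Request a tafsir for verse 2:3
payload = {
    "verse_key": "2:3",
    "verse_text": "ٱلَّذِينَ يُؤْمِنُونَ بِٱلْغَيْبِ وَيُقِيمُونَ ٱلصَّلَوٰةَ",
}
response = requests.post(f"{BASE_URL}/tafsir", json=payload)
response.raise_for_status()
print(response.json()["tafsir"])
```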

main.py Normal file

@@ -0,0 +1,81 @@
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from typing import List
from groq import Groq
import pandas as pd
import os
from dotenv import load_dotenv
load_dotenv()
# Initialize FastAPI app
app = FastAPI()
app.add_middleware(
    CORSMiddleware,
    allow_origins=[
        "http://localhost:3000",
        "http://127.0.0.1:3000",
    ],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
# Load Groq API key
api_key = os.getenv("GROQ_API_KEY")
if not api_key:
    raise EnvironmentError("GROQ_API_KEY environment variable not set.")
client = Groq(api_key=api_key)
# Load Quran data
try:
    data = pd.read_csv('quran_arabic.csv', encoding='utf-8')
    new_data = data[['verse_key', 'text_uthmani']]
    verse_dict = dict(zip(new_data['verse_key'], new_data['text_uthmani']))
except FileNotFoundError:
    raise FileNotFoundError("CSV file 'quran_arabic.csv' not found. Please check the path.")
# Request schema
class TafsirRequest(BaseModel):
    verse_key: str   # e.g., "1:2"
    verse_text: str  # e.g., "ٱلْحَمْدُ لِلَّهِ رَبِّ ٱلْعَـٰلَمِينَ"
# Build LLM prompt
def build_arabic_prompt(reference: str, text: str) -> str:
    # Arabic prompt, roughly: "You are a scholar specializing in tafsir of the Holy Quran.
    # Please provide a comprehensive, simplified explanation of the following verse, taking
    # the surah and verse numbers into account. Write the tafsir in Modern Standard Arabic,
    # in a clear style accessible to the general reader."
    return (
        "أنت عالم متخصص في تفسير القرآن الكريم.\n"
        "يرجى تقديم تفسير شامل ومبسط للآية التالية، مع الأخذ بعين الاعتبار رقم السورة ورقم الآية:\n\n"
        f"{reference}\t{text}\n\n"
        "اكتب التفسير باللغة العربية الفصحى وبأسلوب واضح وميسر للقارئ العام."
    )
# Call Groq LLM
def query_llm_arabic(prompt: str) -> str:
    try:
        response = client.chat.completions.create(
            model="llama3-70b-8192",
            messages=[{"role": "user", "content": prompt}],
            temperature=0.3,
        )
        return response.choices[0].message.content
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"LLM Error: {str(e)}")
# Full tafsir workflow
def get_tafsir_from_input(verse_key: str, verse_text: str) -> dict:
    prompt = build_arabic_prompt(verse_key, verse_text)
    tafsir = query_llm_arabic(prompt)
    return {"reference": verse_key, "text": verse_text, "tafsir": tafsir}
# Route: Tafsir
@app.post("/tafsir")
async def get_tafsir(request: TafsirRequest):
    return get_tafsir_from_input(request.verse_key, request.verse_text)
# Health check
@app.get("/health")
def health_check():
    return {"status": "ok", "quran_verses_loaded": len(verse_dict)}

quran_arabic.csv Normal file (6237 lines)

File diff suppressed because it is too large.

requirements.txt Normal file

@@ -0,0 +1,6 @@
fastapi
uvicorn
pandas
pydantic
groq
python-dotenv