diff --git a/.github/workflows/deploy-backend-to-hf.yml b/.github/workflows/deploy-backend-to-hf.yml new file mode 100644 index 00000000..2fc6d7fd --- /dev/null +++ b/.github/workflows/deploy-backend-to-hf.yml @@ -0,0 +1,59 @@ +name: πŸš€ Deploy Backend to HF Space + +on: + push: + branches: + - main # or your primary branch + paths: + - "backend/**" # only trigger when anything under backend/ changes + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - name: πŸ‘‰ Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: πŸ”’ Install HF CLI + run: pip install huggingface_hub + + - name: πŸ”‘ HF login + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + run: huggingface-cli login --token "$HF_TOKEN" + + - name: πŸ“‚ Prepare Space repo + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + run: | + rm -rf space-backend + git clone https://Thunder1245:${HF_TOKEN}@huggingface.co/spaces/Thunder1245/perspective-backend.git space-backend + + - name: πŸ“¦ Install rsync + run: | + sudo apt-get update + sudo apt-get install -y rsync + + - name: πŸ“€ Sync backend code + env: + HF_TOKEN: ${{ secrets.HF_TOKEN }} + run: | + cd space-backend + + # Only remove tracked files (preserve .git and config) + git rm -r . || true + cd .. + + # Copy new backend files in + cp -R backend/. space-backend/ + + # Push new code to HF Space + cd space-backend + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + git add --all + git commit -m "Auto‑deploy backend: ${{ github.sha }}" || echo "No changes to commit" + git push origin main + diff --git a/.gitignore b/.gitignore index ad6b8fb3..17ee8272 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,9 @@ share/python-wheels/ *.egg MANIFEST +.github/act-events/ +.secrets + # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. diff --git a/backend/.dockerignore b/backend/.dockerignore new file mode 100644 index 00000000..076f6a3d --- /dev/null +++ b/backend/.dockerignore @@ -0,0 +1,2 @@ +/.venv +*/.env \ No newline at end of file diff --git a/new-backend/.python-version b/backend/.python-version similarity index 100% rename from new-backend/.python-version rename to backend/.python-version diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 00000000..52fedb3e --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,30 @@ +FROM python:3.13-slim-bookworm + +COPY --from=ghcr.io/astral-sh/uv:0.7.12 /uv /bin/uv + +# Install OS dependencies +RUN apt-get update && apt-get install -y curl build-essential + +# Create app dir and set it as working directory +WORKDIR /app + +# Create a writable cache dir & change ownership to a non-root user +ENV UV_CACHE_DIR=/app/.uv-cache +RUN mkdir -p /app/.uv-cache && \ + adduser --disabled-password --gecos "" appuser && \ + chown -R appuser:appuser /app + +# Copy project code +COPY . 
/app + +# Switch to non-root user +USER appuser + +# Install dependencies +RUN uv sync --locked --no-cache + +# Expose the port Hugging Face looks for +EXPOSE 7860 + +# Start the app +CMD ["uv", "run", "main.py"] diff --git a/backend/README.md b/backend/README.md index e69de29b..4d27c968 100644 --- a/backend/README.md +++ b/backend/README.md @@ -0,0 +1,91 @@ +--- +title: Perspective API 🧠 +emoji: 🧠 +colorFrom: green +colorTo: blue +sdk: docker +sdk_version: "0.100.0" +app_file: Dockerfile +pinned: false +--- + + +# Perspective Backend + + +Welcome to the **Perspective** backend! πŸš€ + +This backend is built with FastAPI and managed using **uv**, a handy Python project tool that simplifies dependency management and running the app. + +--- + +## Getting Started + +### 1. Clone the repo & jump into the backend folder + +```bash +git clone https://github.com/AOSSIE-Org/Perspective.git +cd backend +``` + +### 2. Add new modules easily + +To add any new Python package/module, just run: + +```bash +uv add <package-name> +``` + +Example: + +```bash +uv add fastapi requests +``` + +This will automatically update your `pyproject.toml` and install the package for you. + +*No need to manually create or activate virtual environments β€” uv handles it for you!* + +### 3. Run the server + +Start the backend server with: + +```bash +uv run main.py +``` + +The server will be available at: + +``` +http://localhost:7860/api/ +``` + +--- + +## Important Notes + +* All dependencies are tracked in `pyproject.toml`. +* No manual setup of venv or conda environments is required. +* For full documentation on **uv**, visit: + [https://docs.astral.sh/uv/](https://docs.astral.sh/uv/) + +--- + +## Project Structure (brief) + +``` +backend/ +β”œβ”€β”€ main.py # App entry point +β”œβ”€β”€ pyproject.toml # Dependency & project config +β”œβ”€β”€ uv.lock # Lock file, like package-lock.json +β”œβ”€β”€ .python-version # Python version used by the backend +└── app/ + β”œβ”€β”€ routes/ # API route handlers + β”œβ”€β”€ modules/ # Business logic modules + β”œβ”€β”€ db/ # Database related code + └── utils/ # Utility functions +``` + +--- + +If you hit any issues or want to contribute, feel free to open an issue or PR. 
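A minimal way to sanity-check the backend described in the README above, once `uv run main.py` is running: the sketch below assumes the `/api/process` route and `{"url": ...}` request body that the frontend uses elsewhere in this PR, plus the default port 7860 from `backend/main.py`; the response shape is not pinned down here, so it is simply printed.

```python
# Quick local check of the backend (a sketch; the /api/process route and the
# {"url": ...} payload are taken from the frontend code in this PR, and the
# response shape is an assumption).
import requests

BASE_URL = "http://localhost:7860"  # default port set in backend/main.py

def process_article(article_url: str) -> dict:
    """POST an article URL to the running backend and return the parsed JSON."""
    resp = requests.post(
        f"{BASE_URL}/api/process",
        json={"url": article_url},
        timeout=120,
    )
    resp.raise_for_status()
    return resp.json()

if __name__ == "__main__":
    result = process_article("https://example.com/some-news-article")
    print(result)
```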
diff --git a/backend/app/core/__init__.py b/backend/app/.gitignore similarity index 100% rename from backend/app/core/__init__.py rename to backend/app/.gitignore diff --git a/backend/app/core/config.py b/backend/app/core/config.py deleted file mode 100644 index e69de29b..00000000 diff --git a/backend/app/db/database.py b/backend/app/db/database.py deleted file mode 100644 index e69de29b..00000000 diff --git a/backend/app/db/models.py b/backend/app/db/models.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/db/vector_store.py b/backend/app/db/vector_store.py similarity index 100% rename from new-backend/app/db/vector_store.py rename to backend/app/db/vector_store.py diff --git a/backend/app/main.py b/backend/app/main.py deleted file mode 100644 index 6ab9b842..00000000 --- a/backend/app/main.py +++ /dev/null @@ -1,23 +0,0 @@ -from fastapi import FastAPI -from app.routes import router -from fastapi.middleware.cors import CORSMiddleware -import uvicorn - -app = FastAPI(title="Perspective AI", version="1.0.0") - -app.add_middleware( - CORSMiddleware, - allow_origins=["*"], - allow_credentials=True, - allow_methods=["*"], - allow_headers=["*"], -) - -app.include_router(router) - -@app.get("/") -def home(): - return {"message": "Welcome to the Perspective AI"} - -if __name__ == "__main__": - uvicorn.run(app, host="0.0.0.0", port=8000) diff --git a/backend/app/models/schemas.py b/backend/app/models/schemas.py deleted file mode 100644 index e69de29b..00000000 diff --git a/backend/app/models/__init__.py b/backend/app/modules/__init__.py similarity index 100% rename from backend/app/models/__init__.py rename to backend/app/modules/__init__.py diff --git a/backend/app/prompts/__init__.py b/backend/app/modules/facts_check/__init__.py similarity index 100% rename from backend/app/prompts/__init__.py rename to backend/app/modules/facts_check/__init__.py diff --git a/new-backend/app/modules/facts_check/llm_processing.py b/backend/app/modules/facts_check/llm_processing.py similarity index 100% rename from new-backend/app/modules/facts_check/llm_processing.py rename to backend/app/modules/facts_check/llm_processing.py diff --git a/new-backend/app/modules/facts_check/web_search.py b/backend/app/modules/facts_check/web_search.py similarity index 100% rename from new-backend/app/modules/facts_check/web_search.py rename to backend/app/modules/facts_check/web_search.py diff --git a/new-backend/app/modules/langgraph_builder.py b/backend/app/modules/langgraph_builder.py similarity index 100% rename from new-backend/app/modules/langgraph_builder.py rename to backend/app/modules/langgraph_builder.py diff --git a/backend/app/scrapers/__init__.py b/backend/app/modules/langgraph_nodes/__init__.py similarity index 100% rename from backend/app/scrapers/__init__.py rename to backend/app/modules/langgraph_nodes/__init__.py diff --git a/new-backend/app/modules/langgraph_nodes/error_handler.py b/backend/app/modules/langgraph_nodes/error_handler.py similarity index 100% rename from new-backend/app/modules/langgraph_nodes/error_handler.py rename to backend/app/modules/langgraph_nodes/error_handler.py diff --git a/new-backend/app/modules/langgraph_nodes/fact_check.py b/backend/app/modules/langgraph_nodes/fact_check.py similarity index 100% rename from new-backend/app/modules/langgraph_nodes/fact_check.py rename to backend/app/modules/langgraph_nodes/fact_check.py diff --git a/new-backend/app/modules/langgraph_nodes/generate_perspective.py 
b/backend/app/modules/langgraph_nodes/generate_perspective.py similarity index 100% rename from new-backend/app/modules/langgraph_nodes/generate_perspective.py rename to backend/app/modules/langgraph_nodes/generate_perspective.py diff --git a/new-backend/app/modules/langgraph_nodes/judge.py b/backend/app/modules/langgraph_nodes/judge.py similarity index 100% rename from new-backend/app/modules/langgraph_nodes/judge.py rename to backend/app/modules/langgraph_nodes/judge.py diff --git a/new-backend/app/modules/langgraph_nodes/sentiment.py b/backend/app/modules/langgraph_nodes/sentiment.py similarity index 100% rename from new-backend/app/modules/langgraph_nodes/sentiment.py rename to backend/app/modules/langgraph_nodes/sentiment.py diff --git a/new-backend/app/modules/langgraph_nodes/store_and_send.py b/backend/app/modules/langgraph_nodes/store_and_send.py similarity index 100% rename from new-backend/app/modules/langgraph_nodes/store_and_send.py rename to backend/app/modules/langgraph_nodes/store_and_send.py diff --git a/new-backend/app/modules/pipeline.py b/backend/app/modules/pipeline.py similarity index 100% rename from new-backend/app/modules/pipeline.py rename to backend/app/modules/pipeline.py diff --git a/backend/app/services/__init__.py b/backend/app/modules/scraper/__init__.py similarity index 100% rename from backend/app/services/__init__.py rename to backend/app/modules/scraper/__init__.py diff --git a/new-backend/app/modules/scraper/cleaner.py b/backend/app/modules/scraper/cleaner.py similarity index 100% rename from new-backend/app/modules/scraper/cleaner.py rename to backend/app/modules/scraper/cleaner.py diff --git a/new-backend/app/modules/scraper/extractor.py b/backend/app/modules/scraper/extractor.py similarity index 100% rename from new-backend/app/modules/scraper/extractor.py rename to backend/app/modules/scraper/extractor.py diff --git a/new-backend/app/modules/scraper/keywords.py b/backend/app/modules/scraper/keywords.py similarity index 100% rename from new-backend/app/modules/scraper/keywords.py rename to backend/app/modules/scraper/keywords.py diff --git a/new-backend/app/modules/vector_store/chunk_rag_data.py b/backend/app/modules/vector_store/chunk_rag_data.py similarity index 100% rename from new-backend/app/modules/vector_store/chunk_rag_data.py rename to backend/app/modules/vector_store/chunk_rag_data.py diff --git a/new-backend/app/modules/vector_store/embed.py b/backend/app/modules/vector_store/embed.py similarity index 100% rename from new-backend/app/modules/vector_store/embed.py rename to backend/app/modules/vector_store/embed.py diff --git a/backend/app/prompts/opposite_perspective.py b/backend/app/prompts/opposite_perspective.py deleted file mode 100644 index 9d00df5d..00000000 --- a/backend/app/prompts/opposite_perspective.py +++ /dev/null @@ -1,21 +0,0 @@ -OPPOSITE_PERSPECTIVE_PROMPT = """ - You are an analytical assistant that provides comprehensive alternative viewpoints to news articles and opinion pieces. Given the article text below, create a thoughtful, balanced, and detailed opposite perspective. - - Article text: - {article_text} - - Please follow these steps in your analysis: - 1. Carefully identify the core claims and underlying assumptions of the original article. - 2. Develop a comprehensive opposite perspective that challenges these claims and assumptions. - 3. Support your alternative view with logical arguments, potential evidence, and contextual factors. - 4. 
Consider different value systems, priorities, or interpretations that lead to opposing conclusions. - 5. Maintain a respectful, measured, and analytical tone throughout. - - - """ - -def get_opposite_perspective_prompt(article_text: str) -> str: - """ - Formats the prompt for generating an opposite perspective by injecting the provided article text. - """ - return OPPOSITE_PERSPECTIVE_PROMPT.format(article_text=article_text) \ No newline at end of file diff --git a/backend/app/prompts/related_topics.py b/backend/app/prompts/related_topics.py deleted file mode 100644 index 8015dc2a..00000000 --- a/backend/app/prompts/related_topics.py +++ /dev/null @@ -1,26 +0,0 @@ -RELATED_TOPICS_PROMPT = """ -You are an expert content analyst specializing in identifying nuanced and interconnected themes within articles. - -Guidelines: -- Identify 5-7 topics closely related to the original article's core themes -- For each topic, provide: - 1. A clear, concise topic name - 2. Two distinct perspectives - 3. Potential research or source directions - -Input Article Summary: -{article_text} - -Output Requirements: -- Maintain academic rigor and intellectual depth -- Ensure perspectives offer genuine alternative viewpoints -- Provide actionable research directions - -Format your response as a structured list of related topics with their perspectives. -""" - -def get_related_topics_prompt(article_text: str) -> str: - """ - Formats the prompt for generating related topics by injecting the provided article text. - """ - return RELATED_TOPICS_PROMPT.format(article_text=article_text) \ No newline at end of file diff --git a/backend/app/routes.py b/backend/app/routes.py deleted file mode 100644 index 8d51442b..00000000 --- a/backend/app/routes.py +++ /dev/null @@ -1,69 +0,0 @@ -from fastapi import APIRouter, HTTPException -from pydantic import BaseModel -from app.scrapers.article_scraper import scrape_website -from app.scrapers.clean_data import clean_scraped_data -from app.services.summarization_service import summarize_text -import json -from app.services.counter_service import generate_opposite_perspective -import logging -from typing import List, Optional -import uuid -from sqlalchemy.orm import Session -from app.services.related_topics import generate_related_topics - -router = APIRouter() -logger = logging.getLogger("uvicorn.error") - -class ArticleRequest(BaseModel): - summary: str # summarized article text to generate opposite perspective - -class ScrapURLRequest(BaseModel): - url: str # URL to scrape data from - -class RelatedTopicsRequest(BaseModel): - summary: str # Ensure this matches the frontend's request - -@router.post("/generate-perspective") -def generate_ai_perspective(request: ArticleRequest): - try: - new_perspective = generate_opposite_perspective(request.summary) - logger.info("Generated perspective: %s", new_perspective) - return {"perspective": new_perspective} - except Exception as e: - logger.error("Error in generate-perspective: %s", e) - raise HTTPException(status_code=500, detail="Error generating perspective") - -@router.post("/scrape-and-summarize") -async def scrape_article(article: ScrapURLRequest): - print("huhuh") - try: - if not article.url: - raise HTTPException(status_code=422, detail="URL is required") - - # Scrape the website - print(article.url) - data = scrape_website(article.url) - if data is None: - logger.error("Scraped data is None for URL: %s", article.url) - raise HTTPException(status_code=500, detail="Error scraping the article. 
No data returned.") - logger.info("Scraped data: %s", data) - - # Clean the data (make sure data is a string) - clean = clean_scraped_data(data) - print("Cleaned data: %s", clean) - - # Summarize the text - summary = summarize_text({"inputs": clean}) - print("Summary output: %s", summary) - - # Return summary directly (assuming it's a JSON-serializable object) - return {"summary": summary} - except Exception as e: - logger.error("Error in scrape-and-summarize: %s", e, exc_info=True) - raise HTTPException(status_code=500, detail="Error processing the URL") - - -@router.post("/related-topics") -async def get_related_topics(request: RelatedTopicsRequest): - related_topics = generate_related_topics(request.summary) - return {"topics": related_topics} \ No newline at end of file diff --git a/new-backend/app/__init__.py b/backend/app/routes/__init__.py similarity index 100% rename from new-backend/app/__init__.py rename to backend/app/routes/__init__.py diff --git a/new-backend/app/routes/routes.py b/backend/app/routes/routes.py similarity index 100% rename from new-backend/app/routes/routes.py rename to backend/app/routes/routes.py diff --git a/backend/app/scrapers/article_scraper.py b/backend/app/scrapers/article_scraper.py deleted file mode 100644 index d9c68126..00000000 --- a/backend/app/scrapers/article_scraper.py +++ /dev/null @@ -1,23 +0,0 @@ -import requests -from bs4 import BeautifulSoup - -def scrape_website(url, headers=None): - """ - Scrapes the content of a website and returns the raw HTML. - """ - try: - if headers is None: - headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36' - } - - response = requests.get(url, headers=headers) - response.raise_for_status() # Raise an error for bad responses (4xx, 5xx) - - soup = BeautifulSoup(response.content, 'html.parser') - return soup.get_text(separator=' ', strip=True) # Extract only readable text - - except requests.exceptions.RequestException as e: - print(f"Error: {e}") - return None - diff --git a/backend/app/scrapers/clean_data.py b/backend/app/scrapers/clean_data.py deleted file mode 100644 index 2805e9f7..00000000 --- a/backend/app/scrapers/clean_data.py +++ /dev/null @@ -1,16 +0,0 @@ -import re - -def clean_scraped_data(raw_text): - """ - Cleans the scraped text by removing unwanted characters, multiple spaces, and normalizing case. 
- """ - if raw_text is None: - return None - - # Remove excessive spaces and new lines - clean_text = re.sub(r'\s+', ' ', raw_text).strip() - - # Remove special characters (keeping only letters, numbers, and spaces) - clean_text = re.sub(r'[^a-zA-Z0-9\s.,!?]', '', clean_text) - - return clean_text.lower() # Normalize to lowercase \ No newline at end of file diff --git a/backend/app/services/ai_service.py b/backend/app/services/ai_service.py deleted file mode 100644 index 58395a0e..00000000 --- a/backend/app/services/ai_service.py +++ /dev/null @@ -1,81 +0,0 @@ - - -# import json -# import requests -# import os -# import sys -# import re - -# sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))) - -# from app.prompts.opposite_perspective import get_opposite_perspective_prompt - -# class AIService: -# def __init__(self, api_key=None): -# self.api_key = api_key -# self.model_id = "mistralai/Mistral-7B-Instruct-v0.2" -# self.api_url = f"https://api-inference.huggingface.co/models/{self.model_id}" -# self.headers = {"Authorization": f"Bearer {self.api_key}"} if api_key else {} - -# def clean_article_text(self, article_text): -# """Clean the article text by removing extra whitespace, normalizing quotes, etc.""" -# if not article_text: -# return "" -# cleaned = article_text.strip() -# cleaned = "\n".join(line for line in cleaned.splitlines() if line.strip()) -# return cleaned - -# def get_opposite_perspective(self, article_text): -# prompt_template = get_opposite_perspective_prompt() -# cleaned_text = self.clean_article_text(article_text) -# formatted_prompt = prompt_template.format(article_text=cleaned_text) - -# try: -# print("Sending request to Hugging Face API...") -# payload = { -# "inputs": formatted_prompt, -# "parameters": { -# "max_new_tokens": 2500, # Increased for a more detailed response -# "temperature": 0.7, -# "return_full_text": False -# } -# } -# response = requests.post(self.api_url, headers=self.headers, json=payload) -# if response.status_code != 200: -# return {"error": f"API Error: {response.status_code}", "details": response.text} - -# print("Response received from API, parsing result...") -# result = self._parse_llm_response(response.json()) -# return result -# except Exception as e: -# return {"error": str(e)} - -# def _parse_llm_response(self, response): - -# try: -# if isinstance(response, list) and len(response) > 0: -# raw_text = response[0].get("generated_text", "") -# elif isinstance(response, dict): -# raw_text = response.get("generated_text", "") -# else: -# raw_text = str(response) - -# pat = r"```(.*?)```" -# matches = re.findall(pat, raw_text, re.DOTALL) -# if not matches: -# return {"error": "Could not extract formatted output using regex", "raw_response": raw_text} -# res = matches[0].replace("json", "") -# result = json.loads(res) -# return result -# except Exception as e: -# return {"error": str(e), "raw_response": raw_text if 'raw_text' in locals() else response} - -# def format_result(self, result): -# """ -# Output Format: -# """ -# try: -# formatted_output = "{{\n" + json.dumps(result, indent=2) + "\n}}" -# return formatted_output -# except Exception as e: -# return f"Error formatting result: {e}" diff --git a/backend/app/services/analysis_service.py b/backend/app/services/analysis_service.py deleted file mode 100644 index 390a14ea..00000000 --- a/backend/app/services/analysis_service.py +++ /dev/null @@ -1,22 +0,0 @@ -# # backend/app/services/analysis_service.py - Add a summarization method - -# async def 
generate_article_summary(self, article_content: str, max_length: int = 300) -> str: -# """ -# Generate a concise summary of the article content -# """ -# try: -# # Use your existing AI service to generate a summary -# prompt = f"""Summarize the following article in a concise manner, under {max_length} characters. -# Focus on the main points and key takeaways. - -# Article content: {article_content[:4000]} - -# Summary:""" - -# # Use your existing AI connector -# summary = await self.ai_service.generate_text(prompt) - -# return summary -# except Exception as e: -# self.logger.error(f"Error generating summary: {str(e)}") -# raise \ No newline at end of file diff --git a/backend/app/services/counter_service.py b/backend/app/services/counter_service.py deleted file mode 100644 index 44879188..00000000 --- a/backend/app/services/counter_service.py +++ /dev/null @@ -1,38 +0,0 @@ - -import requests -from app.prompts.opposite_perspective import get_opposite_perspective_prompt -from dotenv import load_dotenv -import os - -load_dotenv() -API_KEY = os.getenv("API_KEY") - - -def generate_opposite_perspective(article_text): - PERSPECTIVE_URL = "https://openrouter.ai/api/v1/chat/completions" - headers = { - "Authorization": f"Bearer {API_KEY}", - "Content-Type": "application/json" - } - - final_prompt = get_opposite_perspective_prompt(article_text) - - payload = { - "model": "deepseek/deepseek-r1-zero:free", - "messages": [ - { - "role": "user", - "content": final_prompt - } - ], - } - - response = requests.post(PERSPECTIVE_URL, headers=headers, json=payload) - result = response.json()['choices'][0]['message']['content'] - - if "Opposite Perspective:" in result: - perspective = result.split("Opposite Perspective:")[-1].strip() - else: - perspective = result.strip() - - return perspective diff --git a/backend/app/services/related_topics.py b/backend/app/services/related_topics.py deleted file mode 100644 index 2181b4fc..00000000 --- a/backend/app/services/related_topics.py +++ /dev/null @@ -1,39 +0,0 @@ - -import requests -import os -import json -from dotenv import load_dotenv - -load_dotenv() -API_KEY = os.getenv("API_KEY") - -def generate_related_topics(summary: str): - url = "https://openrouter.ai/api/v1/chat/completions" - - headers = { - "Authorization": f"Bearer {API_KEY}", - "Content-Type": "application/json" - } - - payload = json.dumps({ - "model": "deepseek/deepseek-r1-zero:free", - "messages": [ - { - "role": "system", - "content": "You are an AI that only generates relevant links to topics based on a given summary." 
- }, - { - "role": "user", - "content": f"Generate a list of 5 relevant online links based on this summary:\n{summary}" - } - ], - }) - - response = requests.post(url, data=payload, headers=headers) - print(response) - if response.status_code == 200: - data = response.json() - ai_response = data["choices"][0]["message"]["content"] - return ai_response - else: - return ["Error fetching related topics"] diff --git a/backend/app/services/summarization_service.py b/backend/app/services/summarization_service.py deleted file mode 100644 index 79e5fa14..00000000 --- a/backend/app/services/summarization_service.py +++ /dev/null @@ -1,52 +0,0 @@ - -import requests -import os -import json -from dotenv import load_dotenv -import logging - -load_dotenv() -openrouter_token = os.getenv("API_KEY") - -Summarization_URL = "https://openrouter.ai/api/v1/chat/completions" -headers = { - "Authorization": f"Bearer {openrouter_token}", - "Content-Type": "application/json", -} - -logger = logging.getLogger("uvicorn.error") - -def summarize_text(payload): - print("h") - try: - openrouter_payload = json.dumps({ - "model": "deepseek/deepseek-r1-zero:free", - "messages": [ - { - "role": "system", - "content": "You are a helpful assistant that provides concise and accurate summaries." - }, - { - "role": "user", - "content": f"Please provide a concise summary of the following text:\n\n{payload['inputs']}" - } - ], - }) - print(openrouter_payload) - - response = requests.post(Summarization_URL, headers=headers, data=openrouter_payload) - - print("Summarization API response status: %s", response.status_code) - print("Summarization API response text: %s", response.text) - - if response.status_code != 200 or not response.text: - raise Exception(f"Summarization API error, status code {response.status_code}") - - summary_response = response.json() - summary = summary_response['choices'][0]['message']['content'] - - return summary - - except Exception as e: - print("Error in summarization service: %s", e) - raise Exception("Error in summarization service: " + str(e)) diff --git a/backend/app/test_perspective.py b/backend/app/test_perspective.py deleted file mode 100644 index d7d99f00..00000000 --- a/backend/app/test_perspective.py +++ /dev/null @@ -1,59 +0,0 @@ - - - -import os -import sys -import json -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) - -from app.services.ai_service import AIService - -def test_opposite_perspective(): - api_key = os.environ.get('HF_API_KEY', None) # set this as an environment variable - ai_service = AIService(api_key=api_key) - - print("=" * 80) - print("OPPOSITE PERSPECTIVE GENERATOR") - print("=" * 80) - print("Enter or paste the article text (press Enter twice when done):") - - lines = [] - while True: - line = input() - if not line: - break - lines.append(line) - - article_text = "\n".join(lines) - - print("\nGenerating comprehensive opposite perspective...\n") - # Get the opposite perspective (as a dictionary) - result = ai_service.get_opposite_perspective(article_text) - - print("\n" + "=" * 80) - print("ANALYSIS RESULTS") - print("=" * 80) - - if "error" in result: - print(f"Error: {result['error']}") - if "raw_response" in result: - print("\nRaw response:") - print(result["raw_response"]) - return - - # Convert the dictionary result to a formatted string with double curly braces. - formatted_result = ai_service.format_result(result) - - print(formatted_result) - - # Optionally, you could also save the formatted result as a file. 
- save = input("\nWould you like to save the results to a file? (y/n): ") - if save.lower() == 'y': - filename = input("Enter filename (default: perspective_analysis.json): ") or "perspective_analysis.json" - with open(filename, 'w') as f: - # Save the formatted string output. - f.write(formatted_result) - print(f"Results saved to {filename}") - -if __name__ == "__main__": - test_opposite_perspective() \ No newline at end of file diff --git a/new-backend/app/utils/fact_check_utils.py b/backend/app/utils/fact_check_utils.py similarity index 100% rename from new-backend/app/utils/fact_check_utils.py rename to backend/app/utils/fact_check_utils.py diff --git a/new-backend/app/utils/generate_chunk_id.py b/backend/app/utils/generate_chunk_id.py similarity index 100% rename from new-backend/app/utils/generate_chunk_id.py rename to backend/app/utils/generate_chunk_id.py diff --git a/backend/app/utils/helpers.py b/backend/app/utils/helpers.py deleted file mode 100644 index e69de29b..00000000 diff --git a/backend/app/utils/logger.py b/backend/app/utils/logger.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/utils/prompt_templates.py b/backend/app/utils/prompt_templates.py similarity index 100% rename from new-backend/app/utils/prompt_templates.py rename to backend/app/utils/prompt_templates.py diff --git a/new-backend/app/utils/store_vectors.py b/backend/app/utils/store_vectors.py similarity index 100% rename from new-backend/app/utils/store_vectors.py rename to backend/app/utils/store_vectors.py diff --git a/new-backend/main.py b/backend/main.py similarity index 93% rename from new-backend/main.py rename to backend/main.py index 62a66e83..04ebe2a0 100644 --- a/new-backend/main.py +++ b/backend/main.py @@ -25,6 +25,6 @@ import uvicorn import os - port = int(os.environ.get("PORT", 8000)) + port = int(os.environ.get("PORT", 7860)) print(f"Server is running on http://0.0.0.0:{port}") uvicorn.run(app, host="0.0.0.0", port=port) diff --git a/new-backend/pyproject.toml b/backend/pyproject.toml similarity index 95% rename from new-backend/pyproject.toml rename to backend/pyproject.toml index 4aa8280a..e402c550 100644 --- a/new-backend/pyproject.toml +++ b/backend/pyproject.toml @@ -19,7 +19,6 @@ dependencies = [ "newspaper3k>=0.2.8", "nltk>=3.9.1", "pinecone>=7.3.0", - "pinecone-client>=6.0.0", "rake-nltk>=1.0.6", "readability-lxml>=0.8.4.1", "requests>=2.32.3", diff --git a/backend/requirements.txt b/backend/requirements.txt deleted file mode 100644 index b3691053..00000000 --- a/backend/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -fastapi -uvicorn -requests -beautifulsoup4 -python-dotenv - -newspaper3k -httpx -python-dotenv \ No newline at end of file diff --git a/new-backend/start.sh b/backend/start.sh similarity index 100% rename from new-backend/start.sh rename to backend/start.sh diff --git a/new-backend/uv.lock b/backend/uv.lock similarity index 99% rename from new-backend/uv.lock rename to backend/uv.lock index 5ebe17cf..71d1cb61 100644 --- a/new-backend/uv.lock +++ b/backend/uv.lock @@ -940,7 +940,6 @@ dependencies = [ { name = "newspaper3k" }, { name = "nltk" }, { name = "pinecone" }, - { name = "pinecone-client" }, { name = "rake-nltk" }, { name = "readability-lxml" }, { name = "requests" }, @@ -965,7 +964,6 @@ requires-dist = [ { name = "newspaper3k", specifier = ">=0.2.8" }, { name = "nltk", specifier = ">=3.9.1" }, { name = "pinecone", specifier = ">=7.3.0" }, - { name = "pinecone-client", specifier = ">=6.0.0" }, { name = "rake-nltk", specifier = 
">=1.0.6" }, { name = "readability-lxml", specifier = ">=0.8.4.1" }, { name = "requests", specifier = ">=2.32.3" }, @@ -1271,22 +1269,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/a6/c5d54a5fb1de3983a8739c1a1660e7a7074db2cbadfa875b823fcf29b629/pinecone-7.3.0-py3-none-any.whl", hash = "sha256:315b8fef20320bef723ecbb695dec0aafa75d8434d86e01e5a0e85933e1009a8", size = 587563, upload-time = "2025-06-27T20:03:50.249Z" }, ] -[[package]] -name = "pinecone-client" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "pinecone-plugin-interface" }, - { name = "python-dateutil" }, - { name = "typing-extensions" }, - { name = "urllib3", marker = "python_full_version < '4.0'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6c/ab/3ab3b81e8ad82fbfcaa4f446c7f962b18968d61543c8c9e2c38bd777c056/pinecone_client-6.0.0.tar.gz", hash = "sha256:f224fc999205e4858c4737c40922bdf42d178b361c8859bc486ec00d45b359a9", size = 7004, upload-time = "2025-02-21T19:47:38.899Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/e4/7780cd631dc6dad0172a245e958b41b28a70779594c0790fa08b952aa97f/pinecone_client-6.0.0-py3-none-any.whl", hash = "sha256:d81a9e73cae441e4ab6dfc9c1d8b51c9895dae2488cda64f3e21b9dfc10c8d94", size = 6654, upload-time = "2025-02-21T19:47:37.23Z" }, -] - [[package]] name = "pinecone-plugin-assistant" version = "1.7.0" diff --git a/frontend/app/analyze/loading/page.tsx b/frontend/app/analyze/loading/page.tsx index 8c5ba033..055a1a08 100644 --- a/frontend/app/analyze/loading/page.tsx +++ b/frontend/app/analyze/loading/page.tsx @@ -61,7 +61,7 @@ export default function LoadingPage() { setArticleUrl(storedUrl) try { - const res = await axios.post("https://Thunder1245-perspective1.hf.space/api/process", { + const res = await axios.post("https://Thunder1245-perspective-backend.hf.space/api/process", { url: storedUrl, }) diff --git a/new-backend/Dockerfile b/new-backend/Dockerfile deleted file mode 100644 index f475f1fb..00000000 --- a/new-backend/Dockerfile +++ /dev/null @@ -1,28 +0,0 @@ -FROM python:3.11-slim - -# Optional: install build tools (for C deps) -RUN apt-get update && apt-get install -y curl build-essential - -# Install uv -RUN curl -LsSf https://astral.sh/uv/install.sh | sh - -# Add uv to PATH (it's installed at /root/.cargo/bin) -ENV PATH="/root/.cargo/bin:${PATH}" - -# Set workdir -WORKDIR /app - -# Copy lock and pyproject -COPY uv.lock pyproject.toml ./ - -# Sync deps using uv -RUN uv sync - -# Copy full project -COPY . . - -# Expose HF port -EXPOSE 7860 - -# Run app using uv -CMD ["uv", "run", "main.py"] diff --git a/new-backend/README.md b/new-backend/README.md deleted file mode 100644 index 72ebda93..00000000 --- a/new-backend/README.md +++ /dev/null @@ -1,79 +0,0 @@ - -# Perspective Backend - -Welcome to the **Perspective** backend! πŸš€ - -This backend is built with FastAPI and managed using **uv**, a handy Python project tool that simplifies dependency management and running the app. - ---- - -## Getting Started - -### 1. Clone the repo & jump into backend folder - -```bash -git clone https://github.com/AOSSIE-Org/Perspective.git -cd new-backend -```` - -### 2. Add new modules easily - -To add any new Python package/module, just run: - -```bash -uv add -``` - -Example: - -```bash -uv add fastapi requests -``` - -This will automatically update your `pyproject.toml` and install the package for you. 
- -*No need to manually create or activate virtual environments β€” uv handles it for you!* - -### 3. Run the server - -Start the backend server with: - -```bash -uv run main.py -``` - -The server will be available at: - -``` -http://localhost:8000/api/ -``` - ---- - -## Important Notes - -* All dependencies are tracked in `pyproject.toml`. -* No manual setup of venv or conda environments is required. -* For full documentation on **uv**, visit: - [https://docs.astral.sh/uv/](https://docs.astral.sh/uv/) - ---- - -## Project Structure (brief) - -``` -new-backend/ -β”œβ”€β”€ main.py # App entry point -β”œβ”€β”€ pyproject.toml # Dependency & project config -β”œβ”€β”€ uv.lock # .loc file like package-lock.json -β”œβ”€β”€ .python-version # Python version used by the backend -└── app/ - β”œβ”€β”€ routes/ # API route handlers - β”œβ”€β”€ components/ # Business logic components - β”œβ”€β”€ db/ # Database related code - └── utils/ # Utility functions -``` - ---- - -If you hit any issues or want to contribute, feel free to open an issue or PR. diff --git a/new-backend/app/db/__init__.py b/new-backend/app/db/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/modules/__init__.py b/new-backend/app/modules/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/modules/facts_check/__init__.py b/new-backend/app/modules/facts_check/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/modules/langgraph_nodes/__init__.py b/new-backend/app/modules/langgraph_nodes/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/modules/scraper/__init__.py b/new-backend/app/modules/scraper/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/routes/__init__.py b/new-backend/app/routes/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/new-backend/app/utils/__init__.py b/new-backend/app/utils/__init__.py deleted file mode 100644 index e69de29b..00000000
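Once the deploy workflow above has pushed a commit to the Space, a small smoke test can confirm the deployment answers the same request the frontend now sends. This is only a sketch under assumptions: the Space URL and `/api/process` payload mirror the frontend change in this PR, the long timeout allows for a Space cold start, and nothing in the PR specifies the response fields, so only the status code is checked.

```python
# Minimal post-deploy smoke test for the Hugging Face Space (a sketch; the
# Space URL and request shape mirror the frontend change in this PR).
import sys
import requests

SPACE_URL = "https://Thunder1245-perspective-backend.hf.space"

def smoke_test(article_url: str) -> bool:
    """Return True if the deployed Space answers the request the frontend sends."""
    try:
        resp = requests.post(
            f"{SPACE_URL}/api/process",
            json={"url": article_url},
            timeout=300,  # HF Spaces can take a while to cold-start
        )
        return resp.status_code == 200
    except requests.RequestException as exc:
        print(f"Smoke test failed: {exc}")
        return False

if __name__ == "__main__":
    ok = smoke_test("https://example.com/some-news-article")
    sys.exit(0 if ok else 1)
```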