Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions new-backend/app/modules/langgraph_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,11 @@


def build_langgraph():
"""
Constructs and compiles a state graph workflow for sequential text processing tasks.

The workflow consists of sentiment analysis, fact checking, perspective generation, and judgment steps, with conditional reruns based on a score threshold. The process concludes by storing and sending the results. Returns the compiled graph ready for execution.
"""
graph = StateGraph()

graph.add_node(
Expand Down
16 changes: 16 additions & 0 deletions new-backend/app/modules/langgraph_nodes/fact_check.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,26 @@
# web search + fact check

def search_web(query=None):
    """
    Placeholder for a web search function.

    The caller in run_fact_check invokes this with a single positional
    query string (the article text joined with its keywords); the
    previous zero-argument signature made every such call raise
    TypeError. Accepting an optional ``query`` keeps no-argument calls
    working while fixing the caller.

    Args:
        query: The search query string to look up. Defaults to None.

    Returns:
        None. No search is performed yet; callers must tolerate the
        absent results until a real implementation is added.
    """
    # NOTE(review): still a stub — wire up a real search backend and
    # return a list of {"snippet", "url"} dicts as run_fact_check expects.
    return None


def run_fact_check(state):
"""
Performs a web-based fact check using provided text and keywords.

Combines the input text and keywords into a search query, retrieves web search results, and returns a list of fact snippets and their corresponding URLs.

Args:
state: A dictionary containing "text" (the statement to check) and "keywords" (a list of related terms).

Returns:
A dictionary with a "facts" key mapping to a list of sources, each containing a "snippet" and "url".
"""
text = state["text"]
keywords = state["keywords"]
results = search_web(text + " " + " ".join(keywords))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,15 @@


def generate_perspective(state):
"""
Generates a reasoned opposing perspective on an article using provided factual snippets.

Args:
state: A dictionary containing "text" (the article) and "facts" (a list of fact dictionaries with "snippet" fields).

Returns:
A dictionary with the generated opposing perspective under the key "perspective".
"""
text = state["text"]
facts = "\n".join([f["snippet"] for f in state["facts"]])
result = chain.run({"text": text, "facts": facts})
Expand Down
12 changes: 12 additions & 0 deletions new-backend/app/modules/langgraph_nodes/judge.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,16 @@
def judge_perspective(state):
"""
Evaluates the 'perspective' in the given state and assigns a score.

Assigns a score of 85 if the substring "reasoned" is present in the perspective;
otherwise, assigns a score of 40. Returns the score in a dictionary.

Args:
state: A dictionary containing a 'perspective' key.

Returns:
A dictionary with the assigned score under the key 'score'.
"""
perspective = state["perspective"]
# Dummy scoring
score = 85 if "reasoned" in perspective else 40
Expand Down
9 changes: 9 additions & 0 deletions new-backend/app/modules/langgraph_nodes/sentiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,15 @@


def run_sentiment(state):
    """
    Score the sentiment of the text held in the workflow state.

    Args:
        state: Workflow state dict; the "text" key holds the string
            to analyze.

    Returns:
        A dict with "sentiment" (the predicted label) and
        "sentiment_score" (the model's confidence) taken from the top
        prediction of the sentiment pipeline.
    """
    # The pipeline returns a list of predictions; only the top one is used.
    top_prediction = sentiment_pipeline(state["text"])[0]
    return {
        "sentiment": top_prediction["label"],
        "sentiment_score": top_prediction["score"],
    }
5 changes: 5 additions & 0 deletions new-backend/app/modules/langgraph_nodes/store_and_send.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,11 @@

def store_and_send(state):
# to store data in vector db
"""
Stores selected data from the input state in a vector database and returns a success status.

Extracts the "text", "perspective", and "facts" fields from the provided state dictionary, stores them in a vector database, and returns a dictionary indicating successful completion.
"""
save_to_vector_db({
"text": state["text"],
"perspective": state["perspective"],
Expand Down
20 changes: 20 additions & 0 deletions new-backend/app/modules/pipeline.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,17 @@


def run_scraper_pipeline(url: str) -> dict:
"""
Extracts and processes article content from a given URL.

The function retrieves the article text from the specified URL, cleans the extracted text, and identifies relevant keywords. Returns a dictionary containing the cleaned text and extracted keywords.

Args:
url: The URL of the article to process.

Returns:
A dictionary with 'cleaned_text' and 'keywords' keys.
"""
extractor = Article_extractor(url)
raw_text = extractor.extract()

Expand All @@ -25,6 +36,15 @@ def run_scraper_pipeline(url: str) -> dict:


def run_langgraph_workflow(state: dict):
    """
    Build the langgraph workflow and execute it with the provided state.

    Args:
        state: A dictionary representing the initial state for the
            workflow.

    Returns:
        The result produced by invoking the compiled workflow graph
        with the given state.
    """
    # Compile a fresh graph per call and run it against the input state.
    return build_langgraph().invoke(state)
6 changes: 6 additions & 0 deletions new-backend/app/routes/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,12 @@ async def home():

@router.post("/process")
async def run_pipelines(request: URlRequest):
"""
Processes a URL by extracting article text and running a language workflow.

Accepts a request containing a URL, extracts the article text using a scraper pipeline,
and processes the extracted text through a language workflow. Returns the result of the workflow.
"""
article_text = run_scraper_pipeline(request.url)
print(json.dumps(article_text, indent=2))
data = run_langgraph_workflow(article_text)
Expand Down