Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integration of Goal Setting & Reflection to AI Mentor #235

Draft
wants to merge 49 commits into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
49 commits
Select commit Hold shift + click to select a range
61444c6
add loading state
milesha Dec 17, 2024
7cb81bf
add initial system message generation
milesha Dec 17, 2024
3ec8033
Merge branch 'develop' into feature/auto-start-mentor-chat
milesha Dec 17, 2024
59a11d1
reformat code
milesha Dec 17, 2024
c21b144
format code
milesha Dec 17, 2024
b15254d
Start git integration
milesha Dec 18, 2024
1276f20
revert changes
milesha Dec 18, 2024
5afbf94
improve code structure
milesha Dec 18, 2024
13be91e
Update IS structure
milesha Dec 18, 2024
a6ec666
format code
milesha Dec 18, 2024
1682809
start integrating logic
milesha Dec 20, 2024
4832305
add prompts
milesha Dec 20, 2024
a75fd75
fix folder naming + update response generation function
milesha Dec 20, 2024
7f23184
Merge branch 'feature/auto-start-mentor-chat' into feature/integrate-…
milesha Dec 20, 2024
e204a70
start development
milesha Dec 23, 2024
9ef03f3
Memory integration
milesha Jan 10, 2025
2be7ae0
Merge branch 'develop' into feature/integrate-progress-mentor-chat
milesha Jan 10, 2025
a7b4bdc
Update prompts
milesha Jan 20, 2025
e179333
Refactor chat session handling and add chat summary component
milesha Jan 20, 2025
c6fae1a
Fix formatting
milesha Jan 20, 2025
bb89af7
Update prompts
milesha Jan 20, 2025
b102975
Merge branch 'develop' into feature/integrate-progress-mentor-chat
milesha Jan 21, 2025
4e8f09d
update to angular v19
milesha Jan 21, 2025
eff6852
fix formatting and prompting
milesha Jan 21, 2025
99b4974
fix formatting
milesha Jan 21, 2025
676487b
update model initialization to use gpt-4o
milesha Jan 21, 2025
df79007
add method to find assigned pull requests updated since a specific date
milesha Jan 21, 2025
2af662e
change summary layout
milesha Jan 21, 2025
b13d704
Merge branch 'feature/integrate-progress-mentor-chat' into feature/me…
milesha Jan 21, 2025
7d5cca6
Refactor mentor state and routing logic to add github issues integration
milesha Jan 22, 2025
49952d6
format code
milesha Jan 22, 2025
9ca76c6
fix poetry, add extras to psycopg
milesha Jan 22, 2025
04b955d
Merge branch 'feature/integrate-progress-mentor-chat' into feature/me…
milesha Jan 22, 2025
825e107
fix the session order
milesha Jan 23, 2025
4d9e09d
Refactor message parsing logic + update prompt formats for clarity.
milesha Jan 23, 2025
a66f84f
formatting fixes
milesha Jan 23, 2025
cd4763d
fix formatting
milesha Jan 23, 2025
752d19c
add extra check for prs
milesha Jan 23, 2025
3b222fa
add dev_progress integration to the impediments
milesha Jan 23, 2025
54c1527
initial goal and reflection implementation
milesha Jan 23, 2025
21a751d
re-structuring of the node files
milesha Jan 24, 2025
ea7f45a
Merge branch 'develop' into feature/mentor-reflections-goals
milesha Jan 25, 2025
5ba2099
fix message content
milesha Jan 26, 2025
cbe81a0
further goal setting integration
milesha Jan 27, 2025
d7bd58e
bug + format fixing
milesha Jan 27, 2025
43af341
update prompts
milesha Jan 27, 2025
90dd5ea
Merge branch 'develop' into feature/mentor-reflections-goals
milesha Jan 28, 2025
78368ba
fix styling
milesha Jan 28, 2025
8f61e53
refactor goal handling and prompt logic in memory and state updates
milesha Feb 11, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@
@JsonPropertyOrder({
MentorStartRequest.JSON_PROPERTY_DEV_PROGRESS,
MentorStartRequest.JSON_PROPERTY_PREVIOUS_SESSION_ID,
MentorStartRequest.JSON_PROPERTY_SESSION_ID
MentorStartRequest.JSON_PROPERTY_SESSION_ID,
MentorStartRequest.JSON_PROPERTY_USER_ID
})
@jakarta.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", comments = "Generator version: 7.7.0")
public class MentorStartRequest {
Expand All @@ -43,6 +44,9 @@ public class MentorStartRequest {
public static final String JSON_PROPERTY_SESSION_ID = "session_id";
private String sessionId;

public static final String JSON_PROPERTY_USER_ID = "user_id";
private String userId;

public MentorStartRequest() {
}

Expand Down Expand Up @@ -121,6 +125,31 @@ public void setSessionId(String sessionId) {
this.sessionId = sessionId;
}

// Fluent builder-style setter; returns this so calls can be chained.
// NOTE(review): this class is OpenAPI-generated (@Generated above) —
// hand-added comments will be lost on the next codegen run.
public MentorStartRequest userId(String userId) {

this.userId = userId;
return this;
}

/**
* Get userId
* @return userId
*/
@jakarta.annotation.Nonnull
@JsonProperty(JSON_PROPERTY_USER_ID)
@JsonInclude(value = JsonInclude.Include.ALWAYS)

public String getUserId() {
return userId;
}


// Jackson uses this setter when deserializing the "user_id" JSON property.
@JsonProperty(JSON_PROPERTY_USER_ID)
@JsonInclude(value = JsonInclude.Include.ALWAYS)
public void setUserId(String userId) {
this.userId = userId;
}

@Override
public boolean equals(Object o) {
if (this == o) {
Expand All @@ -132,12 +161,13 @@ public boolean equals(Object o) {
MentorStartRequest mentorStartRequest = (MentorStartRequest) o;
return Objects.equals(this.devProgress, mentorStartRequest.devProgress) &&
Objects.equals(this.previousSessionId, mentorStartRequest.previousSessionId) &&
Objects.equals(this.sessionId, mentorStartRequest.sessionId);
Objects.equals(this.sessionId, mentorStartRequest.sessionId) &&
Objects.equals(this.userId, mentorStartRequest.userId);
}

@Override
public int hashCode() {
// Includes the newly added userId field so hashCode stays consistent
// with equals(), which also compares userId.
return Objects.hash(devProgress, previousSessionId, sessionId, userId);
}

@Override
Expand All @@ -147,6 +177,7 @@ public String toString() {
sb.append(" devProgress: ").append(toIndentedString(devProgress)).append("\n");
sb.append(" previousSessionId: ").append(toIndentedString(previousSessionId)).append("\n");
sb.append(" sessionId: ").append(toIndentedString(sessionId)).append("\n");
sb.append(" userId: ").append(toIndentedString(userId)).append("\n");
sb.append("}");
return sb.toString();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,10 +44,10 @@ public MessageDTO sendMessage(String content, Long sessionId) {
Session currentSession = session.get();

// Prevent sending messages to closed sessions
Session previousSession = sessionRepository
Session previouSession = sessionRepository
.findFirstByUserOrderByCreatedAtDesc(currentSession.getUser())
.orElse(null);
if (previousSession != null && previousSession.isClosed()) {
if (previouSession != null && previouSession.isClosed()) {
return null;
}

Expand All @@ -70,7 +70,7 @@ public MessageDTO sendMessage(String content, Long sessionId) {

return MessageDTO.fromMessage(savedMentorMessage);
} catch (Exception e) {
// prevent saving empty system messages if the intelligence service is down
// Prevent saving empty system messages if the intelligence service is down
logger.error("Failed to generate response for message: {}", content);
return null;
}
Expand All @@ -82,10 +82,11 @@ public void sendFirstMessage(Session session, String previousSessionId, String d
mentorStartRequest.setPreviousSessionId(previousSessionId);
mentorStartRequest.setSessionId(String.valueOf(session.getId()));
mentorStartRequest.setDevProgress(devProgress);
mentorStartRequest.setUserId(Long.toString(session.getUser().getId()));
MentorResponse mentorMessage = intelligenceServiceApi.startMentorStartPost(mentorStartRequest);
createMentorMessage(session, mentorMessage.getContent());
} catch (Exception e) {
// prevent saving empty system messages if the intelligence service is down
// Prevent saving empty system messages if the intelligence service is down
logger.error("Failed to generate response during session start");
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,9 @@ public SessionDTO createSession(User user) {
.orElse("");
// Close the previous session if it exists to prevent multiple open sessions
if (previousSessionId != "") {
Session previousSession = sessionRepository.findFirstByUserOrderByCreatedAtDesc(user).get();
previousSession.setClosed(true);
sessionRepository.save(previousSession);
Session previous_session = sessionRepository.findFirstByUserOrderByCreatedAtDesc(user).get();
previous_session.setClosed(true);
sessionRepository.save(previous_session);
}

// Get the last time interval's PRs
Expand All @@ -73,7 +73,6 @@ public SessionDTO createSession(User user) {
.toList();
String devProgress = formatPullRequests(pullRequests);

// create a new session
Session session = new Session();
session.setUser(user);
Session savedSession = sessionRepository.save(session);
Expand Down
24 changes: 20 additions & 4 deletions server/intelligence-service/app/mentor/conditions.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,9 @@ def start_router(state: State):


def main_router(state: State):
if state["development"]:
if state["goal_setting"]:
return "check_goals"
elif state["development"]:
return "development_node"
elif state["status"]:
return "status_node"
Expand All @@ -19,7 +21,21 @@ def main_router(state: State):
return "promises_node"
elif state["summary"]:
return "summary_node"
elif state["finish"]:
elif state["goal_reflection"]:
return "check_goal_reflection"
else: # state["finish"]
return "finish_node"
else:
return "mentor_node"


def goal_setting_router(state: State):
    """Route after the goal-check node: keep talking or persist the goals.

    check_goals clears the ``goal_setting`` flag once goal setting is done,
    at which point we hand off to the node that stores the goals.
    """
    still_setting = state["goal_setting"]
    return "goal_setting_node" if still_setting else "set_goals"


def goal_reflection_router(state: State):
    """Route after the reflection-check node: keep reflecting or adjust goals.

    check_goal_reflection clears the ``goal_reflection`` flag once the
    reflection is finished, at which point the stored goals get adjusted.
    """
    still_reflecting = state["goal_reflection"]
    return "goal_reflection_node" if still_reflecting else "adjust_goals"
86 changes: 86 additions & 0 deletions server/intelligence-service/app/mentor/nodes/memory_updates.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
from ..state import State
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from ...model import model
from uuid import uuid4
from langchain_core.runnables.config import RunnableConfig
from langgraph.store.base import BaseStore
from ..prompt_loader import PromptLoader

prompt_loader = PromptLoader()


# updating the long-term session memory with the sprint progress: impediments and promises
def update_memory(state: State, config: RunnableConfig, *, store: BaseStore):
    """Persist sprint-progress notes to the long-term session memory.

    For each topic (impediments, promises) an analyzer prompt summarises
    the conversation and the result is stored under the
    ``(session_id, "summary")`` namespace with a random key.
    """
    session_id = config["configurable"]["thread_id"]
    namespace = (session_id, "summary")

    for topic in ("impediments", "promises"):
        system_text = prompt_loader.get_prompt(
            type="analyzer", name="update_memory"
        ).format_map({"step": topic})
        template = ChatPromptTemplate(
            [
                ("system", system_text),
                MessagesPlaceholder("messages"),
            ]
        )
        summary = (template | model).invoke({"messages": state["messages"]}).content
        store.put(namespace, key=str(uuid4()), value={topic: summary})

    return


def set_goals(state: State, config: RunnableConfig, *, store: BaseStore):
    """Extract the user's goals from the conversation and persist them.

    The model-generated goal list is stored under the ``(user_id, "goals")``
    namespace with a random key so later sessions can retrieve and reflect
    on it (see adjust_goals).
    """
    user_id = state["user_id"]
    namespace = (user_id, "goals")

    prompt = ChatPromptTemplate(
        [
            (
                "system",
                prompt_loader.get_prompt(type="analyzer", name="set_goals"),
            ),
            MessagesPlaceholder("messages"),
        ]
    )

    chain = prompt | model
    response = chain.invoke({"messages": state["messages"]}).content
    # Fixed: removed leftover debug print of the raw model response.
    store.put(namespace, key=str(uuid4()), value={"goal_list": response})
    return


def adjust_goals(state: State, config: RunnableConfig, *, store: BaseStore):
    """Revise the stored goal list based on the reflection conversation.

    Loads the most recent ``goal_list`` entry from the ``(user_id, "goals")``
    namespace, feeds it into the prompt, and stores the model's adjusted
    list back under a fresh key.
    """
    user_id = state["user_id"]
    namespace = (user_id, "goals")
    # TODO: check the position of the goal in the list
    stored = store.search(namespace)
    goals = ""
    # Fixed: previously, if entries existed but none carried "goal_list",
    # `goals` stayed bound to the raw search-result list; now it falls
    # back to "" in that case as well.
    for item in stored or []:
        if "goal_list" in item.value:
            goals = item.value["goal_list"]

    prompt = ChatPromptTemplate(
        [
            (
                "system",
                # Fixed: format the prompt with the retrieved goals instead
                # of the literal string "goals".
                # NOTE(review): the "update_memory" prompt is formatted with
                # a {step} placeholder elsewhere — confirm this should not
                # use a dedicated goal-adjustment prompt instead.
                prompt_loader.get_prompt(
                    type="analyzer", name="update_memory"
                ).format_map({"goals": goals}),
            ),
            MessagesPlaceholder("messages"),
        ]
    )

    chain = prompt | model
    response = chain.invoke({"messages": state["messages"]}).content
    # Fixed: removed leftover debug print of the raw model response.
    store.put(namespace, key=str(uuid4()), value={"goal_list": response})

    return
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
from .state import State
from ..state import State
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from ..model import model
from ...model import model
from uuid import uuid4
from langchain_core.runnables.config import RunnableConfig
from langgraph.store.base import BaseStore
from .prompt_loader import PromptLoader
from ..prompt_loader import PromptLoader

prompt_loader = PromptLoader()
persona_prompt = prompt_loader.get_prompt(type="mentor", name="persona")
Expand All @@ -21,7 +20,7 @@ def greet(state: State):

return {
"messages": [chain.invoke({"messages": state["messages"]})],
"development": True, # directly update the state to the next step
"goal_setting": True, # directly update the state to the next possible step
}


Expand All @@ -39,7 +38,6 @@ def get_dev_progress(state: State):
]
)
chain = prompt | model

return {
"messages": [chain.invoke({"messages": state["messages"]})],
"development": False,
Expand Down Expand Up @@ -154,76 +152,54 @@ def finish(state: State):
"messages": [chain.invoke({"messages": state["messages"]})],
"finish": False,
"closed": True,
"mentor_node": True,
}


# node responsible for checking the state of the conversation and updating it accordingly
def check_state(state: State):
if state["development"]:
# call dev_progress node only if there is development progress to show
if state["dev_progress"] == "":
return {"development": False, "status": True}
else:
return

step_order = ["status", "impediments", "promises", "summary", "finish"]
step = next((key for key in step_order if state.get(key)), None)
if not step:
return # exit early if no step is active without state update

# generate responses after the user has finished the project update
def talk_to_mentor(state: State):
prompt = ChatPromptTemplate(
[
(
"system",
prompt_loader.get_prompt(
type="analyzer", name="check_state"
).format_map({"step": step}),
),
("system", persona_prompt),
MessagesPlaceholder("messages"),
]
)

chain = prompt | model
return {"messages": [chain.invoke({"messages": state["messages"]})]}

if chain.invoke({"messages": state["messages"]}).content == "YES":
step_index = step_order.index(step)
if step_index < len(step_order) - 1:
next_step = step_order[step_index + 1]
return {step: False, next_step: True}
return


# node responsible for updating the long-term session memory, that can be used across multiple sessions
def update_memory(state: State, config: RunnableConfig, *, store: BaseStore):
session_id = config["configurable"]["thread_id"]
namespace = (session_id, "summary")
steps = ["impediments", "promises"] # steps to process

for step in steps:
prompt = ChatPromptTemplate(
[
(
"system",
prompt_loader.get_prompt(
type="analyzer", name="update_memory"
).format_map({"step": step}),
),
MessagesPlaceholder("messages"),
]
)

chain = prompt | model
response = chain.invoke({"messages": state["messages"]}).content
store.put(namespace, key=str(uuid4()), value={step: response})
def ask_goals(state: State):
    """Ask the mentee about their goals, speaking in the mentor persona."""
    goal_prompt = prompt_loader.get_prompt(type="mentor", name="goal_setting")
    template = ChatPromptTemplate(
        [
            ("system", persona_prompt),
            ("system", goal_prompt),
            MessagesPlaceholder("messages"),
        ]
    )
    reply = (template | model).invoke({"messages": state["messages"]})
    return {"messages": [reply]}

return

def reflect_goals(state: State, store: BaseStore):
user_id = state["user_id"]
namespace = (user_id, "goals")
goals = store.search(namespace)
if not goals:
goals = ""
else:
for item in goals:
if "goal_list" in item.value:
goals = item.value["goal_list"]

# node responsible for generating responses after the user has finished the project update
def talk_to_mentor(state: State):
prompt = ChatPromptTemplate(
[
("system", persona_prompt),
(
"system",
prompt_loader.get_prompt(
type="mentor", name="goal_reflection"
).format_map({"goals": goals}),
),
MessagesPlaceholder("messages"),
]
)
Expand Down
Loading
Loading