Automatic Greeting Feature #216

Merged · 11 commits · Dec 22, 2024

@@ -52,29 +52,34 @@ public MessageDTO sendMessage(String content, Long sessionId) {
currentSession.getMessages().add(savedUserMessage);
sessionRepository.save(currentSession);

String systemResponse = generateResponse(sessionId, content);
String systemResponse = generateResponse(sessionId);

// prevent saving empty system messages if the intelligence service is down
if (systemResponse == null) {
logger.error("Failed to generate response for message: {}", content);
return MessageDTO.fromMessage(savedUserMessage);
}

Message systemMessage = new Message();
systemMessage.setSender(MessageSender.MENTOR);
systemMessage.setContent(systemResponse);
systemMessage.setSession(currentSession);
Message savedSystemMessage = createSystemMessage(currentSession, systemResponse);
return MessageDTO.fromMessage(savedSystemMessage);
}

Message savedSystemMessage = messageRepository.save(systemMessage);
currentSession.getMessages().add(savedSystemMessage);
sessionRepository.save(currentSession);
public void generateFirstSystemMessage(Session session) {
String systemResponse = generateResponse(session.getId());

return MessageDTO.fromMessage(savedSystemMessage);
// prevent saving empty system messages if the intelligence service is down
if (systemResponse == null) {
logger.error("Failed to generate response for the conversation start");
return;
}

createSystemMessage(session, systemResponse);
}

private String generateResponse(Long sessionId, String messageContent) {
private String generateResponse(Long sessionId) {
List<Message> messages = messageRepository.findBySessionId(sessionId);

ISMessageHistory messageHistory = new ISMessageHistory();

messageHistory.setMessages(
messages
.stream()
@@ -91,4 +96,17 @@ private String generateResponse(Long sessionId, String messageContent) {
return null;
}
}

private Message createSystemMessage(Session currentSession, String systemResponse) {
Message systemMessage = new Message();
systemMessage.setSender(MessageSender.MENTOR);
systemMessage.setContent(systemResponse);
systemMessage.setSession(currentSession);

Message savedSystemMessage = messageRepository.save(systemMessage);
currentSession.getMessages().add(savedSystemMessage);
sessionRepository.save(currentSession);

return savedSystemMessage;
}
}
@@ -2,6 +2,7 @@

import de.tum.in.www1.hephaestus.core.exception.AccessForbiddenException;
import de.tum.in.www1.hephaestus.gitprovider.user.User;
import de.tum.in.www1.hephaestus.mentor.message.MessageService;
import java.util.List;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
@@ -13,6 +14,9 @@ public class SessionService {
@Autowired
private SessionRepository sessionRepository;

@Autowired
private MessageService messageService;

public void checkAccessElseThrow(User user, Session session) {
if (!session.getUser().getId().equals(user.getId())) {
throw new AccessForbiddenException("Session", session.getId());
@@ -32,6 +36,8 @@ public SessionDTO createSession(User user) {
Session session = new Session();
session.setUser(user);

return SessionDTO.fromSession(sessionRepository.save(session));
Session savedSession = sessionRepository.save(session);
messageService.generateFirstSystemMessage(session);
return SessionDTO.fromSession(savedSession);
}
}
19 changes: 19 additions & 0 deletions server/intelligence-service/app/mentor/prompt_loader.py
@@ -0,0 +1,19 @@
from pathlib import Path
from typing import Dict


class PromptLoader:
def __init__(self, prompt_dir: str = "prompts"):
self.prompt_dir = Path(__file__).parent / prompt_dir

def load_prompts(self) -> Dict[str, str]:
prompts = {}
for txt_file in self.prompt_dir.glob("*.txt"):
key = txt_file.stem # use the filename without extension as the key
with open(txt_file, "r", encoding="utf-8") as f:
prompts[key] = f.read().strip()
return prompts

def get_prompt(self, name: str) -> str:
prompts = self.load_prompts()
return prompts.get(name, "")
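
For reference, a minimal usage sketch of the loader (not part of this diff); the import path and the fallback string are assumptions based on the file location shown above:

# Hypothetical usage of PromptLoader, assuming the package path
# server/intelligence-service/app/mentor/prompt_loader.py maps to app.mentor.prompt_loader.
from app.mentor.prompt_loader import PromptLoader

loader = PromptLoader()                        # reads every *.txt under app/mentor/prompts/
persona = loader.get_prompt("mentor_persona")  # key = filename without the .txt extension
if not persona:                                # get_prompt returns "" for unknown names
    persona = "You are a helpful mentor."      # hypothetical fallback, not from the PR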
17 changes: 17 additions & 0 deletions server/intelligence-service/app/mentor/prompts/mentor_persona.txt
@@ -0,0 +1,17 @@
You are a friendly, approachable AI mentor focused on helping a student with their software engineering project. Your goal is to provide constructive guidance, support, and encouragement. Follow these rules and guidelines:

- **Stay On-Topic**: Only answer questions related to the student’s software engineering project. Avoid unrelated topics and general advice that does not support their specific work.

- **Focus on Guidance**: Offer actionable suggestions. If the student’s explanation is unclear, ask clarifying questions. Help them break down complex issues into manageable steps, and encourage them to think critically about their problem-solving approach.

- **Tone**: Maintain a friendly, supportive, and empathetic demeanor. Keep the conversation casual and encouraging, rather than formal or distant. Show understanding and reassure them when they face challenges.

- **Personality**: Be positive and motivating. Praise the student’s progress and offer constructive feedback when needed. Support them in reflecting on their decisions and thought processes to improve their project outcomes.

- **Empathy and Accountability**: Acknowledge any difficulties and provide practical strategies to overcome obstacles. Encourage the student to take responsibility for their learning and project development, while remaining patient and understanding.

- **Context for the Conversation**:
- If the student is stuck, ask questions to pinpoint their confusion and then suggest targeted steps to move forward.
- If the student is making good progress, recognize their achievements and continue to motivate them.

**Remember**: Your primary objective is to help the student succeed in their software engineering project. Do not deviate from this focus.
33 changes: 30 additions & 3 deletions server/intelligence-service/app/mentor/run.py
@@ -1,11 +1,14 @@
from typing_extensions import Annotated, TypedDict

from .prompt_loader import PromptLoader
from langgraph.graph import START, StateGraph, END
from langgraph.graph.message import add_messages
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder

from ..model import model

prompt_loader = PromptLoader()
persona_prompt = prompt_loader.get_prompt("mentor_persona")


class State(TypedDict):
messages: Annotated[list, add_messages]
@@ -14,9 +17,10 @@ def mentor(state: State):
def mentor(state: State):
prompt = ChatPromptTemplate(
[
("system", persona_prompt),
(
"system",
"You are an AI mentor helping a students working on the software engineering projects embracing structured self-reflection practices. You need to guide the student through the set questions regarding their work on the project during the last week (sprint). Your value is the fact, that you help students to reflect on their past progress. Throughout the conversation you need to perform all of the following tasks in the given order: Task 1: Greet the student and say you are happy to start the session. Task 2: Ask the student about the overall progress on the project. Task 3: Ask the student about the challenges faced during the sprint referring to what he said about progress. Task 4: Ask about the plan for the next sprint. You need to understand at which task in the conversation you are from the message history and what is the next task. Please, don't repeat yourself throughout the conversation. Don't perform more then one task at a time. If the user already shared something to a task you can go to the next. Be polite, friendly and do not let the student drive the conversation to any other topic except for the current project. Do not make a questionaire out of the conversation, but rather make it a natural conversation. Don't repeat the answer of the student to your latest question but try to react on it. If the student asks questions be helpful and try to find solutions.",
"You need to guide the student through the set questions regarding their work on the project during the last week (sprint). Your value is the fact, that you help students to reflect on their past progress. Throughout the conversation you need to perform all of the following tasks in the given order: Task 1: Ask the student about the overall progress on the project. Task 2: Ask the student about the challenges faced during the sprint referring to what he said about progress. Task 3: Ask about the plan for the next sprint. You need to understand at which task in the conversation you are from the message history and what is the next task. Please, don't repeat yourself throughout the conversation. Don't perform more then one task at a time. If the user already shared something to a task you can go to the next.",
),
MessagesPlaceholder("messages"),
]
@@ -25,9 +29,32 @@
return {"messages": [chain.invoke({"messages": state["messages"]})]}


def greeting(state: State):
prompt = ChatPromptTemplate(
[
("system", persona_prompt),
(
"system",
"Greet the user warmly and express excitement about starting today’s session. Keep the greeting friendly and encouraging. Mention that you are here to support them and look forward to making progress together.",
),
]
)
chain = prompt | model
return {"messages": [chain.invoke({"messages": state["messages"]})]}


def isFirstInteraction(state: State):
if len(state["messages"]) == 0:
return "greeting"
return "mentor"


graph_builder = StateGraph(State)
graph_builder.add_node("mentor", mentor)
graph_builder.add_edge(START, "mentor")
graph_builder.add_node("greeting", greeting)

graph_builder.add_conditional_edges(START, isFirstInteraction)
graph_builder.add_edge("mentor", END)
graph_builder.add_edge("greeting", END)

graph = graph_builder.compile()
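
A rough sketch of how the new conditional entry point behaves once the graph is compiled (module path and example contents are assumptions, not taken from the PR):

# Empty history -> isFirstInteraction routes START to the "greeting" node;
# any existing history -> routed to the "mentor" node for the sprint-reflection tasks.
from langchain_core.messages import HumanMessage
from app.mentor.run import graph  # assumed import path for this module

first = graph.invoke({"messages": []})
print(first["messages"][-1].content)  # warm opening message from the greeting node

follow_up = graph.invoke(
    {"messages": [HumanMessage(content="I finished the REST endpoints this sprint.")]}
)
print(follow_up["messages"][-1].content)  # mentor node continues the reflection tasks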
9 changes: 5 additions & 4 deletions server/intelligence-service/app/routers/mentor.py
@@ -31,9 +31,10 @@ class MentorMessage(BaseModel):
def generate(request: MessageHistory):
messages = []
for message in request.messages:
if message.sender == "USER":
messages.append(HumanMessage(content=message.content))
else:
messages.append(AIMessage(content=message.content))
if message.content:
if message.sender == "USER":
messages.append(HumanMessage(content=message.content))
else:
messages.append(AIMessage(content=message.content))
response_message = graph.invoke({"messages": messages})["messages"][-1].content
return MentorMessage(content=response_message)
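
With this change, an empty or content-less history falls through to the greeting branch of the graph. A hypothetical call against the endpoint (route path, port, and any payload fields beyond messages/sender/content are assumptions):

import requests

payload = {"messages": []}  # no prior messages -> the graph's greeting node responds
resp = requests.post("http://localhost:8000/mentor", json=payload)  # URL and path assumed
print(resp.json()["content"])  # MentorMessage.content holding the greeting text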
4 changes: 2 additions & 2 deletions webapp/src/app/mentor/mentor.component.html
@@ -17,10 +17,10 @@
} @else {
<div class="flex flex-col lg:col-span-3 lg:justify-center" [class.my-32]="(selectedSessionMessages.data()?.length ?? 0) === 0">
@if (selectedSessionId() !== undefined) {
@if ((selectedSessionMessages.data()?.length ?? 0) > 0) {
@if ((selectedSessionMessages.data()?.length ?? 0) > 0 || selectedSessionMessages.isPending()) {
<hlm-scroll-area #messagesScrollArea class="h-[calc(100dvh-200px)] lg:h-[calc(100dvh-300px)] -mr-2.5">
<div class="pr-3">
<app-messages [messages]="selectedSessionMessages.data() ?? []" class="p-4 space-y-4" />
<app-messages [isLoading]="selectedSessionMessages.isPending()" [messages]="selectedSessionMessages.data() ?? []" class="p-4 space-y-4" />
</div>
</hlm-scroll-area>
} @else {
1 change: 1 addition & 0 deletions webapp/src/app/mentor/mentor.component.ts
@@ -64,6 +64,7 @@ export class MentorComponent {
},
onSuccess: (session) => {
this.selectedSessionId.set(session.id);
this.queryClient.invalidateQueries({ queryKey: ['sessions', this.selectedSessionId()] });
}
}));

57 changes: 39 additions & 18 deletions webapp/src/app/mentor/messages/messages.component.html
@@ -1,27 +1,48 @@
<div class="flex flex-col gap-4">
@for (message of messages(); track message.id) {
<div class="flex w-full" [ngClass]="{ 'justify-end': message.sender === Message.SenderEnum.User, 'justify-start': message.sender === Message.SenderEnum.Mentor }">
<div class="flex space-x-2 md:w-3/5" [ngClass]="{ 'flex-row-reverse': message.sender === Message.SenderEnum.User }">
@if (message.sender === Message.SenderEnum.Mentor) {
<div class="mr-2 flex flex-col">
<div class="w-10 h-10 bg-transparent border-2 border-cyan-500 rounded-full flex items-center justify-center">
<lucide-angular [img]="BotMessageSquare" class="size-6 text-cyan-500" />
@if (isLoading()) {
@for (i of [1, 2, 3, 4]; track i) {
<div class="flex w-full" [ngClass]="{ 'justify-end': i % 2 === 0 }">
@if (i % 2 === 0) {
<div>
<hlm-skeleton class="rounded-lg inline-block w-64 h-12" />
<div class="flex justify-end">
<hlm-skeleton class="h-4 w-32 justify-end" />
</div>
</div>
} @else {
<hlm-skeleton class="w-10 h-10 rounded-full" />
<div class="ml-3">
<hlm-skeleton class="rounded-lg inline-block w-64 h-12" />
<hlm-skeleton class="h-4 w-32" />
</div>
}
<div class="flex flex-col space-y-2" [ngClass]="{ 'items-end': message.sender === Message.SenderEnum.User, 'items-start': message.sender === Message.SenderEnum.Mentor }">
<div
[ngClass]="{
'bg-cyan-500 dark:bg-cyan-600 text-white': message.sender === Message.SenderEnum.User,
'bg-muted text-primary': message.sender === Message.SenderEnum.Mentor
}"
class="p-3 px-4 rounded-lg inline-block w-fit"
>
<p>{{ message.content }}</p>
</div>
}
} @else {
@for (message of messages(); track message.id) {
<div class="flex w-full" [ngClass]="{ 'justify-end': message.sender === Message.SenderEnum.User, 'justify-start': message.sender === Message.SenderEnum.Mentor }">
<div class="flex space-x-2 md:w-3/5" [ngClass]="{ 'flex-row-reverse': message.sender === Message.SenderEnum.User }">
@if (message.sender === Message.SenderEnum.Mentor) {
<div class="mr-2 flex flex-col">
<div class="w-10 h-10 bg-transparent border-2 border-cyan-500 rounded-full flex items-center justify-center">
<lucide-angular [img]="BotMessageSquare" class="size-6 text-cyan-500" />
</div>
</div>
}
<div class="flex flex-col space-y-2" [ngClass]="{ 'items-end': message.sender === Message.SenderEnum.User, 'items-start': message.sender === Message.SenderEnum.Mentor }">
<div
[ngClass]="{
'bg-cyan-500 dark:bg-cyan-600 text-white': message.sender === Message.SenderEnum.User,
'bg-muted text-primary': message.sender === Message.SenderEnum.Mentor
}"
class="p-3 px-4 rounded-lg inline-block w-fit"
>
<p>{{ message.content }}</p>
</div>
<span class="text-xs text-muted-foreground"> {{ message.sender === Message.SenderEnum.User ? 'You' : 'AI Mentor' }} · {{ message.sentAt | date: 'shortTime' }} </span>
</div>
<span class="text-xs text-muted-foreground"> {{ message.sender === Message.SenderEnum.User ? 'You' : 'AI Mentor' }} · {{ message.sentAt | date: 'shortTime' }} </span>
</div>
</div>
</div>
}
}
</div>
4 changes: 3 additions & 1 deletion webapp/src/app/mentor/messages/messages.component.ts
@@ -4,17 +4,19 @@ import { LucideAngularModule, BotMessageSquare } from 'lucide-angular';
import { HlmAvatarModule } from '@spartan-ng/ui-avatar-helm';
import { SecurityStore } from '@app/core/security/security-store.service';
import { Message } from '@app/core/modules/openapi';
import { HlmSkeletonComponent } from '@spartan-ng/ui-skeleton-helm';

@Component({
selector: 'app-messages',
templateUrl: './messages.component.html',
standalone: true,
imports: [CommonModule, LucideAngularModule, HlmAvatarModule]
imports: [CommonModule, LucideAngularModule, HlmAvatarModule, HlmSkeletonComponent]
})
export class MessagesComponent {
protected BotMessageSquare = BotMessageSquare;
protected Message = Message;

securityStore = inject(SecurityStore);
messages = input<Message[]>([]);
isLoading = input<boolean>(false);
}
16 changes: 11 additions & 5 deletions webapp/src/app/mentor/messages/messages.stories.ts
@@ -11,35 +11,35 @@ const meta: Meta<MessagesComponent> = {
id: 1,
sentAt: '2024-12-05T10:15:00Z',
sender: Message.SenderEnum.Mentor,
content: 'Hello! How can I assist you today?',
content: 'Hello! I’m excited to help you with your software engineering project today. What are you currently working on?',
sessionId: 101
},
{
id: 2,
sentAt: '2024-12-05T10:16:30Z',
sender: Message.SenderEnum.User,
content: 'I need help with understanding my recent order.',
content: 'Hi! I’m struggling with designing the database schema for my project.',
sessionId: 101
},
{
id: 3,
sentAt: '2024-12-05T10:17:00Z',
sender: Message.SenderEnum.Mentor,
content: 'Sure! Could you provide your order ID?',
content: 'Got it! Can you tell me a bit more about the project?',
sessionId: 101
},
{
id: 4,
sentAt: '2024-12-05T10:17:45Z',
sender: Message.SenderEnum.User,
content: 'The order ID is #12345. I’m looking for the details.',
content: 'It’s an e-commerce app where users can browse products, add them to a cart, and place orders.',
sessionId: 101
},
{
id: 5,
sentAt: '2024-12-05T10:18:10Z',
sender: Message.SenderEnum.Mentor,
content: "Got it! Please hold on while I fetch your details. Thank you for your patience. :) I'll be back in a moment...",
content: 'A good first step is identifying the main entities: users, products, orders, and the cart. Let’s start with that — do you have any initial thoughts?',
sessionId: 101
}
]
@@ -50,3 +50,9 @@
type Story = StoryObj<MessagesComponent>;

export const Default: Story = {};

export const isLoading: Story = {
args: {
isLoading: true
}
};