Angular : chatbot IA avec streaming SSE
Construire un chatbot IA dans Angular avec streaming SSE, historique de conversation et interface responsive.
Architecture
Un chatbot IA dans Angular repose sur 3 couches :
- Backend proxy — Node.js/Express qui appelle l'API IA (OpenAI, Claude, etc.) et gère la clé API
- Service Angular — communique avec le backend via `HttpClient` ou `EventSource` (SSE)
- Composant chat — affiche les messages, gère l'input utilisateur et le streaming
A retenir : Ne jamais appeler l'API IA directement depuis Angular — ta clé serait exposée dans le navigateur. Passe toujours par un backend proxy.
Service IA
// chat.service.ts
import { Injectable, signal } from '@angular/core';
import { HttpClient } from '@angular/common/http';

/** A single chat message exchanged with the assistant. */
export interface Message {
  role: 'user' | 'assistant';
  content: string;
}

@Injectable({ providedIn: 'root' })
export class ChatService {
  private apiUrl = '/api/chat';
  /** Full conversation, oldest message first. */
  messages = signal<Message[]>([]);
  /** True while an assistant response is being streamed. */
  isLoading = signal(false);

  constructor(private http: HttpClient) {}

  /**
   * Appends the user's message plus an empty assistant placeholder,
   * then streams the backend response into that placeholder.
   */
  async sendMessage(userInput: string): Promise<void> {
    // Add the user message
    this.messages.update(msgs => [...msgs, { role: 'user', content: userInput }]);
    // Add a placeholder for the streamed reply
    this.messages.update(msgs => [...msgs, { role: 'assistant', content: '' }]);
    this.isLoading.set(true);
    try {
      await this.streamResponse();
    } catch {
      // Surface the failure in the chat instead of leaving an empty bubble.
      this.messages.update(msgs => {
        const updated = [...msgs];
        const last = updated[updated.length - 1];
        updated[updated.length - 1] = {
          ...last,
          content: last.content || 'Une erreur est survenue. Veuillez réessayer.',
        };
        return updated;
      });
    } finally {
      // Fix: the original left isLoading stuck at true when the stream failed.
      this.isLoading.set(false);
    }
  }

  private async streamResponse(): Promise<void> {
    const history = this.messages().slice(0, -1); // exclude the placeholder
    const response = await fetch(this.apiUrl, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify({ messages: history }),
    });
    // Fix: check the HTTP status and body instead of a non-null assertion.
    if (!response.ok || !response.body) {
      throw new Error(`Chat API error: ${response.status}`);
    }
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        // {stream: true} keeps multi-byte UTF-8 sequences intact when they
        // are split across network chunks (fix: plain decode() could garble).
        const chunk = decoder.decode(value, { stream: true });
        this.messages.update(msgs => {
          const updated = [...msgs];
          const last = updated[updated.length - 1];
          // Replace the last message instead of mutating it in place, so
          // consumers relying on reference equality see the change.
          updated[updated.length - 1] = { ...last, content: last.content + chunk };
          return updated;
        });
      }
    } finally {
      reader.releaseLock();
    }
  }
}
Composant chat
// chat.component.ts
import { Component, inject, ViewChild, ElementRef, effect } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { ChatService } from './chat.service';

@Component({
  selector: 'app-chat',
  standalone: true,
  imports: [FormsModule],
  templateUrl: './chat.component.html',
})
export class ChatComponent {
  chat = inject(ChatService);
  userInput = '';

  @ViewChild('messagesContainer') messagesContainer!: ElementRef;

  constructor() {
    // Keep the newest message visible: re-run on every messages() change.
    effect(() => {
      this.chat.messages(); // subscribe to message changes
      // Defer until after the DOM has been updated with the new message.
      setTimeout(() => this.scrollToBottom(), 0);
    });
  }

  /** Scrolls the message list to its end, if the view reference is ready. */
  private scrollToBottom(): void {
    const container = this.messagesContainer?.nativeElement;
    if (container) {
      container.scrollTop = container.scrollHeight;
    }
  }

  async onSubmit() {
    const raw = this.userInput;
    if (!raw.trim() || this.chat.isLoading()) {
      return;
    }
    this.userInput = '';
    await this.chat.sendMessage(raw);
  }
}
<!-- chat.component.html -->
<div class="chat-container">
<!-- Scrollable message list; #messagesContainer is read by the component for auto-scroll -->
<div class="messages" #messagesContainer>
@for (msg of chat.messages(); track $index) {
<!-- msg.role ('user' | 'assistant') doubles as the CSS class for alignment/colors -->
<div class="message" [class]="msg.role">
<span class="bubble">{{ msg.content }}</span>
</div>
}
<!-- Typing indicator while the streamed reply is still empty.
NOTE(review): the empty assistant placeholder bubble also renders above this — confirm intended. -->
@if (chat.isLoading() && chat.messages().at(-1)?.content === '') {
<div class="message assistant"><span class="typing-indicator">...</span></div>
}
</div>
<form (ngSubmit)="onSubmit()">
<input [(ngModel)]="userInput" name="input" placeholder="Votre message...">
<!-- Disabled while streaming to prevent concurrent submissions -->
<button type="submit" [disabled]="chat.isLoading()">Envoyer</button>
</form>
</div>
Streaming SSE — Backend
Le backend Express transmet les tokens en temps réel en flux chunké (lu côté client via fetch et un ReadableStream). Pour du SSE strict, il faudrait le type MIME text/event-stream et le format de trames data:.
// server.js (Express)
import express from 'express';
import OpenAI from 'openai';

const app = express();
app.use(express.json());

// The API key stays server-side: never ship it to the Angular client.
const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

/**
 * POST /api/chat — streams the assistant's reply as chunked plain text.
 * Body: { messages: Array<{ role: 'user'|'assistant', content: string }> }
 */
app.post('/api/chat', async (req, res) => {
  const { messages } = req.body ?? {};
  // Fix: reject malformed payloads instead of forwarding them to the API.
  if (!Array.isArray(messages)) {
    return res.status(400).json({ error: 'messages must be an array' });
  }
  res.setHeader('Content-Type', 'text/plain; charset=utf-8');
  res.setHeader('Transfer-Encoding', 'chunked');
  try {
    const stream = await openai.chat.completions.create({
      model: 'gpt-4o',
      messages: [
        { role: 'system', content: 'Tu es un assistant web utile.' },
        ...messages,
      ],
      stream: true,
    });
    for await (const chunk of stream) {
      const delta = chunk.choices[0]?.delta?.content || '';
      if (delta) res.write(delta);
    }
  } catch (err) {
    // Fix: the original let an API error reject the async handler, leaving
    // the client stream hanging. Headers may already be sent, so we cannot
    // switch to a 500 — log and close the stream so the reader terminates.
    console.error('OpenAI stream failed:', err);
  }
  res.end();
});

app.listen(3000);
Gestion de l'historique
// Persist the conversation history in localStorage
@Injectable({ providedIn: 'root' })
export class ChatService {
  private readonly STORAGE_KEY = 'chat-history';

  /** Conversation restored from localStorage at service creation. */
  messages = signal<Message[]>(this.loadHistory());

  /**
   * Reads the stored history; returns [] when absent or corrupted.
   * Fix: the original called JSON.parse unguarded, so a malformed stored
   * value threw during service construction and broke the app at startup.
   */
  private loadHistory(): Message[] {
    try {
      const stored = localStorage.getItem(this.STORAGE_KEY);
      const parsed: unknown = stored ? JSON.parse(stored) : [];
      return Array.isArray(parsed) ? (parsed as Message[]) : [];
    } catch {
      return [];
    }
  }

  /** Writes the current messages snapshot to localStorage. */
  saveHistory() {
    localStorage.setItem(this.STORAGE_KEY, JSON.stringify(this.messages()));
  }

  /** Empties the conversation and removes the persisted copy. */
  clearHistory() {
    this.messages.set([]);
    localStorage.removeItem(this.STORAGE_KEY);
  }
}
Interface utilisateur
/* chat.component.css */
/* Outer shell: fixed-height column so the message list can flex-grow
   and scroll independently of the input form. */
.chat-container {
display: flex;
flex-direction: column;
height: 600px;
border: 1px solid #e0e0e0;
border-radius: 12px;
overflow: hidden;
}
/* Scrollable message list; flex: 1 takes all space above the form. */
.messages {
flex: 1;
overflow-y: auto;
padding: 16px;
display: flex;
flex-direction: column;
gap: 12px;
}
/* Each message row is a flex container so bubbles can be pushed
   left (assistant) or right (user). */
.message { display: flex; }
.message.user { justify-content: flex-end; }
.message.assistant { justify-content: flex-start; }
/* pre-wrap preserves line breaks in streamed text while still wrapping. */
.bubble {
max-width: 70%;
padding: 10px 14px;
border-radius: 18px;
line-height: 1.5;
white-space: pre-wrap;
}
/* Asymmetric corner radius points the bubble "tail" toward its sender. */
.user .bubble { background: #0084ff; color: white; border-radius: 18px 18px 4px 18px; }
.assistant .bubble { background: #f0f0f0; color: #333; border-radius: 18px 18px 18px 4px; }
.typing-indicator { font-size: 1.5rem; letter-spacing: 4px; }
Note : Pour le rendu Markdown dans les réponses (code colorisé, listes, etc.), utilise
ngx-markdown ou marked.js pour parser le contenu des messages assistant.