-
Notifications
You must be signed in to change notification settings - Fork 14
Expand file tree
/
Copy path llm-chat-human-in-loop.ts
More file actions
138 lines (119 loc) · 3.99 KB
/
llm-chat-human-in-loop.ts
File metadata and controls
138 lines (119 loc) · 3.99 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
/**
* LLM Chat with Human-in-the-Loop — Interactive chat with WAIT pauses
*
* Demonstrates an LLM chat where the workflow pauses (WAIT task) for
* human input between turns. The user updates the waiting task to continue.
*
* Prerequisites:
* - An LLM integration configured in Conductor
*
* Run:
* CONDUCTOR_SERVER_URL=http://localhost:8080 npx ts-node examples/agentic-workflows/llm-chat-human-in-loop.ts
*/
import {
OrkesClients,
ConductorWorkflow,
llmChatCompleteTask,
waitTaskDuration,
Role,
} from "../../src/sdk";
/**
 * Registers and runs an interactive LLM chat workflow that pauses at a WAIT
 * task for human input, then simulates that input by completing the task.
 *
 * Flow: register workflow → start it asynchronously → poll until the WAIT
 * task is IN_PROGRESS → complete it with a simulated user message → poll for
 * a terminal workflow state and print the output.
 *
 * Environment:
 * - LLM_PROVIDER / LLM_MODEL select the Conductor LLM integration and model
 *   (defaults: "openai_integration" / "gpt-4o").
 *
 * Exits the process with code 0 on completion of the script flow.
 */
async function main() {
  const clients = await OrkesClients.from();
  const workflowClient = clients.getWorkflowClient();
  const taskClient = clients.getTaskClient();
  const provider = process.env.LLM_PROVIDER ?? "openai_integration";
  const model = process.env.LLM_MODEL ?? "gpt-4o";
  // Promise-based sleep used by the polling loops below.
  const sleep = (ms: number) =>
    new Promise<void>((resolve) => setTimeout(resolve, ms));
  // ── Define workflow with WAIT for human input ─────────────────────
  const wf = new ConductorWorkflow(
    workflowClient,
    "llm_chat_human_in_loop"
  )
    .description("Interactive LLM chat with human-in-the-loop WAIT tasks")
    .timeoutSeconds(3600);
  // Initial LLM greeting, seeded with the workflow's input topic.
  wf.add(
    llmChatCompleteTask("greeting_ref", provider, model, {
      messages: [
        {
          role: Role.SYSTEM,
          message:
            "You are a helpful assistant. Greet the user and ask how you can help them today. Be concise.",
        },
        {
          role: Role.USER,
          message: "Topic: ${workflow.input.topic}",
        },
      ],
      temperature: 0.7,
      maxTokens: 200,
    })
  );
  // Wait for human response (external signal); times out after 300s.
  wf.add(waitTaskDuration("human_input_ref", "300s"));
  // LLM responds to human input, replaying the greeting as assistant context.
  wf.add(
    llmChatCompleteTask("response_ref", provider, model, {
      messages: [
        {
          role: Role.SYSTEM,
          message: "You are a helpful assistant. Respond to the user's message concisely.",
        },
        {
          role: Role.ASSISTANT,
          message: "${greeting_ref.output.result}",
        },
        {
          role: Role.USER,
          message: "${human_input_ref.output.userMessage}",
        },
      ],
      temperature: 0.7,
      maxTokens: 300,
    })
  );
  wf.outputParameters({
    greeting: "${greeting_ref.output.result}",
    userMessage: "${human_input_ref.output.userMessage}",
    response: "${response_ref.output.result}",
  });
  await wf.register(true);
  console.log("Registered workflow:", wf.getName());
  // ── Start workflow (async — it will pause at WAIT) ────────────────
  const workflowId = await wf.startWorkflow({
    topic: "TypeScript best practices",
  });
  console.log("Started workflow:", workflowId);
  console.log("Workflow will pause at WAIT task for human input...");
  // Poll until the WAIT task is IN_PROGRESS (up to ~30s). A single fixed
  // sleep raced against server scheduling and could miss the task entirely.
  // NOTE(review): matches taskDefName === "WAIT" — confirm that the SDK's
  // waitTaskDuration helper names the task "WAIT" (vs. only taskType).
  const findWaitingTask = (tasks: typeof status.tasks) =>
    tasks?.find((t) => t.taskDefName === "WAIT" && t.status === "IN_PROGRESS");
  let status = await workflowClient.getWorkflow(workflowId, true);
  let waitingTask = findWaitingTask(status.tasks);
  const waitDeadline = Date.now() + 30_000;
  while (!waitingTask && Date.now() < waitDeadline) {
    await sleep(1000);
    status = await workflowClient.getWorkflow(workflowId, true);
    waitingTask = findWaitingTask(status.tasks);
  }
  console.log("Current status:", status.status);
  if (waitingTask?.taskId) {
    console.log("\nSimulating human input...");
    // Update the WAIT task with human input; the output map feeds
    // ${human_input_ref.output.userMessage} downstream.
    await taskClient.updateTaskResult(
      workflowId,
      "human_input_ref",
      "COMPLETED",
      { userMessage: "Tell me about async/await patterns in TypeScript" }
    );
    console.log("Human input provided. Workflow continuing...");
    // Poll for a terminal state (up to ~60s) rather than sleeping a fixed
    // 10s, which could print a still-RUNNING status on slow servers.
    const terminalStates = new Set([
      "COMPLETED",
      "FAILED",
      "TERMINATED",
      "TIMED_OUT",
    ]);
    let finalStatus = await workflowClient.getWorkflow(workflowId, true);
    const doneDeadline = Date.now() + 60_000;
    while (
      !terminalStates.has(String(finalStatus.status)) &&
      Date.now() < doneDeadline
    ) {
      await sleep(1000);
      finalStatus = await workflowClient.getWorkflow(workflowId, true);
    }
    console.log("\nFinal status:", finalStatus.status);
    console.log("Output:", JSON.stringify(finalStatus.output, null, 2));
  } else {
    console.log("WAIT task not found. Workflow may have completed or failed.");
  }
  process.exit(0);
}
// Entry point: run the example and exit non-zero on any unhandled failure.
// `void` marks the promise as intentionally fire-and-forget.
void main().then(
  () => undefined,
  (error: unknown) => {
    console.error(error);
    process.exit(1);
  }
);