Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
168 changes: 168 additions & 0 deletions packages/langchain-cascadeflow/examples/lcel-chains.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,168 @@
/**
* LCEL (LangChain Expression Language) Chains Example
*
* Demonstrates how to use CascadeFlow with LCEL composition patterns.
* The cascade works seamlessly with pipes, sequences, and other LCEL constructs.
*/

import { ChatOpenAI } from '@langchain/openai';
import { PromptTemplate } from '@langchain/core/prompts';
import { StringOutputParser } from '@langchain/core/output_parsers';
import { RunnableSequence, RunnablePassthrough } from '@langchain/core/runnables';
import { withCascade } from '../src/index.js';

/**
 * CascadeFlow LCEL chains demo.
 *
 * Runs eight examples showing a cascade composed with LCEL constructs:
 * simple pipes, prompt → model → parser chains, RunnableSequence, batch
 * processing, RunnablePassthrough.assign(), streaming, nested chains, and
 * .bind(). Each example prints its query and a (truncated) response;
 * several also surface cascade metadata via getLastCascadeResult().
 *
 * Requires live OpenAI credentials in the environment; all output goes to
 * stdout. Rejects if any chain invocation fails.
 */
async function main() {
  console.log('=== CascadeFlow LCEL Chains Demo ===\n');

  // Cheap drafter answers first; the stronger verifier model takes over
  // when the draft's quality score falls below qualityThreshold.
  const drafter = new ChatOpenAI({ model: 'gpt-4o-mini', temperature: 0.7 });
  const verifier = new ChatOpenAI({ model: 'gpt-4o', temperature: 0.7 });

  const cascade = withCascade({
    drafter,
    verifier,
    qualityThreshold: 0.7,
  });

  // StringOutputParser is stateless, so one shared instance serves every
  // chain below (previously several examples constructed redundant copies).
  const parser = new StringOutputParser();

  // Example 1: Simple Pipe Chain
  console.log('--- Example 1: Simple Pipe Chain ---');

  const simpleChain = cascade.pipe(parser);

  const result1 = await simpleChain.invoke('What is the capital of France?');
  console.log('Query: What is the capital of France?');
  console.log(`Response: ${result1}`);

  // Metadata about the most recent cascade run (which model answered, etc.).
  const metadata1 = cascade.getLastCascadeResult();
  console.log(`Model used: ${metadata1?.modelUsed}`);
  console.log('\n');

  // Example 2: Prompt | Cascade | Parser
  console.log('--- Example 2: Prompt → Cascade → Parser ---');

  const prompt = PromptTemplate.fromTemplate(
    'You are a helpful assistant. Answer this question: {question}'
  );

  const chain2 = prompt.pipe(cascade).pipe(parser);

  const result2 = await chain2.invoke({
    question: 'Explain how photosynthesis works in simple terms',
  });

  console.log('Query: Explain how photosynthesis works');
  console.log(`Response: ${result2.substring(0, 150)}...`);

  const metadata2 = cascade.getLastCascadeResult();
  console.log(`Model used: ${metadata2?.modelUsed}`);
  console.log(`Quality: ${metadata2?.drafterQuality?.toFixed(2)}`);
  console.log('\n');

  // Example 3: RunnableSequence
  console.log('--- Example 3: RunnableSequence ---');

  const sequenceChain = RunnableSequence.from([
    PromptTemplate.fromTemplate('Topic: {topic}\n\nWrite a brief summary.'),
    cascade,
    parser,
  ]);

  const result3 = await sequenceChain.invoke({ topic: 'Artificial Intelligence' });
  console.log('Topic: Artificial Intelligence');
  console.log(`Summary: ${result3.substring(0, 150)}...`);
  console.log('\n');

  // Example 4: Batch Processing
  console.log('--- Example 4: Batch Processing ---');

  const batchChain = cascade.pipe(parser);

  const questions = [
    'What is 2+2?',
    'What is the speed of light?',
    'Who wrote Romeo and Juliet?',
  ];

  // .batch() runs the chain over all inputs (concurrently by default).
  const results4 = await batchChain.batch(questions);

  console.log('Batch processing 3 questions:');
  results4.forEach((result, idx) => {
    console.log(` ${idx + 1}. ${questions[idx]}`);
    console.log(` → ${result.substring(0, 60)}...`);
  });
  console.log('\n');

  // Example 5: Complex Chain with RunnablePassthrough
  console.log('--- Example 5: RunnablePassthrough.assign() ---');

  // assign() merges computed fields into the result object, so the output
  // carries both the cascade answer and a statically generated context field.
  const complexChain = RunnablePassthrough.assign({
    answer: cascade.pipe(parser),
    context: () => 'Generated by CascadeFlow',
  });

  const result5 = await complexChain.invoke('What is machine learning?');

  console.log('Query: What is machine learning?');
  console.log(`Answer: ${result5.answer.substring(0, 100)}...`);
  console.log(`Context: ${result5.context}`);
  console.log('\n');

  // Example 6: Streaming in LCEL Chain
  console.log('--- Example 6: Streaming in LCEL Chain ---');

  const streamChain = prompt.pipe(cascade).pipe(parser);

  console.log('Query: What are the benefits of renewable energy?');
  process.stdout.write('Response (streaming): ');

  const stream = await streamChain.stream({
    question: 'What are the benefits of renewable energy?',
  });

  // Chunks arrive incrementally; write without newlines to show streaming.
  for await (const chunk of stream) {
    process.stdout.write(chunk);
  }

  console.log('\n');

  const metadata6 = cascade.getLastCascadeResult();
  console.log(`Model used: ${metadata6?.modelUsed}`);
  console.log(`Pre-routed: ${metadata6?.preRouted}`);
  console.log('\n');

  // Example 7: Nested Chains
  console.log('--- Example 7: Nested Chains ---');

  const innerChain = cascade.pipe(parser);
  const outerChain = RunnableSequence.from([
    PromptTemplate.fromTemplate('Question: {text}'),
    innerChain,
  ]);

  const result7 = await outerChain.invoke({ text: 'What is the meaning of life?' });

  console.log('Query: What is the meaning of life?');
  console.log(`Response: ${result7.substring(0, 150)}...`);
  console.log('\n');

  // Example 8: Method Chaining before Piping
  console.log('--- Example 8: .bind() + Pipe ---');

  // .bind() fixes invocation kwargs ahead of time — here a lower temperature
  // — and the bound runnable still composes with .pipe() like any other.
  const boundChain = cascade
    .bind({ temperature: 0.3 }) // More deterministic
    .pipe(parser);

  const result8 = await boundChain.invoke('Count from 1 to 5');

  console.log('Query: Count from 1 to 5 (temperature=0.3)');
  console.log(`Response: ${result8}`);
  console.log('\n');

  console.log('=== LCEL Chains Demo Complete ===');
  console.log('\n💡 CascadeFlow works seamlessly with LCEL');
  console.log('💡 Use .pipe() to chain cascade with other runnables');
  console.log('💡 Batch processing, streaming, and all LCEL patterns supported');
  console.log('💡 Cascade metadata remains accessible through getLastCascadeResult()');
}

// Run the demo; surface failures with a non-zero exit code so shells and CI
// notice (the original logged the error but still exited 0).
main().catch((err: unknown) => {
  console.error(err);
  process.exitCode = 1;
});
Loading