Using n8n
To use n8n with the ArvanCloud AI service and LangChain, you can refer to the sample code below in the following languages (a short sketch of the underlying OpenAI-compatible request follows the list):
- Python
- Golang
- TypeScript
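All three samples point a LangChain OpenAI client at the ArvanCloud gateway through a custom base URL, so the gateway is used as an OpenAI-compatible endpoint. As a minimal sketch (not part of the official samples), the same request could be made with the plain OpenAI Python SDK, assuming the gateway exposes the standard chat-completions path under the same /v1 base URL; the placeholder API key and elided URL mirror the samples and must be replaced with your own values:

# Minimal sketch using the official OpenAI Python SDK (pip install openai).
# Assumption: the gateway follows the standard OpenAI-compatible
# chat-completions API under the /v1 base URL shown in the samples below.
from openai import OpenAI

client = OpenAI(
    api_key="apikey ...",  # placeholder, as in the samples
    base_url="https://arvancloudai.ir/gateway/models/Gemini-2.5-Pro/.../v1",
)

completion = client.chat.completions.create(
    model="Gemini-2.5-Pro",
    messages=[{"role": "user", "content": "tell me a joke about product managers"}],
    max_tokens=10000,
    temperature=0.8,
)
print(completion.choices[0].message.content)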
How to run (Python):
pip install langchain langchain-openai langchain-core
python main.py
Sample code:
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage

# Set up the LangChain ChatOpenAI with custom base URL and API key
llm = ChatOpenAI(
    api_key="apikey ...",
    base_url="https://arvancloudai.ir/gateway/models/Gemini-2.5-Pro/.../v1",
    model="Gemini-2.5-Pro",
    max_tokens=10000,
    temperature=0.8,
)

# Create a human message
messages = [HumanMessage(content="tell me a joke about product managers")]

# Invoke the model
response = llm.invoke(messages)
print(response)
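The invoke call returns the full message object; the reply text itself is available on response.content. If you prefer token-by-token output, a minimal streaming sketch (reusing the llm object configured above, with the same placeholder credentials) could look like this:

# Streaming sketch: ChatOpenAI.stream() yields message chunks whose .content
# holds the next piece of the reply text. Assumes the llm object from the
# sample above is already configured.
for chunk in llm.stream([HumanMessage(content="tell me a joke about product managers")]):
    print(chunk.content, end="", flush=True)
print()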
How to run (Golang):
go mod tidy
go run main.go
Sample code:
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/tmc/langchaingo/llms"
    "github.com/tmc/langchaingo/llms/openai"
)

func main() {
    // Create LangChain OpenAI LLM with custom base URL and API key
    llm, err := openai.New(
        openai.WithToken("apikey ..."),
        openai.WithBaseURL("https://arvancloudai.ir/gateway/models/Gemini-2.5-Pro/.../v1"),
        openai.WithModel("Gemini-2.5-Pro"),
    )
    if err != nil {
        log.Fatalf("Failed to create OpenAI LLM: %v", err)
    }

    // Create context
    ctx := context.Background()

    // Create message content
    content := "tell me a joke about product managers"

    // Generate response using LangChain
    response, err := llms.GenerateFromSinglePrompt(
        ctx,
        llm,
        content,
        llms.WithTemperature(0.8),
        llms.WithMaxTokens(10000),
    )
    if err != nil {
        log.Fatalf("Failed to generate response: %v", err)
    }
    fmt.Printf("Response: %s\n", response)

    // Alternative: Using the Call method directly with prompt
    callResponse, err := llm.Call(
        ctx,
        content,
        llms.WithTemperature(0.8),
        llms.WithMaxTokens(10000),
    )
    if err != nil {
        log.Fatalf("Failed to call LLM: %v", err)
    }
    fmt.Printf("Call Response: %s\n", callResponse)
}
The package.json file (TypeScript):
{
  "name": "ai-chat-langchain-client",
  "version": "1.0.0",
  "description": "TypeScript LangChain LLM chat client",
  "main": "main.ts",
  "scripts": {
    "start": "ts-node main.ts",
    "build": "tsc main.ts",
    "dev": "ts-node-dev main.ts"
  },
  "dependencies": {
    "@langchain/openai": "^0.3.11",
    "@langchain/core": "^0.3.18",
    "langchain": "^0.3.6"
  },
  "devDependencies": {
    "@types/node": "^22.12.0",
    "ts-node": "^10.9.2",
    "ts-node-dev": "^2.0.0",
    "typescript": "^5.5.4"
  }
}
Sample code:
import { ChatOpenAI } from "@langchain/openai";
import { HumanMessage } from "@langchain/core/messages";

async function main() {
  try {
    // Create LangChain ChatOpenAI with custom base URL and API key
    const llm = new ChatOpenAI({
      apiKey: "apikey ...",
      configuration: {
        baseURL: "https://arvancloudai.ir/gateway/models/Gemini-2.5-Pro/.../v1",
      },
      model: "Gemini-2.5-Pro",
      maxTokens: 10000,
      temperature: 0.8,
    });

    // Create a human message
    const messages = [new HumanMessage("tell me a joke about product managers")];

    // Invoke the model
    const response = await llm.invoke(messages);
    console.log("Response:", response.content);
  } catch (error) {
    console.error("Error:", error);
  }
}

main();
Install the dependencies, then run the following commands:
npm install
npm start