What can you do with Llama AI and LiveKit?
Emit a new event for LiveKit room activities via webhook. See the documentation.
Create a new ingress from a URL in LiveKit. See the documentation.
Generate an access token for a participant to join a LiveKit room. See the documentation; a minimal token-generation sketch appears after the code examples below.
The Llama AI API provides powerful machine learning capabilities, enabling users to harness advanced AI for image recognition, natural language processing, and predictive modeling. By leveraging this API on Pipedream, you can automate complex workflows that require AI-driven insights, enhancing data analysis and decision-making processes across various business applications.
import { axios } from "@pipedream/platform"

export default defineComponent({
  props: {
    llama_ai: {
      type: "app",
      app: "llama_ai",
    },
  },
  async run({ steps, $ }) {
    // Ask the model a question and describe a function it may call in response.
    const data = {
      messages: [
        { role: "user", content: "What is the weather like in Boston?" },
      ],
      functions: [
        {
          name: "get_current_weather",
          description: "Get the current weather in a given location",
          parameters: {
            type: "object",
            properties: {
              location: {
                type: "string",
                description: "The city and state, e.g. San Francisco, CA",
              },
              days: {
                type: "number",
                description: "How many days ahead you want the forecast",
              },
              unit: { type: "string", enum: ["celsius", "fahrenheit"] },
            },
            // "required" belongs inside the parameters schema
            required: ["location", "days"],
          },
        },
      ],
      stream: false,
      function_call: "get_current_weather",
    }
    // Send the chat completion request, authenticating with the connected account's API token.
    return await axios($, {
      method: "post",
      url: `https://api.llama-api.com/chat/completions`,
      headers: {
        Authorization: `Bearer ${this.llama_ai.$auth.api_token}`,
      },
      data,
    })
  },
})
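If the call succeeds, the response is expected to follow an OpenAI-style chat-completion shape, which is an assumption here rather than something confirmed above: the step name llama_ai_request and the field path choices[0].message.function_call in this sketch are illustrative, not taken from the Llama AI documentation.

// A minimal sketch of a follow-up Pipedream step, assuming an OpenAI-style
// response shape. `steps.llama_ai_request.$return_value` is a hypothetical
// reference to the request step above.
export default defineComponent({
  async run({ steps, $ }) {
    const completion = steps.llama_ai_request.$return_value;
    const call = completion?.choices?.[0]?.message?.function_call;
    if (!call) {
      return { note: "No function call returned by the model" };
    }
    return {
      name: call.name,                       // e.g. "get_current_weather"
      arguments: JSON.parse(call.arguments), // arguments typically arrive as a JSON string
    };
  },
})

The LiveKit component that follows uses the same Pipedream pattern, authenticating RoomServiceClient with the connected account's credentials to list active rooms.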
import { RoomServiceClient } from 'livekit-server-sdk';

export default defineComponent({
  props: {
    livekit: {
      type: "app",
      app: "livekit",
    },
  },
  async run({ steps, $ }) {
    // Connect to the LiveKit server with the project's host URL and API credentials,
    // then return the list of active rooms.
    const svc = new RoomServiceClient(
      this.livekit.$auth.project_url,
      this.livekit.$auth.api_key,
      this.livekit.$auth.secret_key,
    );
    return await svc.listRooms();
  },
})
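The access-token action listed at the start of this section can be sketched with the AccessToken helper from livekit-server-sdk. The identity and room values below are placeholders, and toJwt() is awaited because it returns a Promise in v2 of the SDK (it was synchronous in v1); treat this as a sketch, not Pipedream's published action code.

import { AccessToken } from 'livekit-server-sdk';

export default defineComponent({
  props: {
    livekit: {
      type: "app",
      app: "livekit",
    },
  },
  async run({ steps, $ }) {
    // Build a token that lets a participant join a specific room.
    // "example-user" and "my-room" are placeholder values.
    const at = new AccessToken(
      this.livekit.$auth.api_key,
      this.livekit.$auth.secret_key,
      { identity: "example-user" },
    );
    at.addGrant({ roomJoin: true, room: "my-room" });
    // Awaiting works for both the v1 (string) and v2 (Promise) return types.
    return await at.toJwt();
  },
})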