Use chat completions with Groq Cloud and Llama AI. The Groq Cloud example below creates a model response for the given chat conversation; see the Groq API documentation for details.
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    groqcloud: {
      type: "app",
      app: "groqcloud",
    },
  },
  /**
   * Sends a single-turn chat completion request to the Groq Cloud
   * OpenAI-compatible endpoint and returns the API response.
   */
  async run({ steps, $ }) {
    const payload = {
      model: "llama3-8b-8192",
      messages: [
        { role: "user", content: "What is Pipedream?" },
      ],
    };
    const response = await axios($, {
      method: "post",
      url: "https://api.groq.com/openai/v1/chat/completions",
      headers: {
        // Groq uses Bearer-token auth with the account's API key.
        Authorization: `Bearer ${this.groqcloud.$auth.api_key}`,
      },
      data: payload,
    });
    return response;
  },
})
The Llama AI API provides powerful machine learning capabilities, enabling users to harness advanced AI for image recognition, natural language processing, and predictive modeling. By leveraging this API on Pipedream, you can automate complex workflows that require AI-driven insights, enhancing data analysis and decision-making processes across various business applications.
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    llama_ai: {
      type: "app",
      app: "llama_ai",
    },
  },
  /**
   * Sends a chat completion request with an OpenAI-style function
   * definition to the Llama AI API and returns the API response.
   */
  async run({ steps, $ }) {
    const data = {
      messages: [
        { role: "user", content: "What is the weather like in Boston?" },
      ],
      functions: [
        {
          name: "get_current_weather",
          description: "Get the current weather in a given location",
          // JSON Schema describing the function's arguments.
          parameters: {
            type: "object",
            properties: {
              location: {
                type: "string",
                description: "The city and state, e.g. San Francisco, CA",
              },
              days: {
                type: "number",
                description: "for how many days ahead you want the forecast",
              },
              unit: { type: "string", enum: ["celsius", "fahrenheit"] },
            },
            // FIX: "required" belongs inside the parameters schema object;
            // as a sibling of "parameters" it was invalid and ignored.
            required: ["location", "days"],
          },
        },
      ],
      // FIX: must be the boolean false, not the string "false" (truthy).
      stream: false,
      // Force the model to call this function (Llama API accepts the
      // function name here; see their function-calling docs).
      function_call: "get_current_weather",
    };
    return await axios($, {
      method: "post",
      url: "https://api.llama-api.com/chat/completions",
      headers: {
        Authorization: `Bearer ${this.llama_ai.$auth.api_token}`,
      },
      data,
    });
  },
})