What do you want to automate with OpenRouter and LlamaIndex?
Returns a list of models available through the API. See the documentation
Send a chat completion request to a selected model. See the documentation
Send a completion request to a selected model (text-only format). See the documentation. Example request sketches for these three actions are shown below.
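A minimal sketch of the model-listing call, assuming the same openrouter app prop and api_key auth field used in the components below; OpenRouter's GET /api/v1/models endpoint returns the catalog of available models:
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    openrouter: {
      type: "app",
      app: "openrouter",
    }
  },
  async run({steps, $}) {
    // List every model currently available through the OpenRouter API
    return await axios($, {
      url: `https://openrouter.ai/api/v1/models`,
      headers: {
        Authorization: `Bearer ${this.openrouter.$auth.api_key}`,
      },
    })
  },
})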
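A sketch of the chat completion call against POST /api/v1/chat/completions; the model slug (openai/o1, borrowed from the parameters example further down) and the message content are placeholders:
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    openrouter: {
      type: "app",
      app: "openrouter",
    }
  },
  async run({steps, $}) {
    // Send a chat completion request; swap in your own model and messages
    return await axios($, {
      method: "POST",
      url: `https://openrouter.ai/api/v1/chat/completions`,
      headers: {
        Authorization: `Bearer ${this.openrouter.$auth.api_key}`,
      },
      data: {
        model: "openai/o1",
        messages: [
          { role: "user", content: "Summarize what OpenRouter does in one sentence." },
        ],
      },
    })
  },
})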
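A sketch of the text-only completion variant against POST /api/v1/completions, which takes a prompt string instead of a messages array; the model and prompt values here are placeholders:
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    openrouter: {
      type: "app",
      app: "openrouter",
    }
  },
  async run({steps, $}) {
    // Send a text-only completion request; swap in your own model and prompt
    return await axios($, {
      method: "POST",
      url: `https://openrouter.ai/api/v1/completions`,
      headers: {
        Authorization: `Bearer ${this.openrouter.$auth.api_key}`,
      },
      data: {
        model: "openai/o1",
        prompt: "Write a one-line summary of multipart file uploads.",
      },
    })
  },
})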
The component below retrieves the parameters supported by a specific model (openai/o1) through the OpenRouter API, scoped to a single provider:
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    openrouter: {
      type: "app",
      app: "openrouter",
    }
  },
  async run({steps, $}) {
    // Look up the supported parameters for the openai/o1 model,
    // filtered to the OpenAI provider via the `provider` query param
    return await axios($, {
      url: `https://openrouter.ai/api/v1/parameters/openai/o1`,
      headers: {
        Authorization: `Bearer ${this.openrouter.$auth.api_key}`,
      },
      params: {
        provider: `OpenAI`,
      },
    })
  },
})
The component below uploads a remote PDF to the LlamaIndex parsing endpoint. It downloads the file with axios, then posts it as multipart form data, letting form-data supply the Content-Type header so the multipart boundary is included:
import { axios } from "@pipedream/platform";
import FormData from "form-data";
export default defineComponent({
  props: {
    llamaindex: {
      type: "app",
      app: "llamaindex",
    }
  },
  async run({steps, $}) {
    // Download the sample PDF into a buffer so it can be attached to the form
    const fileBuffer = await axios($, {
      url: "https://www.learningcontainer.com/wp-content/uploads/2019/09/sample-pdf-file.pdf",
      responseType: "arraybuffer",
    });
    const data = new FormData();
    data.append("file", Buffer.from(fileBuffer), "sample-pdf-file.pdf");
    // Upload the file for parsing; getHeaders() sets the multipart
    // Content-Type header, including the boundary
    return await axios($, {
      method: "POST",
      url: `${this.llamaindex.$auth.url}/api/parsing/upload`,
      headers: {
        ...data.getHeaders(),
        Authorization: `Bearer ${this.llamaindex.$auth.api_key}`,
      },
      data,
    })
  },
})