with Mistral AI and fal.ai?
Emit new event when a new batch job is completed (a polling sketch follows this list). See the documentation
Emit new event when a new batch job fails. See the documentation
Emit new event when a new AI model is registered or becomes available. See the documentation
Create a new batch job; it will be queued for processing (sketched below). See the documentation
Add a request to the queue for asynchronous processing, including specifying a webhook URL for receiving updates (sketched below). See the documentation
Cancel a request in the queue, which lets you stop a long-running task that is no longer needed (sketched below). See the documentation
Download a batch job results file to the /tmp directory (sketched below). See the documentation
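The batch-job triggers above work by polling Mistral for job state changes. Purely as an illustration of that idea, here is a minimal Pipedream-style polling source; the /v1/batch/jobs endpoint, the SUCCESS status value, and the response shape are assumptions about Mistral's batch API, and the packaged triggers additionally de-duplicate jobs they have already emitted.

import { axios } from "@pipedream/platform"
export default {
  props: {
    mistral_ai: {
      type: "app",
      app: "mistral_ai",
    },
    timer: {
      type: "$.interface.timer",
      default: {
        intervalSeconds: 60 * 5, // poll every 5 minutes
      },
    },
  },
  async run() {
    // List batch jobs and emit one event per completed job.
    // Endpoint, status value, and response shape are assumptions.
    const { data: jobs = [] } = await axios(this, {
      url: "https://api.mistral.ai/v1/batch/jobs",
      headers: {
        Authorization: `Bearer ${this.mistral_ai.$auth.api_key}`,
      },
    });
    for (const job of jobs) {
      if (job.status === "SUCCESS") {
        this.$emit(job, {
          id: job.id,
          summary: `Batch job ${job.id} completed`,
          ts: Date.now(),
        });
      }
    }
  },
}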
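Creating a batch job amounts to a single authenticated POST. A rough sketch, assuming Mistral's /v1/batch/jobs endpoint with input_files, endpoint, and model fields; the file ID and model name below are placeholders.

import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    mistral_ai: {
      type: "app",
      app: "mistral_ai",
    },
  },
  async run({ steps, $ }) {
    // Create a batch job from a previously uploaded input file.
    // Endpoint path, body fields, and model name are assumptions
    // about Mistral's batch API; adjust them to match the docs.
    return await axios($, {
      method: "POST",
      url: "https://api.mistral.ai/v1/batch/jobs",
      headers: {
        Authorization: `Bearer ${this.mistral_ai.$auth.api_key}`,
        "content-type": "application/json",
      },
      data: {
        input_files: ["<your-input-file-id>"], // placeholder file ID
        endpoint: "/v1/chat/completions",
        model: "mistral-small-latest", // placeholder model name
      },
    })
  },
})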
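The fal.ai queue items map onto the queue API of @fal-ai/client. The sketch below submits a request with a webhook URL instead of blocking on the result; the webhookUrl option and the commented-out fal.queue.cancel call follow the client's documented queue API but should be verified against the installed version, and the webhook URL is a placeholder.

import { fal } from "@fal-ai/client"
export default defineComponent({
  props: {
    fal_ai: {
      type: "app",
      app: "fal_ai",
    },
  },
  async run({ steps, $ }) {
    fal.config({
      credentials: this.fal_ai.$auth.api_key,
    });
    // Enqueue the request rather than waiting for it; fal.ai will POST
    // status updates to the webhook URL (placeholder below).
    const { request_id } = await fal.queue.submit("fal-ai/lora", {
      input: {
        model_name: "stabilityai/stable-diffusion-xl-base-1.0",
        prompt: "Photo of a rhino dressed suit and tie sitting at a table in a bar with a bar stools, award winning photography, Elke vogelsang",
      },
      webhookUrl: "https://your-endpoint.m.pipedream.net", // placeholder
    });
    // To cancel a queued request later (verify this call against your
    // installed @fal-ai/client version):
    // await fal.queue.cancel("fal-ai/lora", { requestId: request_id });
    return { request_id };
  },
})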
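Downloading a results file follows the same pattern: fetch the job, then write its output file to /tmp (the only writable directory in a Pipedream workflow). The /v1/batch/jobs/{id} and /v1/files/{id}/content endpoints and the output_file field are assumptions about Mistral's batch and files APIs; the job ID is a placeholder.

import fs from "fs"
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    mistral_ai: {
      type: "app",
      app: "mistral_ai",
    },
  },
  async run({ steps, $ }) {
    const jobId = "<your-batch-job-id>"; // placeholder job ID
    // Look up the job to find its output file (field name assumed).
    const job = await axios($, {
      url: `https://api.mistral.ai/v1/batch/jobs/${jobId}`,
      headers: {
        Authorization: `Bearer ${this.mistral_ai.$auth.api_key}`,
      },
    });
    // Download the file contents and write them to /tmp.
    const contents = await axios($, {
      url: `https://api.mistral.ai/v1/files/${job.output_file}/content`,
      headers: {
        Authorization: `Bearer ${this.mistral_ai.$auth.api_key}`,
      },
      responseType: "arraybuffer",
    });
    const filePath = `/tmp/batch-${jobId}-results.jsonl`;
    fs.writeFileSync(filePath, contents);
    return { filePath };
  },
})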
// Mistral AI example: list the models available to the connected account
import { axios } from "@pipedream/platform"
export default defineComponent({
  props: {
    mistral_ai: {
      type: "app",
      app: "mistral_ai",
    },
  },
  async run({ steps, $ }) {
    // Send an authenticated GET request to Mistral's models endpoint
    return await axios($, {
      url: "https://api.mistral.ai/v1/models",
      headers: {
        Authorization: `Bearer ${this.mistral_ai.$auth.api_key}`,
        "content-type": "application/json",
      },
    })
  },
})
// fal.ai example: run a model with @fal-ai/client and wait for the result
import { fal } from "@fal-ai/client"
export default defineComponent({
  props: {
    fal_ai: {
      type: "app",
      app: "fal_ai",
    },
  },
  async run({ steps, $ }) {
    // Authenticate the fal.ai client with the connected account's API key
    fal.config({
      credentials: this.fal_ai.$auth.api_key,
    });
    // Subscribe to the model run and stream logs until the result is ready
    const result = await fal.subscribe("fal-ai/lora", {
      input: {
        model_name: "stabilityai/stable-diffusion-xl-base-1.0",
        prompt:
          "Photo of a rhino dressed suit and tie sitting at a table in a bar with a bar stools, award winning photography, Elke vogelsang",
      },
      logs: true,
    });
    return result;
  },
})