const SUMMARIZE_LENGTH = {
SHORT: {
label: "Short",
value: "short",
},
MEDIUM: {
label: "Medium",
value: "medium",
},
LONG: {
label: "Long",
value: "long",
},
}
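
// The selected length value is matched against these entries in run() to pick
// the phrasing of the generated system instructions.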
export default {
name: "Summarize",
version: "0.0.1",
key: "openai-passthrough-summarize",
description:
"Summarizes text using the OpenAI API. [See the documentation](https://platform.openai.com/docs/api-reference/chat)",
type: "action",
props: {
app: {
type: "app",
app: "openai_passthrough",
},
modelId: {
label: "Model ID",
type: "string",
options: ["gpt-4.1", "o4-mini", "o3", "gpt-3.5-turbo"],
},
text: {
label: "Text",
type: "string",
description: "The text to summarize.",
},
summaryLength: {
label: "Summary Length",
type: "string",
description: "The desired length of the summary.",
options: Object.values(SUMMARIZE_LENGTH),
default: SUMMARIZE_LENGTH.MEDIUM.value,
optional: true,
},
focusArea: {
label: "Focus Area",
type: "string",
description: "Specific aspects of the text to focus on in the summary.",
optional: true,
},
temperature: {
type: "string",
label: "Temperature",
description:
"Controls randomness: 0 is deterministic, higher values (up to 2) make output more random",
default: "1",
optional: true,
},
maxTokens: {
type: "integer",
label: "Maximum Length",
description: "The maximum number of tokens to generate in the summary",
optional: true,
},
topP: {
type: "string",
label: "Top P",
      description:
        "An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass",
default: "1",
optional: true,
},
frequencyPenalty: {
type: "string",
label: "Frequency Penalty",
      description:
        "Penalizes new tokens based on their existing frequency in the text so far, decreasing the likelihood of repeating the same line verbatim",
default: "0",
optional: true,
},
presencePenalty: {
type: "string",
label: "Presence Penalty",
description: "Increases the likelihood of talking about new topics",
default: "0",
optional: true,
},
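    // Illustrative shape of a prior-history entry (assumed to be JSON-encoded,
    // since string[] props are delivered as strings):
    //   {"role": "assistant", "content": "Earlier summary text..."}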
messages: {
label: "Prior Message History",
type: "string[]",
      description:
        "_Advanced_. Because [the models have no memory of past requests](https://platform.openai.com/docs/guides/chat/introduction), all relevant information must be supplied via the conversation. You can provide [an array of messages](https://platform.openai.com/docs/guides/chat/introduction) from prior conversations here. If this prop is set, the action skips the system instructions generated from **Summary Length** and **Focus Area**, appends the new **Text** to summarize to the end of this array, and sends the result to the API.",
optional: true,
},
},
async run({ $ }) {
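    // Flow: build length/focus-aware system instructions, assemble the chat
    // message list, attach any optional sampling params, then call the API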
const messages = []
let systemInstructions = "You are a skilled summarizer. Create a"
switch (this.summaryLength) {
case SUMMARIZE_LENGTH.SHORT.value:
systemInstructions += " brief and concise"
break
case SUMMARIZE_LENGTH.LONG.value:
systemInstructions += " detailed and thorough"
break
default:
systemInstructions += " comprehensive but focused"
}
systemInstructions += " summary of the following content."
if (this.focusArea) {
systemInstructions += ` Focus particularly on aspects related to: ${this.focusArea}.`
}
if (this.messages && this.messages.length > 0) {
messages.push(...this.messages)
} else {
messages.push({
role: "system",
content: systemInstructions,
})
}
messages.push({
role: "user",
content: this.text,
})
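    // Base request body: the model and the chat messages are required;
    // everything added below is optional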
const params = {
model: this.modelId,
messages: messages,
}
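    // Forward only the sampling params the user set, coercing string-typed
    // props to the numbers the API expects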
if (this.temperature !== undefined) params.temperature = +this.temperature
if (this.maxTokens !== undefined) params.max_tokens = this.maxTokens
if (this.topP !== undefined) params.top_p = +this.topP
if (this.frequencyPenalty !== undefined)
params.frequency_penalty = +this.frequencyPenalty
if (this.presencePenalty !== undefined)
params.presence_penalty = +this.presencePenalty
    // The payload is chat-format, so target the Chat Completions endpoint
    // (assumes the passthrough service mirrors the OpenAI SDK surface)
    const response = await $.services.openai.chat.completions.create(params)
const summary = response.choices[0].message.content
$.export("$summary", "Generated summary from OpenAI")
return {
summary,
raw_response: response,
}
},
}