Connect accounts to reference their credentials via the auths object, pass data to downstream steps with return or this.key = 'value', pass input data to your code via params, and maintain state across executions with $checkpoint.
// Pipedream step: define shared constants for downstream steps.
// Values assigned to `this` become step exports, readable by later
// steps as steps.CONSTANTS.<key> (Pipedream binds `this` to the step).
async (event, steps) => {
  // Scratch directory where attachment files are written and cleaned up
  this.TMP_DIR = "/tmp"
}
Connect accounts to reference their credentials via the auths object, pass data to downstream steps with return or this.key = 'value', pass input data to your code via params, and maintain state across executions with $checkpoint.
// Pipedream step: persist each email attachment to local disk.
// Attachments arrive base64-encoded on the trigger event; each is
// decoded and written to TMP_DIR, and the resulting directory listing
// is exported for downstream steps via this.tmpFiles.
async (event, steps) => {
  const fs = require("fs")
  const { TMP_DIR } = steps.CONSTANTS

  // `const` on the loop variable — the original `for (a of …)` leaked
  // an implicit global.
  for (const a of event.attachments) {
    const fileData = Buffer.from(a.content_b64, 'base64')
    // Save to disk using the same filename as the attachment
    fs.writeFileSync(`${TMP_DIR}/${a.fileName}`, fileData)
    console.log(`Writing ${a.fileName} to ${TMP_DIR}`)
  }

  // Export what was saved so later steps can process / clean up the files
  const tmpFiles = fs.readdirSync(TMP_DIR)
  console.log(`Saved ${tmpFiles.length} files`)
  this.tmpFiles = tmpFiles
}
Connect accounts to reference their credentials via the auths object, pass data to downstream steps with return or this.key = 'value', pass input data to your code via params, and maintain state across executions with $checkpoint.
// Pipedream step: upload every file in TMP_DIR to S3.
// Credentials come from the connected AWS account (auths.aws); the
// target bucket is a step parameter. Responses are exported via
// this.S3Responses for inspection downstream.
//
// NOTE: we could have just sent the base64-encoded data to S3 in this
// example, but the workflow is meant to show how to save a file to disk
// and send it to an external service.
async (event, steps, params, auths) => {
  const fs = require("fs")
  const AWS = require("aws-sdk")
  const { TMP_DIR } = steps.CONSTANTS
  const { bucket } = params
  const { accessKeyId, secretAccessKey } = auths.aws

  const s3 = new AWS.S3({
    accessKeyId,
    secretAccessKey,
  })

  this.S3Responses = []
  // `const` on the loop variable — the original leaked an implicit global.
  for (const file of fs.readdirSync(TMP_DIR)) {
    const fileData = fs.readFileSync(`${TMP_DIR}/${file}`)
    // Store at a prefix named for the email subject + current epoch;
    // slashes and spaces are replaced so they don't create extra key
    // path segments. Date.now() is the idiomatic form of `+new Date()`.
    const Key = `${event.subject.replace(/[/ ]/g, '_')}-${Date.now()}/${file}`
    const uploadParams = { Bucket: bucket, Key, Body: fileData }
    // Sequential await keeps memory bounded and preserves log ordering
    this.S3Responses.push(await s3.upload(uploadParams).promise())
    console.log(`Uploaded ${file} to S3!`)
  }
}
Connect accounts to reference their credentials via the auths object, pass data to downstream steps with return or this.key = 'value', pass input data to your code via params, and maintain state across executions with $checkpoint.
// Pipedream step: delete processed files from TMP_DIR.
//
// The files stored on disk may persist in our VM for a few executions
// of our workflow. So we need to remove the files we just processed
// to make sure we don't process them again on the next run.
async (event, steps) => {
  const fs = require("fs")
  const { TMP_DIR } = steps.CONSTANTS
  // `const` on the loop variable — the original leaked an implicit global.
  for (const file of fs.readdirSync(TMP_DIR)) {
    fs.unlinkSync(`${TMP_DIR}/${file}`)
    console.log(`Deleted ${file}`)
  }
}