Each code step below runs inside an async handler that Pipedream generates for you, either async (event, steps) => { ... } or async (params, auths) => { ... }. Inside a step you can access connected accounts through the auths object, export data for later steps with return or this.key = 'value', receive input data via params, and maintain state across executions with $checkpoint.
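// Map of US state abbreviations to full names, exported for later steps via this.states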
this.states = {
"AL": "Alabama",
"AK": "Alaska",
"AZ": "Arizona",
"AR": "Arkansas",
"CA": "California",
"CO": "Colorado",
"CT": "Connecticut",
"DE": "Delaware",
"FL": "Florida",
"GA": "Georgia",
"HI": "Hawaii",
"ID": "Idaho",
"IL": "Illinois",
"IN": "Indiana",
"IA": "Iowa",
"KS": "Kansas",
"KY": "Kentucky",
"LA": "Louisiana",
"ME": "Maine",
"MD": "Maryland",
"MA": "Massachusetts",
"MI": "Michigan",
"MN": "Minnesota",
"MS": "Mississippi",
"MO": "Missouri",
"MT": "Montana",
"NE": "Nebraska",
"NV": "Nevada",
"NH": "New Hampshire",
"NJ": "New Jersey",
"NM": "New Mexico",
"NY": "New York",
"NC": "North Carolina",
"ND": "North Dakota",
"OH": "Ohio",
"OK": "Oklahoma",
"OR": "Oregon",
"PA": "Pennsylvania",
"RI": "Rhode Island",
"SC": "South Carolina",
"SD": "South Dakota",
"TN": "Tennessee",
"TX": "Texas",
"UT": "Utah",
"VT": "Vermont",
"VA": "Virginia",
"WA": "Washington",
"WV": "West Virginia",
"WI": "Wisconsin",
"WY": "Wyoming"
}
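Because this step is named states and exports the map via this.states, later steps can read it as steps.states.states. That is exactly what the next step does: it picks a random state, asks the National Park Service API for that state's parks, and then selects a random park and image.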
import fetch from 'node-fetch';
const NPS_KEY = process.env.NPS_KEY;
// Credit: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/random
function getRandomIntInclusive(min, max) {
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(Math.random() * (max - min + 1)) + min;
}
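// Read the state map exported by the previous step and pick a random state abbreviation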
let stateAbbrs = Object.keys(steps.states.states);
let chosenState = stateAbbrs[getRandomIntInclusive(0, stateAbbrs.length-1)];
console.log(`I chose ${chosenState}, NPS ${NPS_KEY}`);
console.log(`https://developer.nps.gov/api/v1/parks?stateCode=${chosenState}&limit=100&fields=images&api_key=${NPS_KEY}`)
let httpResult = await fetch(`https://developer.nps.gov/api/v1/parks?stateCode=${chosenState}&limit=100&fields=images&api_key=${NPS_KEY}`,
{
headers: {
'User-Agent':'Node'
}
});
let results = await httpResult.json();
// Keep only parks that have at least one image
let data = results.data.filter(r => r.images.length);
if(!data.length) $end('Data was blank.');
// now select a random park
this.selectedPark = data[getRandomIntInclusive(0, data.length - 1)];
// now select a random image
this.selectedImage = this.selectedPark.images[getRandomIntInclusive(0, this.selectedPark.images.length - 1)];
this.tootText = `Picture from ${this.selectedPark.fullName}. More information at ${this.selectedPark.url}`;
//console.log(JSON.stringify(selectedPark));
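This step exports selectedPark, selectedImage, and tootText for the two Twitter steps that follow, which presumably reference the selected image's URL and the generated text through their params.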
The next step uploads the image to Twitter. It takes two params: url, the URL of the media to upload to Twitter, and media_type, the MIME type of the media being uploaded (see https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload-init).
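In this workflow, url would presumably point at the previous step's export (something like {{steps.<step name>.selectedImage.url}} in the param field) and media_type would be the image's MIME type, e.g. image/jpeg.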
const axios = require("axios")
const chunk = require("lodash.chunk")
// Data we need to make the Twitter API request
const oauthSignerUri = auths.twitter.oauth_signer_uri
const token = {
key: auths.twitter.oauth_access_token,
secret: auths.twitter.oauth_refresh_token,
}
const signConfig = {
token,
oauthSignerUri
}
// Download image, base64 encode it, then upload the media to Twitter
const imageResponse = await axios({
url: params.url,
method: "GET",
responseType: "arraybuffer"
})
const file = Buffer.from(imageResponse.data, 'binary')
const total_bytes = file.length
const base64EncodedFile = file.toString('base64')
const { media_type } = params
// First, tell Twitter the type of file you're uploading, how big it is, etc.
// https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload-init
const mediaUploadInitRequest = {
method: 'POST',
data: '',
url: "https://upload.twitter.com/1.1/media/upload.json",
params: {
command: "INIT",
total_bytes,
media_type,
}
}
// Twitter returns a media_id_string that we use to upload the file,
// and to reference it in future steps
this.uploadMediaInitResponse = (await require("@pipedreamhq/platform").axios(this, mediaUploadInitRequest, signConfig))
this.mediaIdString = this.uploadMediaInitResponse.media_id_string
// Split the file into chunks, APPEND each chunk
// Chunks of at most 10,000 base64 characters each
const splitStringRe = new RegExp('.{1,10000}', 'g');
const chunks = base64EncodedFile.match(splitStringRe);
// Collect all of our requests into an array
const requests = []
for (const [segment_index, media_data] of chunks.entries()) {
// APPEND file content in chunks
// See https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload-append
const mediaUploadAppendRequest = {
method: 'POST',
data: '',
url: "https://upload.twitter.com/1.1/media/upload.json",
params: {
command: "APPEND",
media_id: this.mediaIdString,
segment_index,
media_data,
}
}
requests.push(require("@pipedreamhq/platform").axios(this, mediaUploadAppendRequest, signConfig))
}
// Send N requests at a time
const requestChunks = chunk(requests, 10)
for (const [i, chunk] of requestChunks.entries()) {
console.log(`Processing chunk ${i}`)
await Promise.all(chunk)
}
// Finally, tell Twitter we're done uploading
// https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload-finalize
const mediaUploadFinalizeRequest = {
method: 'POST',
data: '',
url: "https://upload.twitter.com/1.1/media/upload.json",
params: {
command: "FINALIZE",
media_id: this.mediaIdString,
}
}
await require("@pipedreamhq/platform").axios(this, mediaUploadFinalizeRequest, signConfig)
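This step exports uploadMediaInitResponse and mediaIdString; the final step passes mediaIdString along (presumably as the media_ids param) so the tweet includes the uploaded image.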
const axios = require('axios')
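// Parameters accepted by the statuses/update endpoint; they aren't used individually here,
// since the request below passes params through as-is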
const {status, in_reply_to_status_id, auto_populate_reply_metadata, exclude_reply_user_ids, attachment_url, media_ids, possibly_sensitive, lat, long, place_id, display_coordinates, trim_user, enable_dmcommands, fail_dmcommands, card_uri} = params
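// Build the request config and OAuth token to send to Pipedream's OAuth1 signing proxy,
// which signs the request and forwards it to the Twitter API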
const body = {
config: {
url: `https://api.twitter.com/1.1/statuses/update.json`,
method: 'POST',
params,
},
token: {
key: auths.twitter.oauth_access_token,
secret: auths.twitter.oauth_refresh_token,
},
}
const proxy = "https://api.pipedream.com/v1/oauth1/app_13GhY1"
const resp = await axios.post(proxy,body)
const {messages, data} = resp.data
for (const message of messages) {
console.log(message)
}
return data
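Returning data makes the Twitter API's response (the new tweet) available as this step's export, so you can confirm the post in the Pipedream UI or use it in a later step.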