I had a similar issue where I had to write errors to a log file in S3 during a long-running process (a couple of hours). So I didn't have a file locally to create a one-time stream from, but I had to append the errors to a file at runtime.
So what you can do is keep an open connection to a specific file and write to it whenever you want:
const { S3 } = require('aws-sdk')
const { PassThrough } = require('stream')

// append to the open connection; the write callback fires once the
// chunk has been flushed, so awaiting it also handles backpressure
const append = (stream, data) => new Promise(resolve => {
  stream.write(`${data}\n`, resolve)
})

const openConnectionWithS3 = async () => {
  const s3 = new S3({
    credentials: {
      accessKeyId: process.env.AWS_ACCESS_KEY_ID,
      secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
    },
    endpoint: process.env.AWS_S3_ENDPOINT,
    region: process.env.AWS_DEFAULT_REGION,
  })
  const fileName = 'test.log'
  const bucketName = 'my-bucket'

  // create a PassThrough stream: we write our data into it,
  // and it passes the same data on to AWS
  const pass = new PassThrough()

  // don't await the promise yet; keep the upload open and await
  // the result when the long-running process is done
  const promise = s3
    .upload({
      Bucket: bucketName,
      Key: fileName,
      // pass the stream as Body; AWS handles the stream from here on
      Body: pass,
    })
    .promise()

  // write data to our open connection.
  // this can even happen from different places in the code
  for (let i = 0; i < 100000; i++) {
    await append(pass, `foo${i}`)
  }

  // end the stream so the upload knows no more data is coming,
  // then wait for the upload itself to finish
  pass.end()
  await promise
}

openConnectionWithS3()
It appends the items to the file in S3 as you write them (under the hood, s3.upload streams the body as a multipart upload), and the promise resolves once the upload is done.
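
If you're on AWS SDK v3, where the client no longer has an .upload() method, the same pattern works with the Upload helper from @aws-sdk/lib-storage. A minimal sketch, reusing the same bucket and key as above:

const { S3Client } = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')
const { PassThrough } = require('stream')

const openConnectionWithS3v3 = async () => {
  const pass = new PassThrough()

  // same idea: hand the stream to the Upload helper,
  // then keep writing to it while the upload stays open
  const upload = new Upload({
    client: new S3Client({ region: process.env.AWS_DEFAULT_REGION }),
    params: { Bucket: 'my-bucket', Key: 'test.log', Body: pass },
  })
  const promise = upload.done()

  pass.write('first line\n')
  pass.write('second line\n')
  pass.end()

  await promise
}

openConnectionWithS3v3()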