// Gzip-compress a batch of events as newline-delimited JSON (keys
// decamelized for the warehouse) and upload the archive to the events
// bucket under an hourly dt=YYYY-MM-DDTHH partition prefix.
const uploadEvents = async (events) => {
  const outStream = zlib.createGzip();
  for (const event of events) {
    const row = humps.decamelizeKeys(event);
    outStream.write(`${JSON.stringify(row)}\n`, 'utf8');
  }
  outStream.end();

  const date = new Date().toISOString();
  const partitionPrefix = date.substring(0, 13); // YYYY-MM-DDTHH
  const fileName = `dt=${partitionPrefix}/${date}.json.gz`;

  const params = {
    Bucket: process.env.HN_INSIGHTS_EVENTS_BUCKET || 'hn-insights-events',
    Key: fileName,
    Body: outStream,
  };
  console.log(`Uploading ${fileName}: ${events.length} events...`);
  await s3.upload(params).promise();
  console.log(`Uploading ${fileName} done`);
}
/**
 * Convenience method for retrieving a signed link to an S3 bucket object.
 * @param {String} path - S3 path; parsed into params by UTILS.parseS3
 * @param {Number|Function} expires - link lifetime in seconds (defaults to
 *   900 when not numeric); may instead be the error callback when the
 *   caller omits `expires`
 * @param {Function} [callback] - optional error callback; when neither
 *   `expires` nor `callback` is a function, errors go to this.error
 * @returns {Promise<String>} the signed URL
 *
 * NOTE(review): on error this resolves anyway — the callback and
 * this.error run first (this.error presumably throws; confirm), but if
 * they don't, the promise resolves with an undefined url. `parseInt` is
 * also called without an explicit radix (safe here only because the
 * isNaN guard rejects non-numeric input).
 */
// end redirect // Convenience method for retrieving a signed link to an S3 bucket object async getLink(path,expires,callback) { let params = UTILS.parseS3(path) // Default Expires params.Expires = !isNaN(expires) ? parseInt(expires) : 900 // Default callback let fn = typeof expires === 'function' ? expires : typeof callback === 'function' ? callback : e => { if (e) this.error(e) } // getSignedUrl doesn't support .promise() return await new Promise(r => S3.getSignedUrl('getObject',params, async (e,url) => { if (e) { // Execute callback with caught error await fn(e) this.error(e) // Throw error if not done in callback } r(url) // return the url })) }
// Fetch the S3 object (params assembled by the caller); `data.Body`
// holds the payload. `let` kept in case `data` is reassigned later
// outside this view.
let data = await S3.getObject(params).promise()
// Stream wc.outputStream to S3. Use only the callback form: passing a
// callback already starts the ManagedUpload, so chaining .promise() on
// top makes send run a second time and leaves a floating promise whose
// rejection is never handled.
s3.upload(
  {
    Bucket: u.host,
    Key: u.path.slice(1) + '/' + p + suffix,
    Body: wc.outputStream,
  },
  err => {
    if (err) log('S3 upload error', err);
    done(); // signal completion whether or not the upload succeeded
  }
)
// Stream readStream to S3; log failures and always invoke done().
const objectKey = u.path.slice(1) + '/' + p + suffix;
s3.upload({ Bucket: u.host, Key: objectKey, Body: readStream }, (err) => {
  if (err) {
    log('S3 upload error', err);
  }
  done();
});
/**
 * Upload an MP3 audio stream to the transcripts bucket under a random
 * key and resolve with its public URL.
 * @param {stream.Readable} audioStream - the audio payload
 * @returns {Promise<String>} the uploaded object's Location URL
 */
const upload = audioStream =>
  s3
    .upload({
      ACL: "public-read",
      // RFC 3003: the registered MIME type for MP3 is audio/mpeg;
      // "audio/mp3" is a common but non-standard alias.
      ContentType: "audio/mpeg",
      Bucket: process.env.TRANSCRIPTS_BUCKET,
      Key: uuid(),
      Body: audioStream,
      StorageClass: "REDUCED_REDUNDANCY"
    })
    .promise()
    .then(data => data.Location)
/**
 * Check whether an object exists in the configured bucket.
 * Resolves with the object's metadata when found; resolves with
 * undefined when it is missing — S3 reports 'Forbidden' instead of
 * 'NotFound' when the caller lacks s3:ListBucket, so both count as
 * "not there". Any other error rejects wrapped in SOMETHING_WRONG.
 * @param {String} key - the S3 object key
 * @returns {Promise<Object|undefined>}
 */
function checkS3(key) {
  return new Promise((resolve, reject) => {
    s3.headObject({ Bucket: process.env.BUCKET, Key: key }, (err, metadata) => {
      if (err && ['NotFound', 'Forbidden'].includes(err.code)) return resolve();
      if (err) {
        const e = Object.assign({}, Errors.SOMETHING_WRONG, { err });
        return reject(e);
      }
      return resolve(metadata);
    });
  });
}
// action: list the bucket's objects and dump either the failure or the
// listing to the console.
s3.listObjects(params, (listErr, listing) => {
  if (listErr) {
    console.log(listErr, listErr.stack);
  } else {
    console.log(listing);
  }
});
// Kick off a GetObject request for every key in the listing; returns an
// array of pending promises (callers are expected to Promise.all them).
const getFileObjectsFromS3 = (bucketName, fileList) =>
  fileList.Contents.map(({ Key }) =>
    s3.getObject({ Bucket: bucketName, Key }).promise()
  )
/*
 * Initialize an S3 bucket: create the attachment bucket only when it
 * does not already exist.
 */
async function initBucket () {
  const s3 = new AWS.S3()
  const Bucket = config.AMAZON.ATTACHMENT_S3_BUCKET
  try {
    await s3.headBucket({ Bucket }).promise()
  } catch (err) {
    // Only a 404 means the bucket is missing. Anything else (e.g. 403
    // Forbidden) would make createBucket fail too — surface it instead
    // of masking it with a confusing createBucket error.
    if (err.statusCode !== 404) throw err
    await s3.createBucket({ Bucket }).promise()
  }
}
async getTotal() { const objects = await s3 .listObjects({ Bucket: bucket, Prefix: id }) .promise(); return Number(objects.Contents.length); }
/**
 * Download file from S3
 * @param {String} attachmentId the attachment id
 * @return {Promise} promise resolved to downloaded data
 */
async function downloadFromS3 (attachmentId) {
  const params = { Bucket: config.AMAZON.ATTACHMENT_S3_BUCKET, Key: attachmentId }
  const { Body, ContentType } = await s3.getObject(params).promise()
  return { data: Body, mimetype: ContentType }
}
// Upload the renamed content to the storage bucket using the shared
// multipart upload options; resolves with the upload result.
const multipartUpload = ({ content, filename }) => {
  const targetKey = BASE_PATH + rename(filename);
  return S3.upload(
    { Bucket: STORAGE_BUCKET, Key: targetKey, Body: content },
    options
  ).promise();
}
// Write a single object to the given bucket with the supplied content
// type, returning the request promise.
const putObject = (myBucket, key, body, contentType) => {
  const params = {
    Bucket: myBucket,
    Key: key,
    Body: body,
    ContentType: contentType,
  };
  return s3.putObject(params).promise();
}
async getTotal() { const objects = await s3 .listObjects({ Bucket: bucket, Prefix: id }) .promise(); return Number(objects.Contents.length); }