// Gzip-compress a batch of events as newline-delimited JSON and upload the
// archive to the insights bucket under an hourly `dt=` partition prefix.
const uploadEvents = async (events) => {
  const gzip = zlib.createGzip();
  for (const event of events) {
    gzip.write(`${JSON.stringify(humps.decamelizeKeys(event))}\n`, 'utf8');
  }
  gzip.end();

  const timestamp = new Date().toISOString();
  const hourPrefix = timestamp.substring(0, 13); // "YYYY-MM-DDTHH"
  const fileName = `dt=${hourPrefix}/${timestamp}.json.gz`;

  const params = {
    Bucket: process.env.HN_INSIGHTS_EVENTS_BUCKET || 'hn-insights-events',
    Key: fileName,
    Body: gzip,
  };

  console.log(`Uploading ${fileName}: ${events.length} events...`);
  await s3.upload(params).promise();
  console.log(`Uploading ${fileName} done`);
};
// Upload the writer's output stream to S3, logging failures and always
// signalling completion.
//
// FIX: the original passed a node-style callback to s3.upload() AND called
// .promise() on the returned ManagedUpload. In AWS SDK v2 the callback form
// already calls send(), and ManagedUpload.promise() calls send() again, so
// the upload was dispatched twice. Use the promise interface only.
s3.upload({
  Bucket: u.host,
  Key: u.path.slice(1) + '/' + p + suffix,
  Body: wc.outputStream,
})
  .promise()
  .then(
    () => done(),
    (err) => {
      log('S3 upload error', err);
      done();
    }
  )
// Upload the read stream to S3; log any failure, then signal completion
// regardless of outcome.
s3.upload(
  {
    Bucket: u.host,
    Key: u.path.slice(1) + '/' + p + suffix,
    Body: readStream,
  },
  (err) => {
    if (err) {
      log('S3 upload error', err);
    }
    done();
  }
);
/**
 * Upload an MP3 audio stream to the transcripts bucket under a random key
 * and resolve with its public URL.
 *
 * @param {stream.Readable} audioStream - MP3 audio to store.
 * @returns {Promise<string>} The public S3 object URL.
 */
const upload = (audioStream) =>
  s3
    .upload({
      ACL: "public-read",
      // FIX: "audio/mp3" is not a registered MIME type; RFC 3003 registers
      // "audio/mpeg" for MP3 audio.
      ContentType: "audio/mpeg",
      Bucket: process.env.TRANSCRIPTS_BUCKET,
      Key: uuid(),
      Body: audioStream,
      StorageClass: "REDUCED_REDUNDANCY",
    })
    .promise()
    .then((data) => data.Location)
/**
 * Stream an upload into the `original/` prefix of the bucket as a
 * public-read object and return its resulting location URL.
 */
const s3Upload = async ({ createReadStream, uniqueFilename, mimetype }) => {
  const client = new S3({ region, endpoint });
  const key = `original/${uniqueFilename}`;
  const { Location } = await client
    .upload({
      Bucket,
      Key: key,
      ACL: 'public-read',
      ContentType: mimetype,
      Body: createReadStream(),
    })
    .promise();
  return Location;
};
/**
 * CLI entry point: upload the file at --pathname to the --bucket S3 bucket,
 * keyed by its base name.
 */
(async function uploadFile() {
  const { bucket, pathname } = program;
  // Normalize once and reuse the normalized path everywhere.
  // FIX: the original derived `filePath` but then re-read `program.pathname`
  // directly, leaving the two subtly inconsistent.
  const filePath = path.isAbsolute(pathname) ? pathname : path.resolve(pathname);
  const fileKey = path.basename(filePath);
  const fileContent = fs.readFileSync(filePath);
  const s3 = new S3({ apiVersion: "latest" });
  await s3
    .upload({
      Bucket: bucket,
      Key: fileKey,
      Body: fileContent,
    })
    .promise();
})().catch((err) => {
  // FIX: the async IIFE's promise was floating; surface failures instead of
  // dying with an unhandled rejection.
  console.error("Upload failed:", err);
  process.exitCode = 1;
});
// Upload `body` under `key`; on success optionally delete the local source
// file, then settle the surrounding promise.
s3.upload({ Key: key, Body: body }, function (err, data) {
  if (err) {
    reject(err);
    return;
  }
  if (clean) {
    // FIX: fs.unlink requires a callback in modern Node (calling it without
    // one throws TypeError). Cleanup failures are deliberately ignored so a
    // failed delete cannot mask a successful upload.
    fs.unlink(srcPath, () => {});
  }
  resolve(data);
});
/** * Upload the data as a readable stream to. * * @param {stream.Readable} src - The source stream. * @param {string} filePath - The file path to upload. * @param {Object} [options={}] - The upload options. * @returns {Promise} */ upload(src, filePath, options = {}) { const params = Object.assign({ 'Bucket': this.bucket, 'Key': filePath, 'Body': src }, options); debug(`[storage-s3:upload] Bucket=${params['Bucket']} Key=${params['Key']}`); return this.client.upload(params).promise(); }
put(user, repo, oid, stream) { var self = this; return new Promise(function(resolve, reject) { let storageClass = self._options.storage_class || 'STANDARD'; let params = { Bucket: self._options.bucket, Key: S3Store._getKey(user, repo, oid), Body: stream, StorageClass: storageClass }; self._s3.upload(params, function(err, data) { if (err) { return reject(err); } resolve(data); }); }); }
/**
 * Start a managed S3 upload of `content` under a freshly generated object
 * name inside BASE_PATH, returning the upload promise.
 */
const multipartUpload = ({ content, filename }) => {
  const newName = rename(filename);
  const params = {
    Bucket: STORAGE_BUCKET,
    Key: BASE_PATH + newName,
    Body: content,
  };
  return S3.upload(params, options).promise();
};
// Publish the rendered RSS feed to the public media bucket. Fire-and-forget:
// errors are only logged, never propagated.
function uploadRSSFeedToS3(xml) {
  const uploadParams = {
    Bucket: 'media.downtowncornerstone.org',
    Key: 'DCCBandRef.xml',
    Body: xml,
    ContentType: 'application/rss+xml',
  };
  s3.upload(uploadParams, (err) => {
    if (err) {
      console.log("Error uploading data: ", err);
    } else {
      console.log("Successfully updated feed at " + uploadParams.Bucket + '/' + uploadParams.Key);
    }
  });
}
// Gzip the JSON payload and upload it to the sqlupload/ prefix, then signal
// completion via done().
zlib.gzip(json, function (error, result) {
  if (error) throw error;
  cont++;
  var fname = filename + '.json';
  var file = cont + '_' + fname + '.zip';
  s3Array.push(file);
  var params = {
    Bucket: config.bucketUpload,
    Key: "sqlupload/" + file,
    Body: result,
    ContentType: 'application/zip'
  };
  var options = { partSize: 10 * 1024 * 1024, queueSize: 1 };
  s3.upload(params, options, function (err, data) {
    // FIX: the original only checked `if (data)`, so an upload error was
    // silently swallowed AND done() was never called, stalling the flow.
    if (err) {
      console.error("Upload failed for", file, err);
    }
    done();
  });
});
/**
 * Upload a stream to the configured bucket under `key` and return the
 * resulting object URL.
 *
 * @param {stream.Readable} stream - Data to upload.
 * @param {Object} config - Holds S3_REGION and S3_BUCKET.
 * @param {string} key - Destination object key.
 * @returns {Promise<string>} The uploaded object's location URL.
 */
async function uploadS3(stream, config, key) {
  const client = new AWS.S3({ region: config.S3_REGION });
  const { Location } = await client
    .upload({ Key: key, Bucket: config.S3_BUCKET, Body: stream })
    .promise();
  console.log('Uploaded to', Location);
  return Location;
}
/**
 * Upload file to S3
 * @param {String} attachmentId the attachment id
 * @param {Buffer} data the file data
 * @param {String} mimetype the MIME type
 * @param {String} fileName the original file name
 * @return {Promise} promise to upload file to S3
 */
async function uploadToS3 (attachmentId, data, mimetype, fileName) {
  // The attachment id doubles as the object key; the original file name is
  // preserved in object metadata.
  const params = {
    Bucket: config.AMAZON.ATTACHMENT_S3_BUCKET,
    Key: attachmentId,
    Body: data,
    ContentType: mimetype,
    Metadata: { fileName },
  }
  const request = s3.upload(params)
  return request.promise()
}
// Upload `body` under `key`; delete the local file on success, then settle
// the surrounding promise.
s3.upload({ Key: key, Body: body }, function (err, data) {
  if (err) {
    reject(err);
    return;
  }
  // FIX: fs.unlink requires a callback in modern Node (calling it without
  // one throws TypeError). Cleanup failures are deliberately ignored so a
  // failed delete cannot mask a successful upload.
  fs.unlink(path, () => {});
  resolve(data);
});