I am getting an error which says "The request signature we calculated does not match the signature you provided. Check your key and signing method." when I execute await s3Upload.promise() in the end. Any help will be highly appreciated.
My code is below
var aws = require("aws-sdk");
const s3 = new aws.S3();
aws.config.update({
accessKeyId: 'my-access-key',
secretAccessKey: 'my-secret'
});
const _archiver = require('archiver');
var stream = require('stream');
const streamPassThrough = new stream.PassThrough();
var params = {
ACL: 'private',
Body: streamPassThrough,
Bucket: bucketName,
ContentType: 'application/zip',
Key: zipFileName
};
//This returns us a stream.. consider it as a real pipe sending fluid to S3 bucket.. Don't forget it
const s3Upload = s3.upload(params,
(err, resp) => {
if (err) {
console.error('Got error creating stream to s3 ${ err.name } ${ err.message } ${ err.stack }');
throw err;
}
console.log(resp);
});
var _s3DownloadStreams = await Promise.all(_keys.map(_key => new Promise((_resolve, _reject) => {
s3.getObject({ Bucket: bucketName, Key: _key }).promise()
.then(_data => _resolve(
{ data: _data.Body, name: `${_key.split('/').pop()}` })
);
}
))).catch(_err => { throw new Error(_err) } );
await new Promise((_resolve, _reject) => {
// var _myStream = s3Upload(bucketName, zipFileName); //Now we instantiate that pipe...
var _archive = _archiver('zip');
_archive.on('error', err => { throw new Error(err); });
//Your promise gets resolved when the fluid stops running... so that's when you get to close and resolve
s3Upload.on('close', _resolve());
s3Upload.on('end', _resolve());
s3Upload.on('error', _reject());
_archive.pipe(streamPassThrough); //Pass that pipe to _archive so it can push the fluid straigh down to S3 bucket
_s3DownloadStreams.forEach(_itm => _archive.append(_itm.data, { name: _itm.name })); //And then we start adding files to it
_archive.finalize(); //Tell is, that's all we want to add. Then when it finishes, the promise will resolve in one of those events up there
}).catch(_err => {
throw new Error(_err)
});
console.log(params);
await s3Upload.promise().catch(_err => {
console.log('Error Is : ' + _err)
});
//_cb(null, {}); //Handle response back to server
I've just finished implementing with the help of Samet <3, thank you both!
I had to change:
s3Upload.on('close', resolve);
s3Upload.on('end', resolve);
s3Upload.on('error', reject);
to:
s3Upload.on('close', resolve());
s3Upload.on('end', resolve());
s3Upload.on('error', reject());
otherwise the promise never resolves and the code after:
await s3Upload.promise();
doesn't execute
For further actions, you may consider blocking this person and/or reporting abuse
We're a place where coders share, stay up-to-date and grow their careers.
Thanks for the post!
Here are the typos.
Here are the typos I spotted. In "Now for our array of keys, we can iterate ofter it to create the S3StreamDetails objects", "ofter" should be "after". In "Now we can connect the archiver to pipe date to the upload stream and append all the download streams to it", "date" should be "data". Also, with the "s3StreamUpload" variable, do you mean "s3Upload"?
Hi Samet,
I am getting an error which says "The request signature we calculated does not match the signature you provided. Check your key and signing method." when I execute await s3Upload.promise() in the end. Any help will be highly appreciated.
My code is below
var aws = require("aws-sdk");
const s3 = new aws.S3();
aws.config.update({
accessKeyId: 'my-access-key',
secretAccessKey: 'my-secret'
});
const _archiver = require('archiver');
var stream = require('stream');
const bucketName = 'myBucket';
const zipFileName = 'zipper.zip';
const streamPassThrough = new stream.PassThrough();
var params = {
ACL: 'private',
Body: streamPassThrough,
Bucket: bucketName,
ContentType: 'application/zip',
Key: zipFileName
};
//This returns us a stream.. consider it as a real pipe sending fluid to S3 bucket.. Don't forget it
const s3Upload = s3.upload(params,
(err, resp) => {
if (err) {
console.error('Got error creating stream to s3 ${ err.name } ${ err.message } ${ err.stack }');
throw err;
}
console.log(resp);
});
exports.handler = async (_req, _ctx, _cb) => {
var _keys = ['PDF/00CRO030.pdf', 'PDF/MM07200231.pdf'];
};
Did you double check your accessKeyId and secretAccessKey? There might be a space before or after your keys (access key or secret key).
Hi Samet, yes I checked and the credentials are correct. However, I am able to download the bytes array from Keys array using s3.getObject.
Is there something which I am missing?
Could you compare your implementation with this?
github.com/rokumatsumoto/aws-node-...
Also please share your implementation with gist link (gist.github.com/)
The difference is this one I posted here dev.to/prosonf/comment/18d6d, the handlers on close, end and error.
Many thanks. Post updated!
s3StreamUpload
should be replaced by streamPassThrough.
I've just finished implementing with the help of Samet <3, thank you both!
I had to change:
s3Upload.on('close', resolve);
s3Upload.on('end', resolve);
s3Upload.on('error', reject);
to:
s3Upload.on('close', resolve());
s3Upload.on('end', resolve());
s3Upload.on('error', reject());
otherwise the promise never resolves and the code after:
await s3Upload.promise();
doesn't execute