I need to have some generated S3 bucket with all avavilable permissions - READ and WRITE are mandatatory.
I tried so far to make it using this code:
var createBucketParams = {Bucket: bucketName, ACL: 'public-read-write', GrantFullControl:'FULL_CONTROL'};
S3.createBucket(createBucketParams, function(err, data) {
if (err) {
console.log("Error while calling createBucket() - Error: " + err);
} else {
console.log("Successfully Bucket created.");
}
});
My problem is that the permissions are not working well, i'm getting AccessDenied while trying to getObject(), any ideas how to solve it ?
EDIT: This is my current code with all the latest changes:
before(() => {
console.log("Mocking AWS.DynamoDB.DocumentClient API");
AWSMock.mock('DynamoDB.DocumentClient', 'put', function(params, callback) {
callback(null, "Mock: successfully put object in DynamoDB");
});
console.log("Mocking AWS.S3 API");
AWSMock.mock('S3', 'createBucket', function (params, callback){
callback(null, "successfully bucket created in S3");
});
AWSMock.mock('S3', 'putObject', function (params, callback){
callback(null, "successfully put item in S3");
});
AWSMock.mock('S3', 'getObject', function (params, callback){
callback(null, "successfully get item in S3");
});
AWSMock.mock('S3', 'putBucketPolicy', function (params, callback){
callback(null, "successfully putBucketPolicy in S3");
});
});
it('Writing a file to S3 with user metadata - when data is valid JSON and updating the DB is Succeed',
function(done) {
var bucketName = 'my.unique.bucket.name';
var fileName = 'fileName.csv';
var s3Policy = {
"Version":"2012-10-17",
"Id":"http referer policy example",
"Statement":[
{
"Sid":"Allow get requests originating from www.example.com and example.com.",
"Effect": "Allow",
"Principal": "*",
"Action": ["s3:GetObject,s3:PutObject"],
"Resource": "arn:aws:s3:::" + bucketName + "/*",
"Condition": {
"IpAddress": {"aws:SourceIp": "127.0.0.1"},
"NotIpAddress": {"aws:SourceIp": "127.0.0.1"}
}
}
]
};
var S3 = new AWS.S3();
var createBucketParams = {Bucket: bucketName, ACL: "FULL_CONTROL", Region: "us-west-2"};
S3.createBucket(createBucketParams, function(err, data) {
if (err) {
console.log("Error while calling createBucket() - Error: " + err);
} else {
console.log("Successfully Bucket created.");
}
});
var putBucketPolicyParams = {
Bucket: bucketName,
Policy: JSON.stringify(s3Policy)
};
S3.putBucketPolicy(putBucketPolicyParams, function(err, data) {
if (err) console.log(err, err.stack);
else console.log(data);
});
var putObjectParams = {Bucket: bucketName,
Key: fileName,
Body: 'Hello!',
Metadata: {startDate: "2016-12-12T12:34:56.000Z", endDate:"2016-12-31T12:34:56.000Z",
userName:"someUser",
originalFileName:"fileName.csv"}};
S3.putObject(putObjectParams, function(err, data) {
if (err) {
console.log(err, err.stack)
} else {
console.log("Successfully put a file to bucket");
}
});
LambdaTester(myHandler)
.event(JSON.parse(JSON.stringify(require('./testcases/single_record_with_user_metadata.json'))))
.expectSucceed(function(result) {
expect(result.valid).to.be.true;
})
.verify(done);
});
And the usage in the js file:
S3.getObject(s3FileParams, function(err, data) {
if (err) {
var message = "Error while trying to get file object " + fullFileName + " from bucket " + bucketName + ". Make sure they exist and your bucket is in the same region as this function. Error: " + err;
console.error(message);
// console.log(err, err.stack);
console.log(JSON.stringify(err, null, 2));
} else {
userMetaDataJson = JSON.parse(JSON.stringify(data.Metadata));
}
resolve();
})
Aucun commentaire:
Enregistrer un commentaire