diff --git a/.pydevproject b/.pydevproject
new file mode 100644
index 0000000..40e9f40
--- /dev/null
+++ b/.pydevproject
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?eclipse-pydev version="1.0"?><pydev_project>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
+<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
+</pydev_project>
diff --git a/README.md b/README.md
index 6bed371..f29fabc 100644
--- a/README.md
+++ b/README.md
@@ -347,9 +347,24 @@ that files are loaded every N minutes, use the following process to force period
When you create the configuration, add a filenameFilterRegex such as '.*\.csv', which
only loads CSV files that are put into the specified S3 prefix. Then every N minutes,
-schedule the included dummy file generator through a CRON Job.
+schedule one of the included trigger file generators to run:
-```./path/to/function/dir/generate-dummy-file.py ```
+### Using Scheduled Lambda Functions
+
+You can use an included Lambda function to generate trigger files into all configured prefixes that have a regular expression filter, by completing the following:
+
+* Create a new AWS Lambda Function, and deploy the same zip file from the `dist` folder as you did for the AWS Lambda Redshift Loader. However, when you configure the Handler name, use `createS3TriggerFile.handler`, and configure it with the timeout and RAM required.
+* In the AWS Web Console, select Services/CloudWatch, and in the left-hand navigation select 'Events/Rules'
+* Choose Event Source = 'Schedule' and specify the interval at which your trigger files should be generated
+* Add a Target, choosing the Lambda function you previously configured
+
+Once done, you will see log entries appearing in CloudWatch Logs on the configured schedule, and trigger files arriving in the specified prefixes.
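+
+As a minimal sketch, the same schedule can also be created with the AWS CLI (the rule name, schedule expression, function name, and ARN below are illustrative placeholders, not values from this project):
+
+```
+# create a rule that fires on a fixed schedule
+aws events put-rule --name trigger-file-schedule --schedule-expression "rate(5 minutes)"
+
+# allow CloudWatch Events to invoke the function
+aws lambda add-permission --function-name my-trigger-file-function \
+  --statement-id events-invoke --action lambda:InvokeFunction \
+  --principal events.amazonaws.com
+
+# point the rule at the function
+aws events put-targets --rule trigger-file-schedule \
+  --targets Id=1,Arn=arn:aws:lambda:us-east-1:123456789012:function:my-trigger-file-function
+```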
+
+### Through a CRON Job
+
+You can generate trigger files for specific input buckets and prefixes with the included Python-based utility:
+
+```./path/to/function/dir/generate-trigger-file.py ```
* region - the region in which the input bucket for loads resides
* input bucket - the bucket which is configured as an input location
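+
+For example, a crontab entry that generates a trigger file every 5 minutes might look like the following sketch (the schedule, path, and bracketed values are placeholders, assuming the script takes the arguments above positionally):
+
+```
+*/5 * * * * python /path/to/function/dir/generate-trigger-file.py <region> <input-bucket> ...
+```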
diff --git a/build.sh b/build.sh
index d406466..85d0904 100755
--- a/build.sh
+++ b/build.sh
@@ -2,4 +2,4 @@
ver=`cat package.json | grep version | cut -d: -f2 | sed -e "s/\"//g" | sed -e "s/ //g" | sed -e "s/\,//g"`
-zip -r AWSLambdaRedshiftLoader-$ver.zip index.js common.js constants.js kmsCrypto.js upgrades.js *.txt package.json node_modules/ && mv AWSLambdaRedshiftLoader-$ver.zip dist
+zip -r AWSLambdaRedshiftLoader-$ver.zip index.js common.js createS3TriggerFile.js constants.js kmsCrypto.js upgrades.js *.txt package.json node_modules/ && mv AWSLambdaRedshiftLoader-$ver.zip dist
diff --git a/createS3TriggerFile.js b/createS3TriggerFile.js
index 427b2da..0682a14 100644
--- a/createS3TriggerFile.js
+++ b/createS3TriggerFile.js
@@ -29,7 +29,7 @@ exports.handler = function(event, context) {
} else {
if (!data.Items) {
console.log("Looks like you don't have any configured Prefix entries!");
- context.success();
+ context.succeed();
} else {
// create a trigger file entry for each prefix
async.each(data.Items, function(configItem, callback) {
@@ -42,13 +42,15 @@ exports.handler = function(event, context) {
var fileKey = configItem.s3Prefix.S.replace(bucketName + "\/", "");
// create a trigger file on S3
- createTriggerFile(bucketName, fileKey, callback);
+ exports.createTriggerFile(bucketName, fileKey, callback);
+ } else {
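+				// nothing to generate for this item, so complete the callback and let async.each continue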
+ callback();
}
}, function(err) {
if (err) {
context.fail(err);
} else {
- context.success();
+ context.succeed();
}
});
}
@@ -57,7 +59,7 @@ exports.handler = function(event, context) {
/** function which will create a trigger file in the specified path */
exports.createTriggerFile = function(bucketName, fileKey, callback) {
- var prefix = fileKey + "/lambda-redshift-trigger-file.trigger";
+ var prefix = fileKey + "/lambda-redshift-trigger-file.dummy";
var createParams = {
Bucket : bucketName,
diff --git a/dist/AWSLambdaRedshiftLoader-2.2.0.zip b/dist/AWSLambdaRedshiftLoader-2.2.0.zip
deleted file mode 100644
index 81c18d9..0000000
Binary files a/dist/AWSLambdaRedshiftLoader-2.2.0.zip and /dev/null differ
diff --git a/dist/AWSLambdaRedshiftLoader-2.1.0.zip b/dist/AWSLambdaRedshiftLoader-2.2.1.zip
similarity index 51%
rename from dist/AWSLambdaRedshiftLoader-2.1.0.zip
rename to dist/AWSLambdaRedshiftLoader-2.2.1.zip
index 638814b..e4cfdb1 100644
Binary files a/dist/AWSLambdaRedshiftLoader-2.1.0.zip and b/dist/AWSLambdaRedshiftLoader-2.2.1.zip differ
diff --git a/index.js b/index.js
index aff1231..3b13b8a 100644
--- a/index.js
+++ b/index.js
@@ -560,7 +560,7 @@ exports.handler = function(event, context) {
doProcessBatch = true;
}
- if (config.batchTimeoutSecs && config.batchTimeoutSecs.N) {
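+	// timeout-based flushing only applies when a positive batchSize is configured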
+ if (config.batchTimeoutSecs && config.batchTimeoutSecs.N && config.batchSize.N > 0) {
if (common.now() - lastUpdateTime > parseInt(config.batchTimeoutSecs.N) && pendingEntries.length > 0) {
console.log("Batch Size " + config.batchSize.N + " not reached but reached Age " + config.batchTimeoutSecs.N + " seconds");
doProcessBatch = true;
diff --git a/package.json b/package.json
index 63d986f..7d31ce1 100644
--- a/package.json
+++ b/package.json
@@ -1,16 +1,16 @@
{
"name": "aws-lambda-redshift-loader",
"description": "An Amazon Redshift Database Loader written for AWS Lambda",
- "version": "2.2.0",
+ "version": "2.2.1",
"homepage": "http://github.com/awslabs/aws-lambda-redshift-loader",
"bugs": {
"url": "http://github.com/awslabs/aws-lambda-redshift-loader/issues",
"email": "meyersi@amazon.com"
},
"dependencies": {
- "async": "0.9.0",
- "node-uuid": "1.4.2",
- "pg":"4.3.0"
+ "async": "1.5.2",
+ "node-uuid": "1.4.7",
+ "pg":"4.4.3"
},
"keywords": [
"amazon",
@@ -28,6 +28,7 @@
"addAdditionalClusterEndpoint.js",
"common.js",
"constants.js",
+ "createS3TriggerFile.js",
"describeBatch.js",
"encryptValue.js",
"generate-trigger-file.py",
@@ -48,5 +49,4 @@
"type": "git",
"url": "http://github.com/awslabs/aws-lambda-redshift-loader"
}
-}
-
\ No newline at end of file
+}
\ No newline at end of file