我正在使用dynamodb和nodejs,我有3000条记录,并且我在代码中编写了60多个段,每段扫描1mb数据并显示结果到60多个1MB的限制。因此,请提供解决方案,如何在单个步骤中获得3000条记录扫描,这意味着在一个分段中。请迅速提供解决方案,因为我在项目中间受到了攻击。请帮帮我。以下是我的代码。如何在dynamodb中超过1mb的扫描数据限制
var AWS = require("aws-sdk");
var async = require("async");
AWS.config.update({
region: "---",
endpoint: "-----------",
accessKeyId: "-----------------",
secretAccessKey:"----------"
});
var db = new AWS.DynamoDB.DocumentClient()
var table = "rets_property_all";
var pstart =new Date() .getTime();
async.parallel({
0 : function(callback){
db.scan ({TableName: table,
ProjectionExpression: "#cityname,ListingKey ",
FilterExpression: "#cityname = :v_id",
ExpressionAttributeNames: {
"#cityname": "CityName",
},
ExpressionAttributeValues: {":v_id" : 'BALTIMORE'},
TotalSegments: 63,
Segment: 0//by the worker who has been called
},function (err , res) {
callback (null , res.Items);
});
},
1 : function(callback){
db.scan ({TableName: table,
ProjectionExpression: "#cityname,ListingKey ",
FilterExpression: "#cityname = :v_id",
ExpressionAttributeNames: {
"#cityname": "CityName",
},
ExpressionAttributeValues: {":v_id" : 'BALTIMORE'},
TotalSegments: 63,
Segment: 1//by the worker who has been called
}, function (err , res) {
callback (null , res.Items);
});
},
2 : function(callback){
db.scan ({TableName: table,
ProjectionExpression: "#cityname,ListingKey ",
FilterExpression: "#cityname = :v_id",
ExpressionAttributeNames: {
"#cityname": "CityName",
},
ExpressionAttributeValues: {":v_id" : 'BALTIMORE'},
TotalSegments: 63,
Segment: 2//by the worker who has been called
}, function (err , res) {
callback (null , res.Items);
});
},
--------
---------
------
62 : function(callback){
db.scan ({TableName: table,
ProjectionExpression: "#cityname,ListingKey ",
FilterExpression: "#cityname = :v_id",
ExpressionAttributeNames: {
"#cityname": "CityName",
},
ExpressionAttributeValues: {":v_id" : 'BALTIMORE'},
TotalSegments: 63,
Segment: 62//by the worker who has been called
}, function (err , res) {
callback (null , res.Items);
});
},
},function(err,results){
if (err) {throw err; }
var pend = new Date() .getTime();
console.log (results);
})
我希望所有的3000条记录在一个扫描由于性能问题。它需要4.5秒。为了改善这一点,有任何解决方案。 – purushottam
如果上述代码有任何更正,请提供正确的代码。 – purushottam