Proposed solution for pagination in the Node.js client

I was trying to solve the problem of pagination in Aerospike. My query looked like: "Get the next 10 products after a given timestamp value." After looking into various discussions and asking questions, I came up with the following solution.

This is my Node.js code:

```js
var query = client.query(aerospikeDBParams.dbName, aerospikeDBParams.productTable)
var results = []
var total = 0  // number of 'data' events received

// Default to "now" when no timestamp is supplied, i.e. the first page.
if (params.timestamp == 0) {
  params.timestamp = (new Date).getTime()
}
params.timestamp = parseInt(params.timestamp)

// Range filter on the secondary index: all records up to the given timestamp.
query.where(aerospike.filter.range('timestamp', 0, params.timestamp))

// Stream UDF that sorts the filtered records by timestamp and limits them to 10.
query.setUdf('test', 'order_by', [params])

var stream = query.foreach()
stream.on('error', (err) => {
  console.log('query error: ', err)
  throw err
})
stream.on('data', (data) => {
  total = total + 1
  if (data.length == 1)
    results = []
  else
    results = data
})
stream.on('end', () => {
  res.json({ status: "success", records: results })
})
```
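To make the paging flow concrete, here is a rough sketch of how I picture chaining requests on top of this handler: the client remembers the timestamp of the oldest record in the current page and sends it back as the upper bound of the next request. This is only an illustration, not my actual route code; the `fetchPage` helper, the `'test'`/`'product'` namespace and set names, and the hard-coded host are placeholders, and it assumes the UDF module and secondary index described below are already in place.

```js
const aerospike = require('aerospike')

// Hypothetical helper: fetch one page of (up to 10) products with
// timestamp <= beforeTimestamp, using the same filter + order_by stream UDF
// as the handler above. 'test'/'product' stand in for
// aerospikeDBParams.dbName / aerospikeDBParams.productTable.
function fetchPage(client, beforeTimestamp, callback) {
  const query = client.query('test', 'product')
  query.where(aerospike.filter.range('timestamp', 0, beforeTimestamp))
  query.setUdf('test', 'order_by', [{ timestamp: beforeTimestamp, sort: 'DESC', limit: 10 }])

  let page = []
  const stream = query.foreach()
  stream.on('error', callback)
  stream.on('data', (data) => { page = data })   // aggregation emits one sorted list
  stream.on('end', () => callback(null, page))
}

// Usage sketch: the first page is anchored at "now"; each following page is
// anchored at the oldest timestamp seen so far, minus 1 ms so the boundary
// record is not returned twice (assumes millisecond timestamps are unique).
aerospike.connect({ hosts: '127.0.0.1:3000' }, (error, client) => {
  if (error) throw error
  fetchPage(client, Date.now(), (err, firstPage) => {
    if (err) throw err
    const oldest = firstPage[firstPage.length - 1]
    fetchPage(client, oldest.timestamp - 1, (err2, secondPage) => {
      if (err2) throw err2
      console.log('second page:', secondPage)
      client.close()
    })
  })
})
```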

I have created a secondary index on the timestamp bin, and here is my Test.lua file; this code is taken from the existing solution to the order_by and limit problem (a setup sketch for registering the UDF module and creating the index follows the Lua code):

    function order_by(stream, arguments)

      local function map_record(rec, fields)
        -- Could add other record bins here as well.
        -- This code shows different data access to record bins.
        local result = map()

        if fields ~= nil then -- selected fields only
          for v in list.iterator(fields) do
            result[v] = rec[v]
          end
        end

        if fields == nil then -- all fields
          local names = record.bin_names(rec)
          for i, v in ipairs(names) do
            result[v] = rec[v]
          end
        end

        result["meta_data"] = map()
        result["meta_data"]["digest"] = record.digest(rec)
        result["meta_data"]["generation"] = record.gen(rec)
        result["meta_data"]["set_name"] = record.setname(rec)
        result["meta_data"]["expiry"] = record.ttl(rec)
        return result
      end

      -- Comparison helper: true if v1 should come before v2 for the given order.
      -- (The snippet I copied omitted this function; this is my assumption of
      -- what it should look like.)
      local function compare(v1, v2, order)
        if order == "ASC" then
          return v1 < v2
        else
          return v1 > v2
        end
      end

      local function list_truncate(l, limit)
        if list.size(l) > limit then
          info("list.size[%d] > limit[%d]. Truncate it.", list.size(l), limit)
          list.trim(l, limit + 1)
        end
      end

      -- Insert a rec into a sorted list; return the insertion index for merge sort.
      local function insert_sort(sorted_list, rec_map, sort_key, order, start_index)
        local v = rec_map[sort_key]
        debug("sort_key: %s, order: %s, value: %s", sort_key, order, v)
        if v == nil then
          return 0
        end

        len = list.size(sorted_list)
        for i = start_index or 1, len do
          v2 = sorted_list[i][sort_key]
          if compare(v, v2, order) then
            list.insert(sorted_list, i, rec_map)
            return i
          end
        end

        list.append(sorted_list, rec_map)
        return len
      end

      local function sort_aggregator(sort_key, order, limit)
        -- Inserting a rec into a sorted list is quite easy.
        return function(sorted_list, rec)
          -- convert rec to map
          local rec_map = map_record(rec)

          -- apply orderBy
          insert_sort(sorted_list, rec_map, sort_key, order)

          -- apply limit
          list_truncate(sorted_list, limit)

          return sorted_list
        end
      end

      local function sort_reducer(sort_key, order, limit)
        return function(sorted_list1, sorted_list2)
          -- apply merge sort
          local start_index
          for i = 1, list.size(sorted_list2) do
            local rec_map = sorted_list2[i]
            start_index = insert_sort(sorted_list1, rec_map, sort_key, order, start_index)
          end

          -- apply limit
          list_truncate(sorted_list1, limit)
          return sorted_list1
        end
      end

      -- Default: order by timestamp DESC (change to ASC if needed), limit 10.
      local sort_key = "timestamp"
      local order = "DESC"
      local limit = 10
      if arguments ~= nil then -- only one sort key is supported right now
        if arguments["sort"] ~= nil then
          order = arguments["sort"]
        end
        if arguments["limit"] ~= nil then
          limit = arguments["limit"]
        end
      end

      local aggregator = sort_aggregator(sort_key, order, limit)
      local reducer = sort_reducer(sort_key, order, limit)
      return stream : aggregate(list(), aggregator) : reduce(reducer)
    end
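For completeness, this is roughly how the two prerequisites for the code above can be set up from the Node.js client: registering the Lua module (the module name passed to setUdf is 'test', so I register the file as test.lua) and creating the integer secondary index on the timestamp bin. Again, this is only a sketch; the file path, the index name, and the 'test'/'product' namespace and set names are placeholders for my real values.

```js
const aerospike = require('aerospike')
const path = require('path')

aerospike.connect({ hosts: '127.0.0.1:3000' }, (error, client) => {
  if (error) throw error

  // Register the stream UDF module; the filename (minus .lua) must match the
  // module name used in query.setUdf('test', ...). The path is a placeholder.
  client.udfRegister(path.join(__dirname, 'test.lua'), (err) => {
    if (err) throw err

    // Create the integer secondary index on 'timestamp' used by filter.range.
    // Namespace, set, and index names here are placeholders.
    client.createIntegerIndex({
      ns: 'test',
      set: 'product',
      bin: 'timestamp',
      index: 'idx_product_timestamp'
    }, (err2, job) => {
      if (err2) throw err2
      // Wait for the index build to complete before running queries against it.
      job.waitUntilDone().then(() => {
        console.log('UDF registered and secondary index ready')
        client.close()
      }).catch((err3) => { throw err3 })
    })
  })
})
```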

I am new to Aerospike, so can anyone help me improve this code or propose another method that achieves my goal more efficiently?