We’re seeing some use cases where large amounts of device data need to be batch inserted from CSV files, and this script provides an example of using Node.js to solve that problem. The script breaks a file up into batches of 100 states per API call to Losant, which lets you insert far more data than sending one state at a time: Losant enforces a maximum rate of 2 messages per second per device, but each message can contain multiple states with individual timestamps.
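For a sense of the payload shape, a single sendState call accepts an array of state objects, each with its own timestamp. Here’s a minimal sketch of one batched payload (the IDs are placeholders, and the values mirror the sample CSV shown later):

// One API call carrying multiple states, each with its own time.
const batchedPayload = {
  deviceId: 'your-device-id',
  applicationId: 'your-application-id',
  deviceState: [
    { time: new Date('2018-12-14T19:55:47.941Z').getTime(), data: { temperature: 55, voltage: 12.1 } },
    { time: new Date('2018-12-14T19:56:07.145Z').getTime(), data: { temperature: 57, voltage: 12.5 } }
  ]
};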
Here’s the package.json file with required dependencies:
{
  "name": "csv-import",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "author": "Brandon Cannaday @BadCanyon",
  "license": "MIT",
  "dependencies": {
    "async": "^2.6.1",
    "csv-parse": "^4.2.0",
    "losant-rest": "^2.3.1"
  }
}
Here’s the script. As-is, it reads a file named input.csv from the local directory; modify as needed. Run npm install to pull in the dependencies above, then node index.js to run it.
const fs = require('fs');
const async = require('async');
const parse = require('csv-parse/lib/sync');
const api = require('losant-rest');
const client = api.createClient({ accessToken: 'your-api-token' });
/* Contents of input.csv
Time,Temperature,Voltage
2018-12-14T19:55:47.941Z,55,12.1
2018-12-14T19:56:07.145Z,57,12.5
2018-12-14T19:56:15.153Z,52,12.2
...
*/
// Read the CSV contents into objects with time, temperature, and voltage fields.
const records = parse(fs.readFileSync('input.csv'), { columns: ['time', 'temperature', 'voltage'], skip_empty_lines: true });
// Convert all entries to Losant state objects.
// The original time is parsed and sent as milliseconds since epoch to
// reduce body size and fit more entries in the 256KB body limit.
// This example assumes the device has two attributes: temperature and voltage.
// The slice(1) removes the header row.
const states = records.slice(1).map((record) => {
  return {
    time: new Date(record.time).getTime(),
    data: {
      // CSV values parse as strings; convert them to numbers for the attributes.
      temperature: Number(record.temperature),
      voltage: Number(record.voltage)
    }
  };
});
// Batch the states in groups of 100.
// You may be able to fit many more than 100 per call as long as the total
// body stays under the 256KB limit. In the example above, each entry is
// roughly 100 bytes, so ~2000 entries at once would fit (see the sizing
// sketch after the script).
const batchSize = 100;
const batches = [];
for (let i = 0; i < states.length; i += batchSize) {
  batches.push(states.slice(i, i + batchSize));
}
// Iterate over each batch and report it to Losant.
// Each request is delayed by 500ms to stay under
// Losant's maximum data rate limit of 2 messages per second.
async.eachSeries(batches, (batch, callback) => {
  console.log(`Sending ${batch.length} states to Losant...`);
  client.device.sendState({
    deviceId: 'your-device-id',
    applicationId: 'your-application-id',
    deviceState: batch
  }).then((response) => {
    console.log(response);
    // Wait 500ms before firing the next report.
    setTimeout(callback, 500);
  }).catch(callback); // Stop the series if a request fails.
}, (err) => {
  if (err) { return console.error(err); }
  console.log('Done!');
});
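If your rows are much larger or smaller than the ~100-byte entries above, you can estimate the batch size from the serialized data instead of hard-coding 100. Here’s a minimal sketch, assuming the same states array built by the script and the 256KB body limit mentioned in the comments (the 0.9 headroom factor is an arbitrary safety margin, not a Losant requirement):

// Measure one serialized state and fit as many as possible under the body limit.
const bodyLimit = 256 * 1024;
const sampleBytes = Buffer.byteLength(JSON.stringify(states[0] || {}));
const dynamicBatchSize = Math.max(1, Math.floor((bodyLimit * 0.9) / sampleBytes));
console.log(`Using batches of ${dynamicBatchSize} states.`);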