Import data using a CSV file

Forest natively supports data creation but it’s sometimes more efficient to simply import it. This “How-to” shows a way to achieve that in your Forest admin.

In the following example, we’ve created the Smart Action Import data on the collection products which contains two input fields: the CSV file and the Type. The Type input field is completely optional; we use it to import the list of products with the correct type set.

When clicking on the Import data Smart Action, an API call to /forest/products/actions/import-data is triggered as indicated in the endpoint parameter of the action.

Lumber
Rails
Express/Sequelize
Express/Mongoose

We handle this call in the routes/products.js file.

The CSV file passed into the body of the API call is serialized using a base64 encoding Data URI scheme.

To deserialize the base64 encoded CSV file, we use the NPM package parse-data-uri. We also use the csv parser NPM package to iterate over each line of the CSV file.

You can find a sample CSV file we use here to feed our products table on the Live demo Github repository.

You may find below the coding examples you need to make this Smart action work:

/forest/products.js
const Liana = require('forest-express-sequelize');
const models = require('../models');

// Input fields rendered by the "Import data" Smart Action form.
const importDataFields = [{
  field: 'CSV file',
  description: 'A semicolon separated values file stores tabular data (numbers and text) in plain text',
  type: 'File',
  isRequired: true,
}, {
  field: 'Type',
  description: 'Specify the product type to import',
  type: 'Enum',
  enums: ['phone', 'dress', 'toy'],
  isRequired: true,
}];

// Declare a global "Import data" Smart Action on the products collection;
// submitting the form POSTs to the endpoint below.
Liana.collection('products', {
  actions: [{
    name: 'Import data',
    endpoint: '/forest/products/actions/import-data',
    type: 'global',
    fields: importDataFields,
  }],
  // ...
});
/routes/products.js
const P = require('bluebird');
const express = require('express');
const router = express.Router();
const Liana = require('forest-express-sequelize');
const faker = require('faker');
const parseDataUri = require('parse-data-uri');
const csv = require('csv');
const models = require('../models');

// POST /forest/products/actions/import-data
// Body shape: { data: { attributes: { values: { 'CSV file': <data URI>, Type: <enum> } } } }
// The CSV file arrives base64-encoded as a Data URI; decode it, then create
// one product per semicolon-separated line.
router.post('/products/actions/import-data', Liana.ensureAuthenticated,
  (req, res) => {
    const values = req.body.data.attributes.values;
    const parsed = parseDataUri(values['CSV file']);
    // NOTE(review): the Type field is collected but not yet written on the
    // created records — confirm against the products schema before using it.
    const productType = values['Type'];

    csv.parse(parsed.data, { delimiter: ';' }, (err, rows) => {
      if (err) {
        res.status(400).send({ error: `Cannot import data: ${err.message}` });
        return;
      }
      P.each(rows, (row) => {
        // Random price for the example purpose. In a real situation, the price
        // should certainly be available in the CSV file.
        const price = faker.commerce.price(5, 1000) * 100;
        return models.products.create({
          label: row[0],
          price: price,
          picture: row[1],
        });
      })
        .then(() => {
          res.send({ success: 'Data successfully imported!' });
        })
        .catch((error) => {
          // Without this handler a failed insert left the request hanging
          // (unhandled rejection, no response sent to the admin UI).
          res.status(400).send({ error: `Cannot import data: ${error.message}` });
        });
    });
  });

module.exports = router;

We handle this call in the config/routes.rb file.

The CSV file passed into the body of the API call is serialized using a base64 encoding Data URI scheme.

To deserialize the base64 encoded CSV file, we use the NPM package parse-data-uri. We also use the csv parser NPM package to iterate over each line of the CSV file.

You can find a sample CSV file we use here to feed our products table on the Live demo Github repository.

You may find below the coding examples you need to make this Smart action work:

/lib/forest_liana/collections/product.rb
class Forest::Product
  include ForestLiana::Collection

  collection :Product

  has_many :buyers, type: ['String'], reference: 'Customer.id'

  # Input fields displayed by the "Import data" Smart Action form.
  import_fields = [{
    field: 'CSV file',
    description: 'A semicolon separated values file stores tabular data (numbers and text) in plain text',
    type: 'File',
    isRequired: true
  }, {
    field: 'Type',
    description: 'Specify the product type to import',
    type: 'Enum',
    enums: ['phone', 'dress', 'toy'],
    isRequired: true
  }]

  # Global Smart Action; submitting the form POSTs to the endpoint below.
  action 'Import data', endpoint: '/forest/products/actions/import-data', type: 'global', fields: import_fields

  # ...
end
/controllers/forest/products_controller.rb
require 'data_uri'
class Forest::ProductsController < ForestLiana::ApplicationController
# ...
def import_data
uri = URI::Data.new(params.dig('data', 'attributes', 'values', 'CSV file'))
CSV.parse(uri.data, { col_sep: ';' }).each do |row|
price = 0
case params.dig('data', 'attributes', 'values', 'Type')
when 'phone'
price = rand(300..1000) * 100
when 'dress'
price = rand(10..200) * 100
when 'toy'
price = rand(5..100) * 100
end
Product.create({
label: row[0],
price: price,
picture: row[1].gsub('//i5.walmartimages.com/asr/', "//s3-#{ENV['S3_REGION']}.amazonaws.com/#{ENV['S3_BUCKET']}/livedemo/"),
})
end
render json: { success: 'Data successfuly imported!' }
end
end
/config/routes.rb
Rails.application.routes.draw do
namespace :forest do
# ...
# Custom endpoint backing the "Import data" Smart Action on products.
post '/products/actions/import-data' => 'products#import_data'
end
# ...
end

We handle this call in the routes/products.js file.

The CSV file passed into the body of the API call is serialized using a base64 encoding Data URI scheme.

To deserialize the base64 encoded CSV file, we use the NPM package parse-data-uri. We also use the csv parser NPM package to iterate over each line of the CSV file.

You can find a sample CSV file we use here to feed our products table on the Live demo Github repository.

You may find below the coding examples you need to make this Smart action work:

/forest/products.js
const Liana = require('forest-express-sequelize');
const models = require('../models');

// Input fields rendered by the "Import data" Smart Action form.
const importDataFields = [{
  field: 'CSV file',
  description: 'A semicolon separated values file stores tabular data (numbers and text) in plain text',
  type: 'File',
  isRequired: true,
}, {
  field: 'Type',
  description: 'Specify the product type to import',
  type: 'Enum',
  enums: ['phone', 'dress', 'toy'],
  isRequired: true,
}];

// Declare a global "Import data" Smart Action on the products collection;
// submitting the form POSTs to the endpoint below.
Liana.collection('products', {
  actions: [{
    name: 'Import data',
    endpoint: '/forest/products/actions/import-data',
    type: 'global',
    fields: importDataFields,
  }],
  // ...
});
/routes/products.js
const P = require('bluebird');
const express = require('express');
const router = express.Router();
const Liana = require('forest-express-sequelize');
const faker = require('faker');
const parseDataUri = require('parse-data-uri');
const csv = require('csv');
const models = require('../models');

// POST /forest/products/actions/import-data
// Body shape: { data: { attributes: { values: { 'CSV file': <data URI>, Type: <enum> } } } }
// The CSV file arrives base64-encoded as a Data URI; decode it, then create
// one product per semicolon-separated line.
router.post('/products/actions/import-data', Liana.ensureAuthenticated,
  (req, res) => {
    const values = req.body.data.attributes.values;
    const parsed = parseDataUri(values['CSV file']);
    // NOTE(review): the Type field is collected but not yet written on the
    // created records — confirm against the products schema before using it.
    const productType = values['Type'];

    csv.parse(parsed.data, { delimiter: ';' }, (err, rows) => {
      if (err) {
        res.status(400).send({ error: `Cannot import data: ${err.message}` });
        return;
      }
      P.each(rows, (row) => {
        // Random price for the example purpose. In a real situation, the price
        // should certainly be available in the CSV file.
        const price = faker.commerce.price(5, 1000) * 100;
        return models.products.create({
          label: row[0],
          price: price,
          picture: row[1],
        });
      })
        .then(() => {
          res.send({ success: 'Data successfully imported!' });
        })
        .catch((error) => {
          // Without this handler a failed insert left the request hanging
          // (unhandled rejection, no response sent to the admin UI).
          res.status(400).send({ error: `Cannot import data: ${error.message}` });
        });
    });
  });

module.exports = router;

We handle this call in the routes/products.js file.

The CSV file passed into the body of the API call is serialized using a base64 encoding Data URI scheme.

To deserialize the base64 encoded CSV file, we use the NPM package parse-data-uri. We also use the csv parser NPM package to iterate over each line of the CSV file.

You can find a sample CSV file we use here to feed our products table on the Live demo Github repository.

You may find below the coding examples you need to make this Smart action work:

/forest/products.js
const Liana = require('forest-express-mongoose');
const models = require('../models');

// Input fields rendered by the "Import data" Smart Action form.
const importDataFields = [{
  field: 'CSV file',
  description: 'A semicolon separated values file stores tabular data (numbers and text) in plain text',
  type: 'File',
  isRequired: true,
}, {
  field: 'Type',
  description: 'Specify the product type to import',
  type: 'Enum',
  enums: ['phone', 'dress', 'toy'],
  isRequired: true,
}];

// Declare a global "Import data" Smart Action on the products collection;
// submitting the form POSTs to the endpoint below.
Liana.collection('products', {
  actions: [{
    name: 'Import data',
    endpoint: '/forest/products/actions/import-data',
    type: 'global',
    fields: importDataFields,
  }],
  // ...
});
/routes/products.js
const P = require('bluebird');
const express = require('express');
const router = express.Router();
const Liana = require('forest-express-mongoose');
const faker = require('faker');
const parseDataUri = require('parse-data-uri');
const csv = require('csv');
const models = require('../models');

// POST /forest/products/actions/import-data
// Body shape: { data: { attributes: { values: { 'CSV file': <data URI>, Type: <enum> } } } }
// The CSV file arrives base64-encoded as a Data URI; decode it, then create
// one product per semicolon-separated line.
router.post('/products/actions/import-data', Liana.ensureAuthenticated,
  (req, res) => {
    const values = req.body.data.attributes.values;
    const parsed = parseDataUri(values['CSV file']);
    // NOTE(review): the Type field is collected but not yet written on the
    // created records — confirm against the products schema before using it.
    const productType = values['Type'];

    csv.parse(parsed.data, { delimiter: ';' }, (err, rows) => {
      if (err) {
        res.status(400).send({ error: `Cannot import data: ${err.message}` });
        return;
      }
      P.each(rows, (row) => {
        // Random price for the example purpose. In a real situation, the price
        // should certainly be available in the CSV file.
        const price = faker.commerce.price(5, 1000) * 100;
        return models.products.create({
          label: row[0],
          price: price,
          picture: row[1],
        });
      })
        .then(() => {
          res.send({ success: 'Data successfully imported!' });
        })
        .catch((error) => {
          // Without this handler a failed insert left the request hanging
          // (unhandled rejection, no response sent to the admin UI).
          res.status(400).send({ error: `Cannot import data: ${error.message}` });
        });
    });
  });

module.exports = router;

Uploading large files

For large file uploads, you should add an option in your Express Server in your app.js file:

// Raise body-parser's default payload limit (~100kb) so large base64-encoded
// CSV uploads are not rejected with a 413 "Payload Too Large" error.
app.use(bodyParser.urlencoded({ extended: true, limit: '50mb' }));
app.use(bodyParser.json({ limit: '50mb' }));

If you're using Rails, there is no native limitation, so there is no need to do anything.