How do you extract form data (form[method="post"]) and file uploads sent via the HTTP POST method in Node.js?
I've read the documentation, googled and found nothing.
function (request, response) {
//request.post????
}
Is there a library or a hack?
Note: the querystring module is now considered legacy and is no longer actively maintained.
You can use the querystring module:
var qs = require('querystring');

function (request, response) {
  if (request.method == 'POST') {
    var body = '';

    request.on('data', function (data) {
      body += data;

      // Too much POST data, kill the connection!
      // 1e6 === 1 * Math.pow(10, 6) === 1 * 1000000 ~~~ 1MB
      if (body.length > 1e6)
        request.connection.destroy();
    });

    request.on('end', function () {
      var post = qs.parse(body);
      // use post['blah'], etc.
    });
  }
}
Now, for example, if you have an input
field with name age
, you could access it using the variable post
:
console.log(post.age);
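Since the querystring module is flagged as legacy in newer Node.js versions, here is a minimal sketch of the same handler using the built-in URLSearchParams class instead (the age field and the 1 MB limit are just the examples from above):

const http = require('http');

http.createServer((request, response) => {
  if (request.method === 'POST') {
    let body = '';
    request.on('data', (data) => {
      body += data;
      // Same 1 MB flood guard as above
      if (body.length > 1e6) request.socket.destroy();
    });
    request.on('end', () => {
      const post = new URLSearchParams(body); // parses application/x-www-form-urlencoded
      console.log(post.get('age'));
      response.end('ok');
    });
  }
}).listen(8080);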
If you use Express (high-performance, high-class web development for Node.js), you can do this:
HTML:
<form method="post" action="/">
<input type="text" name="user[name]">
<input type="text" name="user[email]">
<input type="submit" value="Submit">
</form>
API client:
fetch('/', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
user: {
name: "John",
email: "john@example.com"
}
})
});
Node.js: (since Express v4.16.0)
// Parse URL-encoded bodies (as sent by HTML forms)
app.use(express.urlencoded({ extended: true }));
// Parse JSON bodies (as sent by API clients)
app.use(express.json());
// Access the parse results as request.body
app.post('/', function(request, response){
console.log(request.body.user.name);
console.log(request.body.user.email);
});
Node.js: (for Express <4.16.0)
const bodyParser = require("body-parser");
/** bodyParser.urlencoded(options)
* Parses the text as URL encoded data (which is how browsers tend to send form data from regular forms set to POST)
* and exposes the resulting object (containing the keys and values) on req.body
*/
app.use(bodyParser.urlencoded({
extended: true
}));
/**bodyParser.json(options)
* Parses the text as JSON and exposes the resulting object on req.body.
*/
app.use(bodyParser.json());
app.post("/", function (req, res) {
console.log(req.body.user.name)
});
Note: app.use(express.bodyParser()); no longer works in Express 4; use the built-in middleware shown above instead.
A lot of the answers here are no longer good practice, or don't explain anything, which is why I'm writing this.
Basics
When the callback of http.createServer is called, the server has received all of the headers for the request, but the body data may not have arrived yet, so we have to wait for it. The http request object (an http.IncomingMessage instance) is actually a readable stream. In readable streams, whenever a chunk of data arrives, a data event is emitted (assuming you have registered a callback for it), and when all chunks have arrived, an end event is emitted. Here's an example of how to listen for these events:
http.createServer((request, response) => {
console.log('Now we have a http message with headers but no data yet.');
request.on('data', chunk => {
console.log('A chunk of data has arrived: ', chunk);
});
request.on('end', () => {
console.log('No more data');
})
}).listen(8080)
Converting Buffers to Strings
If you try this you will notice the chunks are buffers. If you are not dealing with binary data and need to work with strings instead, I suggest using the request.setEncoding method, which causes the stream to emit strings interpreted with the given encoding and handles multi-byte characters properly.
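For example, a minimal sketch of that (the port is arbitrary):

const http = require('http');

http.createServer((request, response) => {
  // Chunks are now emitted as utf-8 strings, with multi-byte characters handled correctly
  request.setEncoding('utf8');
  request.on('data', (chunk) => {
    console.log('String chunk: ', chunk);
  });
  request.on('end', () => response.end('ok'));
}).listen(8080);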
Buffering Chunks
Now you are probably not interested in each chunk on its own, so in this case you probably want to buffer the data like this:
http.createServer((request, response) => {
const chunks = [];
request.on('data', chunk => chunks.push(chunk));
request.on('end', () => {
const data = Buffer.concat(chunks);
console.log('Data: ', data);
})
}).listen(8080)
Here Buffer.concat is used, which simply concatenates all the buffers and returns one big buffer. You can also use the concat-stream module, which does the same:
const http = require('http');
const concat = require('concat-stream');
http.createServer((request, response) => {
concat(request, data => {
console.log('Data: ', data);
});
}).listen(8080)
Parsing Content
If you are trying to accept HTML form POST submissions with no files, or handle jQuery ajax calls with the default content type, then the content type is application/x-www-form-urlencoded with utf-8 encoding. You can use the querystring module to deserialize it and access the properties:
const http = require('http');
const concat = require('concat-stream');
const qs = require('querystring');
http.createServer((request, response) => {
concat(request, buffer => {
const data = qs.parse(buffer.toString());
console.log('Data: ', data);
});
}).listen(8080)
If your content type is JSON instead, you can simply use JSON.parse instead of qs.parse.
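For example, a sketch of the JSON case, reusing the buffering pattern from above (this assumes the client actually sends valid JSON):

const http = require('http');

http.createServer((request, response) => {
  const chunks = [];
  request.on('data', (chunk) => chunks.push(chunk));
  request.on('end', () => {
    const data = JSON.parse(Buffer.concat(chunks).toString()); // throws on invalid JSON
    console.log('Data: ', data);
    response.end('ok');
  });
}).listen(8080);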
If you are dealing with files or handling a multipart content type, then you should use something like formidable, which removes all the pain of dealing with it. Have a look at this other answer of mine, where I have posted helpful links and modules for multipart content.
Piping
If you don't want to parse the content but rather pass it somewhere else, for example send it to another HTTP request as its body or save it to a file, I suggest piping rather than buffering: it's less code, it handles back-pressure better, it uses less memory, and in some cases it's faster.
So if you want to save the content to a file:
const http = require('http');
const fs = require('fs');

http.createServer((request, response) => {
  request.pipe(fs.createWriteStream('./request'));
}).listen(8080)
Limiting the Amount of Data
As other answers have noted, keep in mind that malicious clients might send you a huge amount of data to crash your application or fill your memory, so make sure you drop requests that emit data past a certain limit. If you don't use a library to handle the incoming data, I would suggest using something like stream-meter, which can abort the request if it reaches the specified limit:
const limitedStream = request.pipe(meter(1e7));
limitedStream.on('data', ...);
limitedStream.on('end', ...);
or
request.pipe(meter(1e7)).pipe(createWriteStream(...));
or
concat(request.pipe(meter(1e7)), ...);
NPM Modules
While I described above how you can use the HTTP request body directly, for simply buffering and parsing the content I suggest using one of these modules rather than implementing it on your own, as they will probably handle edge cases better. For Express I suggest using body-parser. For Koa, there's a similar module.
If you don't use a framework, body is quite good.
Comment: what if the request object is re-used and request.on('end') gets invoked multiple times? How can I avoid that?
Reply: it won't be; each incoming request is a separate http.IncomingMessage, so the request.on('end', ...) handler is only called once per request.
Make sure to kill the connection if someone tries to flood your RAM!
var qs = require('querystring');

function (request, response) {
  if (request.method == 'POST') {
    var body = '';

    request.on('data', function (data) {
      body += data;

      // 1e6 === 1 * Math.pow(10, 6) === 1 * 1000000 ~~~ 1MB
      if (body.length > 1e6) {
        // FLOOD ATTACK OR FAULTY CLIENT, NUKE REQUEST
        request.connection.destroy();
      }
    });

    request.on('end', function () {
      var POST = qs.parse(body);
      // use POST
    });
  }
}
Note for beginners: after var POST = qs.parse(body);, if an input text field is named user, then POST.user will contain that field's data, e.g. console.log(POST.user);
Here's a very simple no-framework wrapper based on the other answers and articles posted in here:
var http = require('http');
var querystring = require('querystring');

function processPost(request, response, callback) {
  var queryData = "";
  if (typeof callback !== 'function') return null;

  if (request.method == 'POST') {
    request.on('data', function (data) {
      queryData += data;
      if (queryData.length > 1e6) {
        queryData = "";
        response.writeHead(413, {'Content-Type': 'text/plain'});
        response.end();
        request.connection.destroy();
      }
    });

    request.on('end', function () {
      request.post = querystring.parse(queryData);
      callback();
    });

  } else {
    response.writeHead(405, {'Content-Type': 'text/plain'});
    response.end();
  }
}
Usage example:
http.createServer(function (request, response) {
  if (request.method == 'POST') {
    processPost(request, response, function () {
      console.log(request.post);
      // Use request.post here
      response.writeHead(200, "OK", {'Content-Type': 'text/plain'});
      response.end();
    });
  } else {
    response.writeHead(200, "OK", {'Content-Type': 'text/plain'});
    response.end();
  }
}).listen(8000);
It will be cleaner if you encode your data to JSON, then send it to Node.js.
function (req, res) {
if (req.method == 'POST') {
var jsonString = '';
req.on('data', function (data) {
jsonString += data;
});
req.on('end', function () {
console.log(JSON.parse(jsonString));
});
}
}
Note: like qs.parse(), JSON.parse() turns the body into something usable. For example: var post = JSON.parse(body);, then access the data with post.fieldname. (Moral of the story: if you're confused about what you're seeing, don't forget about typeof!)
Note: you may need to call request.setEncoding to make this work properly, otherwise it might not handle non-ASCII characters correctly.
For anyone wondering how to do this trivial task without installing a web framework, I managed to plop this together. It's hardly production ready, but it seems to work.
function handler(req, res) {
  var POST = {};
  if (req.method == 'POST') {
    req.on('data', function (data) {
      // Note: this assumes the whole body arrives in a single chunk
      // and does not handle repeated field names.
      data = data.toString();
      data = data.split('&');
      for (var i = 0; i < data.length; i++) {
        var _data = data[i].split("=");
        POST[decodeURIComponent(_data[0])] =
          decodeURIComponent((_data[1] || '').replace(/\+/g, ' '));
      }
      console.log(POST);
    })
  }
}
You can use body-parser
, the Node.js body parsing middleware.
First load body-parser
$ npm install body-parser --save
Some example code
var express = require('express')
var bodyParser = require('body-parser')
var app = express()
app.use(bodyParser.urlencoded({ extended: false }))
app.use(bodyParser.json())
app.use(function (req, res) {
var post_data = req.body;
console.log(post_data);
})
More documentation can be found here
Here is how you can do it if you use node-formidable:
var formidable = require("formidable");
var form = new formidable.IncomingForm();
form.parse(request, function (err, fields) {
console.log(fields.parameter1);
console.log(fields.parameter2);
// ...
});
Reference: https://nodejs.org/en/docs/guides/anatomy-of-an-http-transaction/
let body = [];
request.on('data', (chunk) => {
body.push(chunk);
}).on('end', () => {
body = Buffer.concat(body).toString();
// at this point, `body` has the entire request body stored in it as a string
});
If you prefer to use pure Node.js then you might extract POST data like it is shown below:
// Dependencies
const StringDecoder = require('string_decoder').StringDecoder;
const http = require('http');

// Instantiate the HTTP server.
const httpServer = http.createServer((request, response) => {
  // Get the payload, if any.
  const decoder = new StringDecoder('utf-8');
  let payload = '';

  request.on('data', (data) => {
    payload += decoder.write(data);
  });

  request.on('end', () => {
    payload += decoder.end();

    // Parse payload to object.
    payload = JSON.parse(payload);

    // Do something with the payload....
  });
});

// Start the HTTP server.
const port = 3000;
httpServer.listen(port, () => {
  console.log(`The server is listening on port ${port}`);
});
1) Install body-parser from npm.
2) Then, in your app.ts:
var bodyParser = require('body-parser');
3) Then you need to add
app.use(bodyParser.json())
to the app.ts module.
4) Keep in mind that app.use(bodyParser.json()) must go at the top, before any of your route declarations. Ex:
app.use(bodyParser.json())
app.use('/user', user);
5) Then use
var postdata = req.body;
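Putting those steps together, a minimal sketch (the /user route and the port are just examples):

var express = require('express');
var bodyParser = require('body-parser');
var app = express();

// Must come before any route declarations
app.use(bodyParser.json());

app.post('/user', function (req, res) {
  var postdata = req.body; // the parsed JSON body
  console.log(postdata);
  res.sendStatus(200);
});

app.listen(3000);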
If you don't want to accumulate the chunks in the data callback, you can always use the readable callback instead, like this:
// Read body data whenever it is available
request.body = '';
request.on("readable", function () {
  var chunk;
  while (null !== (chunk = request.read())) {
    request.body += chunk;
  }
});

// Do something with it
request.on("end", function () {
  request.body; //-> POST parameters as a string
});
This approach modifies the incoming request, but as soon as you finish your response the request will be garbage collected, so that should not be a problem.
An advanced approach would be to check the body size first, if you're afraid of huge bodies.
request is a normal Node.js stream, so you can check request.headers (e.g. the Content-Length header) for the body length and abort the request if necessary.
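A minimal sketch of that idea (the 1 MB limit is just an example; note that Content-Length can be missing or wrong, so you should still enforce a limit while reading the data):

const http = require('http');

http.createServer((request, response) => {
  const length = Number(request.headers['content-length'] || 0);
  if (length > 1e6) {
    // Reject oversized bodies up front
    response.writeHead(413, { 'Content-Type': 'text/plain' });
    response.end('Payload Too Large');
    request.destroy();
    return;
  }
  // ... read the body as shown above ...
  response.end('ok');
}).listen(8080);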
You need to receive the POST
data in chunks using request.on('data', function(chunk) {...})
const http = require('http');

http.createServer((req, res) => {
  if (req.method == 'POST') {
    let whole = '';
    req.on('data', (chunk) => {
      // consider adding a size limit here
      whole += chunk.toString();
    });
    req.on('end', () => {
      console.log(whole);
      res.writeHead(200, 'OK', {'Content-Type': 'text/html'});
      res.end('Data received.');
    });
  }
}).listen(8080);
You should consider adding a size limit at the indicated position, as thejh suggested. You could also add a setTimeout that ends the connection if the full request has not been received within a certain window, as sketched below.
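A sketch of that timeout idea, layered on the handler above (the 30-second window is arbitrary):

const http = require('http');

http.createServer((req, res) => {
  if (req.method == 'POST') {
    let whole = '';

    // Give up if the full body has not arrived within 30 seconds
    const timer = setTimeout(() => req.destroy(), 30000);

    req.on('data', (chunk) => { whole += chunk.toString(); });
    req.on('end', () => {
      clearTimeout(timer);
      console.log(whole);
      res.writeHead(200, 'OK', { 'Content-Type': 'text/html' });
      res.end('Data received.');
    });
  }
}).listen(8080);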
There are multiple ways to do it. However, the quickest way I know is to use the Express.js library with body-parser.
var express = require("express");
var bodyParser = require("body-parser");
var app = express();
app.use(bodyParser.urlencoded({extended : true}));
app.post("/pathpostdataissentto", function(request, response) {
console.log(request.body);
//Or
console.log(request.body.fieldName);
});
app.listen(8080);
That can work for strings, but I would change bodyParser.urlencoded to bodyParser.json instead if the POST data contains a JSON array.
More info: http://www.kompulsa.com/how-to-accept-and-parse-post-requests-in-node-js/
Express v4.17.0
app.use(express.urlencoded( {extended: true} ))
console.log(req.body) // object
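A complete minimal sketch of that (the /form route and the port are just examples):

const express = require('express');
const app = express();

// Built into Express since v4.16.0, so body-parser is not required
app.use(express.urlencoded({ extended: true }));

app.post('/form', (req, res) => {
  console.log(req.body); // object with the submitted form fields
  res.sendStatus(200);
});

app.listen(3000);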
If you are using Express.js, before you can access req.body you must add the bodyParser middleware:
app.use(express.bodyParser());
Then you can ask for
req.body.user
And if you don't want to use an entire framework like Express, but you do need different kinds of forms, including uploads, then formaline may be a good choice.
It is listed in Node.js modules
If you receive the POST data as JSON:
import http from 'http';

const hostname = '127.0.0.1';
const port = 3000;

const httpServer: http.Server = http.createServer((req: http.IncomingMessage, res: http.ServerResponse) => {
  if (req.method === 'POST') {
    let body: string = '';
    req.on('data', (chunk) => {
      body += chunk;
    });
    req.on('end', () => {
      const parsedBody = JSON.parse(body); // use parsedBody here
      res.statusCode = 200;
      res.end('OK post');
    });
  }
});

httpServer.listen(port, hostname, () => {
  console.info(`Server started at port ${port}`);
});
You can extract POST parameters without using Express.
1: npm install multiparty
2: Import multiparty, e.g. var multiparty = require('multiparty');
3:
if (req.method === 'POST') {
  var form = new multiparty.Form();
  form.parse(req, function (err, fields, files) {
    console.log(fields['userfile1'][0]);
  });
}
4: And the HTML form is:
<form method="POST" enctype="multipart/form-data">
  <input type="text" name="userfile1"><br>
  <input type="submit">
</form>
I hope this will work for you. Thanks.
I found a video which explains how to achieve this: https://www.youtube.com/watch?v=nuw48-u3Yrg
It uses the default "http" module together with the "querystring" and "stringbuilder" modules. The application takes two numbers (using two textboxes) from a web page and, upon submit, returns the sum of those two (along with persisting the values in the textboxes). This is the best example I could find.
Related source code:
var http = require("http");
var qs = require("querystring");
var StringBuilder = require("stringbuilder");
var port = 9000;
function getCalcHtml(req, resp, data) {
var sb = new StringBuilder({ newline: "\r\n" });
sb.appendLine("<html>");
sb.appendLine(" <body>");
sb.appendLine(" <form method='post'>");
sb.appendLine(" <table>");
sb.appendLine(" <tr>");
sb.appendLine(" <td>Enter First No: </td>");
if (data && data.txtFirstNo) {
sb.appendLine(" <td><input type='text' id='txtFirstNo' name='txtFirstNo' value='{0}'/></td>", data.txtFirstNo);
}
else {
sb.appendLine(" <td><input type='text' id='txtFirstNo' name='txtFirstNo' /></td>");
}
sb.appendLine(" </tr>");
sb.appendLine(" <tr>");
sb.appendLine(" <td>Enter Second No: </td>");
if (data && data.txtSecondNo) {
sb.appendLine(" <td><input type='text' id='txtSecondNo' name='txtSecondNo' value='{0}'/></td>", data.txtSecondNo);
}
else {
sb.appendLine(" <td><input type='text' id='txtSecondNo' name='txtSecondNo' /></td>");
}
sb.appendLine(" </tr>");
sb.appendLine(" <tr>");
sb.appendLine(" <td><input type='submit' value='Calculate' /></td>");
sb.appendLine(" </tr>");
if (data && data.txtFirstNo && data.txtSecondNo) {
var sum = parseInt(data.txtFirstNo) + parseInt(data.txtSecondNo);
sb.appendLine(" <tr>");
sb.appendLine(" <td>Sum: {0}</td>", sum);
sb.appendLine(" </tr>");
}
sb.appendLine(" </table>");
sb.appendLine(" </form>")
sb.appendLine(" </body>");
sb.appendLine("</html>");
sb.build(function (err, result) {
resp.write(result);
resp.end();
});
}
function getCalcForm(req, resp, data) {
resp.writeHead(200, { "Content-Type": "text/html" });
getCalcHtml(req, resp, data);
}
function getHome(req, resp) {
resp.writeHead(200, { "Content-Type": "text/html" });
resp.write("<html><html><head><title>Home</title></head><body>Want to some calculation? Click <a href='/calc'>here</a></body></html>");
resp.end();
}
function get404(req, resp) {
resp.writeHead(404, "Resource Not Found", { "Content-Type": "text/html" });
resp.write("<html><html><head><title>404</title></head><body>404: Resource not found. Go to <a href='/'>Home</a></body></html>");
resp.end();
}
function get405(req, resp) {
resp.writeHead(405, "Method not supported", { "Content-Type": "text/html" });
resp.write("<html><html><head><title>405</title></head><body>405: Method not supported</body></html>");
resp.end();
}
http.createServer(function (req, resp) {
switch (req.method) {
case "GET":
if (req.url === "/") {
getHome(req, resp);
}
else if (req.url === "/calc") {
getCalcForm(req, resp);
}
else {
get404(req, resp);
}
break;
case "POST":
if (req.url === "/calc") {
var reqBody = '';
req.on('data', function (data) {
reqBody += data;
if (reqBody.length > 1e7) { //10MB
resp.writeHead(413, 'Request Entity Too Large', { 'Content-Type': 'text/html' });
resp.end('<!doctype html><html><head><title>413</title></head><body>413: Request Entity Too Large</body></html>');
}
});
req.on('end', function () {
var formData = qs.parse(reqBody);
getCalcForm(req, resp, formData);
});
}
else {
get404(req, resp);
}
break;
default:
get405(req, resp);
break;
}
}).listen(port);
On form fields like these
<input type="text" name="user[name]" value="MyName">
<input type="text" name="user[email]" value="myemail@somewherefarfar.com">
some of the above answers will fail because they only support flat data.
For now I am using Casey Chu's answer, but with the "qs" module instead of "querystring". This is the module that "body-parser" uses as well, so if you want nested data you have to install qs:
npm install qs --save
Then replace the first line like:
//var qs = require('querystring');
var qs = require('qs');
function (request, response) {
  if (request.method == 'POST') {
    var body = '';

    request.on('data', function (data) {
      body += data;

      // Too much POST data, kill the connection!
      // 1e6 === 1 * Math.pow(10, 6) === 1 * 1000000 ~~~ 1MB
      if (body.length > 1e6)
        request.connection.destroy();
    });

    request.on('end', function () {
      var post = qs.parse(body);
      console.log(post.user.name); // should work
      // use post['blah'], etc.
    });
  }
}
For those using raw binary POST upload without encoding overhead you can use:
client:
var xhr = new XMLHttpRequest();
xhr.open("POST", "/api/upload", true);
var blob = new Uint8Array([65,72,79,74]); // or e.g. recorder.getBlob()
xhr.send(blob);
server:
var express = require('express');
var router = express.Router();
var fs = require('fs');
router.use (function(req, res, next) {
var data='';
req.setEncoding('binary');
req.on('data', function(chunk) {
data += chunk;
});
req.on('end', function() {
req.body = data;
next();
});
});
router.post('/api/upload', function(req, res, next) {
fs.writeFile("binaryFile.png", req.body, 'binary', function(err) {
res.send("Binary POST successful!");
});
});
You can use the express middleware, which now has body-parser built into it. This means all you need to do is the following:
import express from 'express'
const app = express()
app.use(express.json())
app.post('/thing', (req, res) => {
console.log(req.body) // <-- this will access the body of the post
res.sendStatus(200)
})
That code example is ES6 with Express 4.16.x
Limit the POST size to avoid flooding your Node.js app. There is a great raw-body module, suitable for both Express and Connect, that can help you limit requests by size and length.
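A minimal sketch using raw-body (the 1 MB limit is just an example; see the module's documentation for all options):

const http = require('http');
const getRawBody = require('raw-body');

http.createServer((req, res) => {
  getRawBody(req, {
    length: req.headers['content-length'],
    limit: '1mb', // reject bodies larger than this
    encoding: 'utf-8'
  }, (err, body) => {
    if (err) {
      // raw-body attaches an HTTP status code to its errors (413 when the limit is exceeded)
      res.statusCode = err.statusCode || 500;
      return res.end(err.message);
    }
    console.log(body); // the request body as a string
    res.end('ok');
  });
}).listen(8080);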
If a file upload is involved, the browser usually sends it with a "multipart/form-data" content type. You can use this in such cases:
var multipart = require('multipart');
multipart.parse(req)
You need to use bodyParser() if you want the form data to be available in req.body. body-parser parses your request and converts it into a format from which you can easily extract the information you need.
For example, let's say you have a sign-up form on your frontend. You fill it in and ask the server to save the details somewhere.
Extracting the username and password from your request is as simple as the snippet below if you use body-parser.
var loginDetails = {
username : request.body.username,
password : request.body.password
};
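For context, here is a minimal sketch of the surrounding setup (the /signup route and the field names are just examples):

var express = require('express');
var bodyParser = require('body-parser');
var app = express();

app.use(bodyParser.urlencoded({ extended: true })); // HTML form posts
app.use(bodyParser.json());                         // JSON API clients

app.post('/signup', function (request, response) {
  var loginDetails = {
    username: request.body.username,
    password: request.body.password
  };
  console.log(loginDetails);
  response.sendStatus(200);
});

app.listen(3000);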
To elaborate on using URLSearchParams
:
Node.js knowledge: How can I read POST data?
Node.js documentation: Class: URLSearchParams
MDN: URLSearchParams
const http = require('http');
const POST_HTML =
'<html><head><title>Post Example</title></head>' +
'<body>' +
'<form method="post">' +
'Input 1: <input name="input1"><br>' +
'Input 2: <input name="input2"><br>' +
'Input 1: <input name="input1"><br>' +
'<input type="submit">' +
'</form>' +
'</body></html>';
const FORM_DATA = 'application/x-www-form-urlencoded';
function processFormData(body) {
  const params = new URLSearchParams(body);
  for (const [name, value] of params.entries()) console.log(`${name}: ${value}`);
}
// req: http.IncomingMessage
// res: http.ServerResponse
//
function requestListener(req, res) {
const contentType = req.headers['content-type'];
let body = '';
const append = (chunk) => {
body += chunk;
};
const complete = () => {
if (contentType === FORM_DATA) processFormData(body);
res.writeHead(200);
res.end(POST_HTML);
};
req.on('data', append);
req.on('end', complete);
}
http.createServer(requestListener).listen(8080);
$ node index.js
input1: one
input2: two
input1: three
You can easily send a POST request and get its response by using the "Request - Simplified HTTP client" library and a JavaScript Promise. (Note that the request package has since been deprecated, but the example still illustrates the pattern.)
var request = require('request');
function getData() {
var options = {
url: 'https://example.com',
headers: {
'Content-Type': 'application/json'
}
};
return new Promise(function (resolve, reject) {
var responseData;
var req = request.post(options, (err, res, body) => {
if (err) {
console.log(err);
reject(err);
} else {
console.log("Responce Data", JSON.parse(body));
responseData = body;
resolve(responseData);
}
});
});
}
Note: you can also use the readable callback instead of building the data into a body string; once it has fired, the body is available through request.read(); (see the readable-based answer above).
Warning: req.connection.destroy(); doesn't prevent the callbacks from being executed! For example, the "on end" callback may still be executed with the truncated body. This is probably not what you want; see the sketch below.
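If that matters for your handler, a minimal sketch of one way around it is to set a flag when you destroy the socket and check it in the 'end' callback (the handler shape mirrors the answer above):

var qs = require('querystring');

function handler(request, response) {
  if (request.method == 'POST') {
    var body = '';
    var aborted = false;

    request.on('data', function (data) {
      body += data;
      if (body.length > 1e6) {
        aborted = true; // remember that this request was rejected
        request.socket.destroy();
      }
    });

    request.on('end', function () {
      if (aborted) return; // don't parse a truncated body
      var POST = qs.parse(body);
      // use POST
    });
  }
}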