How to set $_POST values in Node JS child process - php

I hosted my Slim app on AWS Lambda. To get my PHP app to work, I followed this tutorial.
My app runs fine until I try to submit a form with the POST method. My PHP cannot get the values from the form: when I dumped $_POST and file_get_contents('php://input'), both returned null.
In the tutorial, Chris (the author) stated that this code spawns the child process and sets a bunch of environment variables which PHP CGI populates into the $_SERVER superglobal.
var php = spawn('./php-cgi', ['function.php'], {
env: Object.assign({
REDIRECT_STATUS: 200,
REQUEST_METHOD: requestMethod,
SCRIPT_FILENAME: 'function.php',
SCRIPT_NAME: '/function.php',
PATH_INFO: '/',
SERVER_NAME: serverName,
SERVER_PROTOCOL: 'HTTP/1.1',
REQUEST_URI: requestUri
}, headers)
});
I am not familiar with child processes, so I would like to ask: is there a way to populate the $_POST superglobal as well? The POST data should live in the event object passed to my handler function, so (I think) my Node.js wrapper can access it, but it never passes it on to PHP CGI.
exports.handler = function(event, context)
here is the full code of my NodeJS wrapper:
var spawn = require('child_process').spawn;
var parseHeaders, parseResponse, parseStatusLine;
parseResponse = function(responseString) {
var headerLines, line, lines, parsedStatusLine, response;
response = {};
lines = responseString.split('\r\n');
parsedStatusLine = parseStatusLine(lines.shift());
response['protocolVersion'] = parsedStatusLine['protocol'];
response['statusCode'] = parsedStatusLine['statusCode'];
response['statusMessage'] = parsedStatusLine['statusMessage'];
headerLines = [];
while (lines.length > 0) {
line = lines.shift();
if (line === "") {
break;
}
headerLines.push(line);
}
response['headers'] = parseHeaders(headerLines);
response['body'] = lines.join('\r\n');
return response;
};
parseHeaders = function(headerLines) {
var headers, key, line, parts, _i, _len;
headers = {};
for (_i = 0, _len = headerLines.length; _i < _len; _i++) {
line = headerLines[_i];
parts = line.split(":");
key = parts.shift();
headers[key] = parts.join(":").trim();
}
return headers;
};
parseStatusLine = function(statusLine) {
var parsed, parts;
parts = statusLine.match(/^(.+) ([0-9]{3}) (.*)$/);
parsed = {};
if (parts !== null) {
parsed['protocol'] = parts[1];
parsed['statusCode'] = parts[2];
parsed['statusMessage'] = parts[3];
}
return parsed;
};
exports.index = function(event, context) {
// Sets some sane defaults here so that this function doesn't fail when it's not handling a HTTP request from
// API Gateway.
var requestMethod = event.httpMethod || 'GET';
var serverName = event.headers ? event.headers.Host : '';
var requestUri = event.path || '';
var headers = {};
// Convert all headers passed by API Gateway into the correct format for PHP CGI. This means converting a header
// such as "X-Test" into "HTTP_X-TEST".
if (event.headers) {
Object.keys(event.headers).map(function (key) {
headers['HTTP_' + key.toUpperCase()] = event.headers[key];
});
}
// Spawn the PHP CGI process with a bunch of environment variables that describe the request.
var php = spawn('./php-cgi', ['slim/public/index.php'], {
env: Object.assign({
REDIRECT_STATUS: 200,
REQUEST_METHOD: requestMethod,
SCRIPT_FILENAME: 'slim/public/index.php',
SCRIPT_NAME: '/index.php',
PATH_INFO: '/',
SERVER_NAME: serverName,
SERVER_PROTOCOL: 'HTTP/1.1',
REQUEST_URI: requestUri
}, headers)
});
// Listen for output on stdout, this is the HTTP response.
var response = '';
php.stdout.on('data', function(data) {
response += data.toString('utf-8');
});
// When the process exits, we should have a complete HTTP response to send back to API Gateway.
php.on('close', function(code) {
// Parses a raw HTTP response into an object that we can manipulate into the required format.
var parsedResponse = parseResponse(response);
// Signals the end of the Lambda function, and passes the provided object back to API Gateway.
context.succeed({
statusCode: parsedResponse.statusCode || 200,
headers: parsedResponse.headers,
body: parsedResponse.body
});
});
};

In some cases it's necessary to set CONTENT_LENGTH and/or CONTENT_TYPE in the environment in order for php-cgi to process $_POST properly. For example (where postBody is a string like "field1=value1&field2=value2"):
var env = {
'SCRIPT_FILENAME': script_path,
'REQUEST_METHOD': 'POST',
'REDIRECT_STATUS': 1,
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': Buffer.byteLength(postBody) // byte length of the body, not the character count
}
//if the URL has anything after "?", it should appear in $_GET even when the method is "POST"
if(queryString) env['QUERY_STRING'] = queryString;
The post body needs to be input into the stdin of the child.
Here is an async example:
var spawn = require('child_process').spawn;
var phpProcess = spawn (php_cgi_path, [script_path], {'env': env})
phpProcess.stdin.write( postBody );
phpProcess.stdin.end(); // close stdin so php-cgi knows the body is complete
var outputBuffer = [];
phpProcess.stdout.on('data', function(data) {
outputBuffer.push (data.toString());
})
phpProcess.stdout.on('end', function( ) {
var phpOutput = outputBuffer.join('') ;
// process php output
});
It's also possible to give the input data in a synchronous way, for example:
var spawnSync = require('child_process').spawnSync;
var phpProcessSync = spawnSync (php_cgi_path, [script_path], {'env': env, 'input': postBody})
var phpOutput = phpProcessSync.stdout.toString()
// process php output
Again, the POST body is passed in separately from "env".
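Applied to the Lambda wrapper from the question, this would mean adding CONTENT_TYPE and CONTENT_LENGTH to the environment and writing the body to the child's stdin. A minimal sketch, assuming API Gateway's proxy integration puts the raw request body in event.body and the request's Content-Type in event.headers (an assumption, not something stated in the original answer):
// Sketch only: pass the POST body from the Lambda event through to php-cgi.
var requestBody = event.body || '';
var contentType = (event.headers && event.headers['Content-Type']) || 'application/x-www-form-urlencoded';
var php = spawn('./php-cgi', ['slim/public/index.php'], {
    env: Object.assign({
        REDIRECT_STATUS: 200,
        REQUEST_METHOD: requestMethod,
        SCRIPT_FILENAME: 'slim/public/index.php',
        SCRIPT_NAME: '/index.php',
        PATH_INFO: '/',
        SERVER_NAME: serverName,
        SERVER_PROTOCOL: 'HTTP/1.1',
        REQUEST_URI: requestUri,
        CONTENT_TYPE: contentType,                      // needed for $_POST parsing
        CONTENT_LENGTH: Buffer.byteLength(requestBody)  // byte length of the body
    }, headers)
});
// Pipe the POST body into php-cgi's stdin, then close it.
php.stdin.write(requestBody);
php.stdin.end();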
It's also possible to modify the script so that it populates $_FILES as well.
For example, one can use a Uint8Array (instead of a string) to store the post body, set 'CONTENT_TYPE' to request.headers['content-type'] (so it carries the full value, e.g. "Content-Type: multipart/form-data; boundary=----WebKitFormBoundarynGa8p8HMIQ8kWQLA"), and then write the raw bytes with phpProcess.stdin.write( Buffer.from(postBody) );
PHP will then populate the $_FILES variable, including "tmp_name" and the other usual keys.
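A rough sketch of that idea (not from the original answer; rawBody is assumed to be a Buffer or Uint8Array holding the unmodified request body, and php_cgi_path/script_path are as in the earlier examples):
// Hypothetical sketch: forward a multipart/form-data body so PHP fills $_FILES.
var env = {
    SCRIPT_FILENAME: script_path,
    REQUEST_METHOD: 'POST',
    REDIRECT_STATUS: 1,
    CONTENT_TYPE: request.headers['content-type'], // keeps the boundary=... part
    CONTENT_LENGTH: rawBody.length                 // byte length of the raw body
};
var phpProcess = spawn(php_cgi_path, [script_path], { env: env });
phpProcess.stdin.write(Buffer.from(rawBody)); // write the bytes untouched
phpProcess.stdin.end();
// PHP should now see the upload in $_FILES (tmp_name, name, size, ...).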

Related

Does not work passing values to php from Node Js and back

I have a PHP file on the server which has a function, and I also have a Node.js API. I want to pass a Node.js value to the PHP script and then get the function's output back in Node.js.
I tried this using cookie-parser, as suggested by Christian here, but it does not work.
php script
<?php
$max = $_COOKIE["usrMob"]; // Taken from cookie
$min = 1111;
$number = mt_rand($min, $max); // Find random number
echo $number; // Send back to Node Js
?>
Node.Js
const express = require("express");
const cookieParser = require('cookie-parser');
const app = express();
app.use(cookieParser('Your Secret'));
app.get('/cookie', function (req, res)
{
// Set cookie
res.cookie('userMax', '46556') // options is optional
res.end();
console.log("Cookie is : " + res.cookie);
})
I have a PHP file on the server which has a function, and I also have a
Node.js API. I want to pass a Node.js value to the PHP script and then get the
function's output back in Node.js.
I tried this using cookie-parser, as suggested by Christian here, but it does not work
Short answer
Sharing cookies won't work here because of CORS: your Node.js server must be in the PHP server's allowed-origin list.
Long answer
Cookies are commonly used to store user settings, tokens, passwords or other sensitive data in the browser, so that the browsing experience can adapt to the user's choices.
Therefore they are not sent along when different servers communicate with each other, unless they are allowed to leave to an 'authorized origin'; otherwise that would be a major leak of data through cookies. Say hello to CORS (unless you don't own the target server).
Example:
You have a script on a TargetServer (TS) that sets a cookie there when the user does some stuff. After the user finishes with your script, you want to send data back to YourServer (YS); when the AJAX call fires, cookies won't be sent with the request the way you normally see when developing on localhost.
With your stack of tools another problem arises: each request you make to YS will generate a new session id (I'm looking at you, PHPSESSID), which means you won't know, for example, whether the user is logged in or not, even though you know for sure that he logged in earlier (yes, he is logged in, but in another session file...).
HOW TO TACKLE THIS PROBLEM:
Find an appropriate mechanism for encrypting/decrypting strings that both your script and PHP know.
When you're sending a request from TS to YS, add a custom header that YS will expect, e.g. REQUEST-CUSTOM-HEADER: encodedVersionOf('hey-give-me-the-session-id'). PHP will see the incoming header, decodeVersionOf('hey-give-me-the-session-id'), trigger some special if-branch and send you a response with a different header, RESPONSE-CUSTOM-HEADER: encodedVersionOf('here-is-the-session-id'). Your script will then save it in a cookie so you won't have to request it again, and just append it to your headers on future requests.
If PHP recognizes the incoming string as a valid session id, it can load that session (the one you know holds your data) with session_id($incoming_id); make sure to call session_id() before session_start().
I highly advise using JWT for this kind of thing, or some encrypted, stringified JSON, so you can carry an object like {session_id: '12idn3oind', userInfo: {name: 'test'}}.
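A minimal sketch of that idea with the jsonwebtoken package (the secret and payload values here are made up for illustration, not part of the original answer):
// Hypothetical sketch using jsonwebtoken (npm install jsonwebtoken).
const jwt = require('jsonwebtoken');
const SECRET = 'secret-shared-with-the-php-side'; // placeholder value

// Before sending the custom request header: encode the payload.
const token = jwt.sign({ session_id: '12idn3oind', userInfo: { name: 'test' } }, SECRET);

// When a custom response header comes back: decode and check it.
try {
    const payload = jwt.verify(token, SECRET);
    console.log(payload.session_id); // '12idn3oind'
} catch (err) {
    console.error('invalid or tampered token', err);
}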
Exchanging data through headers is the next best thing when CORS is involved.
I tackled this example once, wasn't pretty to do, but worth it in the end.
You can send/receive data to/from PHP; the only thing is that you should use headers so you don't affect the PHP output.
Since you own both servers you can do something like:
MOST IMPORTANT :
npm install -S express
Make sure you have enabled headers_module/mod_headers on your webserver.
We will use custom headers so you should allow & expose them:
.htaccess
Header add Access-Control-Allow-Headers "node-request, node-response"
Header add Access-Control-Allow-Methods "PUT, GET, POST, DELETE, OPTIONS"
Header add Access-Control-Expose-Headers "node-request, node-response"
Header add Access-Control-Allow-Origin "*"
PHP
<?php
$max = #$_COOKIE["usrMob"]; // Taken from cookie
$min = 1111;
$number = rand($min, $max); // Find random number
echo $number; // Send back to Node Js
if( isset($_SERVER['HTTP_NODE_REQUEST'])){
$req = json_decode($_SERVER['HTTP_NODE_REQUEST'], true);
$data = array();
// 'givemeanumber' is sent from Node.js server
if( isset($req['givemeanumber']) ){
$data = array(
'number' => $number
);
}
header('NODE-RESPONSE: '. json_encode(array("req" => $req, "res"=> $data)));
}
?>
Node.JS
Don't forget to change these lines to point to your PHP server:
getFromPHP('localhost', '9999', '/path-to-php-script', {givemeanumber: 1})
index.js
const express = require("express");
const app = express();
const port = 9999;
const { getFromPHP } = require('./middleware.js');
const apachePHPconfig = {
host: 'localhost',
port: 80,
urlpath: 'path-to-php-script'
}
app.get(
'/',
getFromPHP(apachePHPconfig.host, apachePHPconfig.port, apachePHPconfig.urlpath , {givemeanumber: 1}),
function (req, res) {
// here is your php object
console.log('php', req.php);
res.end();
})
app.listen(port, () => {
console.clear();
console.log(`Example app listening on port ${port}!`)
})
middleware.js
/**
* Middleware to get data from PHP
*/
const getFromPHP = (phpHost, phpPort, phpPath, phpObject) => {
if (typeof phpHost === 'undefined') {
throw new Error('phpHost was not defined');
}
if (typeof phpPort === 'undefined') {
throw new Error('phpPort was not defined');
}
if (typeof phpPath === 'undefined') {
phpPath = '/';
}
if (typeof phpObject !== 'object' ) {
phpObject = {};
}
return (req, res, next) => {
if (typeof req.php === 'undefined') {
req.php = {};
}
const options = {
hostname: phpHost, // change this to your php server host
port: phpPort, // change this with your php server port
path: phpPath, // change this with your php server path to script
method: 'POST',
headers: {
// here we send 'NODE-REQUEST'; it will be available in PHP under the $_SERVER global, prefixed with HTTP_, because it is a custom client request header.
'NODE-REQUEST': JSON.stringify(phpObject)
}
};
const isJSON = (str ) => {
try {
let j = JSON.parse(str);
return typeof j === 'object' && j !== null;
} catch (e) {
return false;
}
};
const httpModule = require('http');
let reqHttp = httpModule.request(options, (response) => {
if( typeof response.headers['node-response'] === 'undefined' || !isJSON(response.headers['node-response'])){
req.php = {};
}else{
req.php = JSON.parse(response.headers['node-response']);
}
// START - Remove this code when everything runs as expected
let dataStack = [];
response.on('data', (data)=>{
dataStack.push(data.toString());
})
response.on('end', ()=>{
console.log("PHP HEADERS", response.headers)
console.log('PHP OUTPUT', dataStack.join(''));
})
// END
next();
});
reqHttp.on('error', (e) => {
console.error(`problem with request to php server: ${e.message}`);
next();
});
reqHttp.on('end', () => {
next();
});
reqHttp.end();
}
}
exports.getFromPHP = getFromPHP;

Downloading files using php [duplicate]

I have a javascript app that sends ajax POST requests to a certain URL. Response might be a JSON string or it might be a file (as an attachment). I can easily detect Content-Type and Content-Disposition in my ajax call, but once I detect that the response contains a file, how do I offer the client to download it? I've read a number of similar threads here but none of them provide the answer I'm looking for.
Please, please, please do not post answers suggesting that I shouldn't use ajax for this or that I should redirect the browser, because none of this is an option. Using a plain HTML form is also not an option. What I do need is to show a download dialog to the client. Can this be done and how?
Don't give up so quickly, because this can be done (in modern browsers) using parts of the FileAPI:
var xhr = new XMLHttpRequest();
xhr.open('POST', url, true);
xhr.responseType = 'blob';
xhr.onload = function () {
if (this.status === 200) {
var blob = this.response;
var filename = "";
var disposition = xhr.getResponseHeader('Content-Disposition');
if (disposition && disposition.indexOf('attachment') !== -1) {
var filenameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
var matches = filenameRegex.exec(disposition);
if (matches != null && matches[1]) filename = matches[1].replace(/['"]/g, '');
}
if (typeof window.navigator.msSaveBlob !== 'undefined') {
// IE workaround for "HTML7007: One or more blob URLs were revoked by closing the blob for which they were created. These URLs will no longer resolve as the data backing the URL has been freed."
window.navigator.msSaveBlob(blob, filename);
} else {
var URL = window.URL || window.webkitURL;
var downloadUrl = URL.createObjectURL(blob);
if (filename) {
// use HTML5 a[download] attribute to specify filename
var a = document.createElement("a");
// safari doesn't support this yet
if (typeof a.download === 'undefined') {
window.location.href = downloadUrl;
} else {
a.href = downloadUrl;
a.download = filename;
document.body.appendChild(a);
a.click();
}
} else {
window.location.href = downloadUrl;
}
setTimeout(function () { URL.revokeObjectURL(downloadUrl); }, 100); // cleanup
}
}
};
xhr.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
xhr.send($.param(params, true));
Or if using jQuery.ajax:
$.ajax({
type: "POST",
url: url,
data: params,
xhrFields: {
responseType: 'blob' // to avoid binary data being mangled on charset conversion
},
success: function(blob, status, xhr) {
// check for a filename
var filename = "";
var disposition = xhr.getResponseHeader('Content-Disposition');
if (disposition && disposition.indexOf('attachment') !== -1) {
var filenameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
var matches = filenameRegex.exec(disposition);
if (matches != null && matches[1]) filename = matches[1].replace(/['"]/g, '');
}
if (typeof window.navigator.msSaveBlob !== 'undefined') {
// IE workaround for "HTML7007: One or more blob URLs were revoked by closing the blob for which they were created. These URLs will no longer resolve as the data backing the URL has been freed."
window.navigator.msSaveBlob(blob, filename);
} else {
var URL = window.URL || window.webkitURL;
var downloadUrl = URL.createObjectURL(blob);
if (filename) {
// use HTML5 a[download] attribute to specify filename
var a = document.createElement("a");
// safari doesn't support this yet
if (typeof a.download === 'undefined') {
window.location.href = downloadUrl;
} else {
a.href = downloadUrl;
a.download = filename;
document.body.appendChild(a);
a.click();
}
} else {
window.location.href = downloadUrl;
}
setTimeout(function () { URL.revokeObjectURL(downloadUrl); }, 100); // cleanup
}
}
});
Create a form, use the POST method, submit the form - there's no need for an iframe. When the server page responds to the request, write a response header for the mime type of the file, and it will present a download dialog - I've done this a number of times.
You want a Content-Type of application/download - just search for how to provide a download for whatever language you're using.
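In Node/Express terms, the server side of that idea might look like the following sketch (the route, file path and filename are placeholders, not from the original answer):
// Hypothetical Express route: answer the form POST with attachment headers so
// the browser shows its download dialog instead of navigating away.
const express = require('express');
const fs = require('fs');
const app = express();

app.post('/export', (req, res) => {
    const filePath = '/tmp/report.pdf'; // assumed path to the generated file
    res.setHeader('Content-Type', 'application/download');
    res.setHeader('Content-Disposition', 'attachment; filename="report.pdf"');
    fs.createReadStream(filePath).pipe(res);
});

app.listen(3000);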
I faced the same issue and successfully solved it. My use-case is this.
"Post JSON data to the server and receive an excel file.
That excel file is created by the server and returned as a response to the client. Download that response as a file with custom name in browser"
$("#my-button").on("click", function(){
// Data to post
data = {
ids: [1, 2, 3, 4, 5]
};
// Use XMLHttpRequest instead of Jquery $ajax
xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
var a;
if (xhttp.readyState === 4 && xhttp.status === 200) {
// Trick for making downloadable link
a = document.createElement('a');
a.href = window.URL.createObjectURL(xhttp.response);
// Give filename you wish to download
a.download = "test-file.xls";
a.style.display = 'none';
document.body.appendChild(a);
a.click();
}
};
// Post data to URL which handles post request
xhttp.open("POST", excelDownloadUrl);
xhttp.setRequestHeader("Content-Type", "application/json");
// You should set responseType as blob for binary responses
xhttp.responseType = 'blob';
xhttp.send(JSON.stringify(data));
});
The above snippet just does the following:
Posts an array as JSON to the server using XMLHttpRequest.
After fetching the content as a blob (binary), it creates a downloadable URL and attaches it to an invisible "a" link, then clicks it.
Here we need to carefully set a few things on the server side. I set a few headers in the Python Django HttpResponse. You need to set them accordingly if you use other programming languages.
# In python django code
response = HttpResponse(file_content, content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
Since I download an xls (Excel) file here, I adjusted the contentType to the one above. You need to set it according to your file type. You can use this technique to download any kind of file.
What server-side language are you using? In my app I can easily download a file from an AJAX call by setting the correct headers in PHP's response:
Setting headers server-side
header("HTTP/1.1 200 OK");
header("Pragma: public");
header("Cache-Control: must-revalidate, post-check=0, pre-check=0");
// The optional second 'replace' parameter indicates whether the header
// should replace a previous similar header, or add a second header of
// the same type. By default it will replace, but if you pass in FALSE
// as the second argument you can force multiple headers of the same type.
header("Cache-Control: private", false);
header("Content-type: " . $mimeType);
// $strFileName is, of course, the filename of the file being downloaded.
// This won't have to be the same name as the actual file.
header("Content-Disposition: attachment; filename=\"{$strFileName}\"");
header("Content-Transfer-Encoding: binary");
header("Content-Length: " . mb_strlen($strFile));
// $strFile is a binary representation of the file that is being downloaded.
echo $strFile;
This will in fact 'redirect' the browser to this download page, but as #ahren already said in his comment, it won't navigate away from the current page.
It's all about setting the correct headers so I'm sure you'll find a suitable solution for the server-side language you're using if it's not PHP.
Handling the response client side
Assuming you already know how to make an AJAX call, on the client side you execute an AJAX request to the server. The server then generates a link from where this file can be downloaded, e.g. the 'forward' URL where you want to point to.
For example, the server responds with:
{
status: 1, // ok
// unique one-time download token, not required of course
message: 'http://yourwebsite.com/getdownload/ska08912dsa'
}
When processing the response, you inject an iframe in your body and set the iframe's SRC to the URL you just received like this (using jQuery for the ease of this example):
$("body").append("<iframe src='" + data.message +
"' style='display: none;' ></iframe>");
If you've set the correct headers as shown above, the iframe will force a download dialog without navigating the browser away from the current page.
Note
Extra addition in relation to your question: I think it's best to always return JSON when requesting stuff with AJAX. After you've received the JSON response, you can decide client-side what to do with it. Maybe, for example, later on you want the user to click a download link to the URL instead of forcing the download directly; in your current setup you would have to update both the client and the server side to do so.
Here is how I got this working
https://stackoverflow.com/a/27563953/2845977
$.ajax({
url: '<URL_TO_FILE>',
success: function(data) {
var blob=new Blob([data]);
var link=document.createElement('a');
link.href=window.URL.createObjectURL(blob);
link.download="<FILENAME_TO_SAVE_WITH_EXTENSION>";
link.click();
}
});
Updated answer using download.js
$.ajax({
url: '<URL_TO_FILE>',
success: download.bind(true, "<FILENAME_TO_SAVE_WITH_EXTENSION>", "<FILE_MIME_TYPE>")
});
For those looking for a solution from an Angular perspective, this worked for me:
$http.post(
'url',
{},
{responseType: 'arraybuffer'}
).then(function (response) {
var headers = response.headers();
var blob = new Blob([response.data],{type:headers['content-type']});
var link = document.createElement('a');
link.href = window.URL.createObjectURL(blob);
link.download = "Filename";
link.click();
});
For those looking for a more modern approach, you can use the fetch API. The following code shows how to download a spreadsheet file.
fetch(url, {
body: JSON.stringify(data),
method: 'POST',
headers: {
'Content-Type': 'application/json; charset=utf-8'
},
})
.then(response => response.blob())
.then(response => {
const blob = new Blob([response], {type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'});
const downloadUrl = URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = downloadUrl;
a.download = "file.xlsx";
document.body.appendChild(a);
a.click();
})
I believe this approach to be much easier to understand than other XMLHttpRequest solutions. Also, it has a similar syntax to the jQuery approach, without the need to add any additional libraries.
Of course, I would advise checking which browsers you are targeting, since this new approach won't work on IE. You can find the full browser compatibility list on the following [link][1].
Important: In this example I am sending a JSON request to a server listening on the given url. This url must be set; in my example I am assuming you know this part. Also, consider the headers needed for your request to work. Since I am sending JSON, I must add the Content-Type header and set it to application/json; charset=utf-8, to let the server know the type of request it will receive.
I see you've already found out a solution, however I just wanted to add some information which may help someone trying to achieve the same thing with big POST requests.
I had the same issue a couple of weeks ago. Indeed, it isn't possible to achieve a "clean" download through AJAX. The Filament Group created a jQuery plugin which works exactly as you've already found out; it is called jQuery File Download. However, there is a downside to this technique.
If you're sending big requests through AJAX (say, files of 1MB+), it will negatively impact responsiveness. On slow Internet connections you'll have to wait a long time until the request is sent, and then wait for the file to download. It isn't an instant "click" => "popup" => "download start". It's more like "click" => "wait until data is sent" => "wait for response" => "download start", which makes the file feel like double its size, because you have to wait for the request to be sent through AJAX and then get it back as a downloadable file.
If you're working with small file sizes <1MB you won't notice this. But as I discovered in my own app, for bigger file sizes it is almost unbearable.
My app allows users to export dynamically generated images; these images are sent through POST requests in base64 format to the server (it is the only possible way), then processed and sent back to users as .png or .jpg files. Base64 strings for images of 1MB+ are huge, which forces users to wait more than necessary for the file to start downloading. On slow Internet connections it can be really annoying.
My solution for this was to temporarily write the file to the server and, once it is ready, dynamically generate a link to the file in the form of a button which switches between "Please wait..." and "Download" states, and at the same time print the base64 image in a preview popup window so users can right-click and save it. This makes all the waiting time more bearable for users, and also speeds things up.
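A rough sketch of that client-side flow (the /export endpoint, the base64Data variable, the response shape and the button id are all made up for illustration):
// Hypothetical sketch of the "temporary file + link" flow described above.
$('#export-btn').text('Please wait...').prop('disabled', true);

$.post('/export', { image: base64Data }, function (response) {
    // The server is assumed to respond with JSON like { url: "/downloads/abc123.png" }.
    $('#export-btn')
        .text('Download')
        .prop('disabled', false)
        .off('click')
        .on('click', function () {
            window.location.href = response.url; // plain navigation triggers the download dialog
        });
}, 'json');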
Update Sep 30, 2014:
Months have passed since I posted this; finally I've found a better approach to speed things up when working with big base64 strings. I now store the base64 strings in the database (using longtext or longblob fields), then I pass the record ID through jQuery File Download, and finally the download script queries the database using this ID to pull the base64 string and pass it through the download function.
Download Script Example:
<?php
// Record ID
$downloadID = (int)$_POST['id'];
// Query Data (this example uses CodeIgniter)
$data = $CI->MyQueries->GetDownload( $downloadID );
// base64 tags are replaced by [removed], so we strip them out
$base64 = base64_decode( preg_replace('#\[removed\]#', '', $data[0]->image) );
// This example is for base64 images
$imgsize = getimagesizefromstring( $base64 ); // the data is raw image bytes, not a file path
// Set content headers
header('Content-Disposition: attachment; filename="my-file.png"');
header('Content-type: '.$imgsize['mime']);
// Force download
echo $base64;
?>
I know this is way beyond what the OP asked, however I felt it would be good to update my answer with my findings. When I was searching for solutions to my problem, I read lots of "Download from AJAX POST data" threads which didn't give me the answer I was looking for; I hope this information helps someone trying to achieve something like this.
Here is my solution using a temporary hidden form.
//Create an hidden form
var form = $('<form>', {'method': 'POST', 'action': this.href}).hide();
//Add params
var params = { ...your params... };
$.each(params, function (k, v) {
form.append($('<input>', {'type': 'hidden', 'name': k, 'value': v}));
});
//Make it part of the document and submit
$('body').append(form);
form.submit();
//Clean up
form.remove();
Note that I use jQuery heavily here, but you can do the same with native JS.
I want to point out some difficulties that arise when using the technique in the accepted answer, i.e. using a form post:
You can't set headers on the request. If your authentication scheme involves headers, e.g. a JSON Web Token passed in the Authorization header, you'll have to find another way to send it, for example as a query parameter.
You can't really tell when the request has finished. Well, you can use a cookie that gets set on the response, as done by jquery.fileDownload, but it's FAR from perfect. It won't work for concurrent requests, and it will break if a response never arrives.
If the server responds with an error, the user will be redirected to the error page.
You can only use the content types supported by a form. Which means you can't use JSON.
I ended up using the method of saving the file on S3 and sending a pre-signed URL to get the file.
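A sketch of that last approach with the AWS SDK for Node.js (the bucket, key scheme, expiry and fileBuffer argument below are placeholders, not part of the original answer):
// Hypothetical sketch: store the generated file in S3, then hand the client a
// short-lived pre-signed URL it can simply navigate to.
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function getDownloadUrl(fileBuffer) {
    const key = 'exports/report-' + Date.now() + '.xlsx'; // made-up key scheme
    await s3.putObject({ Bucket: 'my-bucket', Key: key, Body: fileBuffer }).promise();
    // The URL expires after 60 seconds; the browser downloads it via a plain GET.
    return s3.getSignedUrl('getObject', { Bucket: 'my-bucket', Key: key, Expires: 60 });
}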
As others have stated, you can create and submit a form to download via a POST request. However, you don't have to do this manually.
One really simple library for doing exactly this is jquery.redirect. It provides an API similar to the standard jQuery.post method:
$.redirect(url, [values, [method, [target]]])
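Usage might look like this sketch (the URL and field name are placeholders):
// Builds a hidden form and POSTs it, so the browser handles the response as a
// normal navigation/download rather than an AJAX response.
$.redirect('/download/report', { reportId: 42 }, 'POST');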
This is a 3-year-old question, but I had the same problem today. I looked at your edited solution, but I think it can hurt performance because it has to make two requests. So if anyone needs another solution that doesn't involve calling the service twice, this is the way I did it:
<form id="export-csv-form" method="POST" action="/the/path/to/file">
<input type="hidden" name="anyValueToPassTheServer" value="">
</form>
This form is just used to call the service and avoid using window.location(). After that you simply have to submit the form from jQuery in order to call the service and get the file. It's pretty simple, but this way you can trigger a download using a POST. I know this could be easier if the service you're calling used GET, but that's not my case.
I used FileSaver.js. In my case, with CSV files, I did this (in CoffeeScript):
$.ajax
  url: "url-to-server"
  data: "data-to-send"
  success: (csvData)->
    blob = new Blob([csvData], { type: 'text/csv' })
    saveAs(blob, "filename.csv")
I think for more complicated cases, the data must be processed properly. Under the hood, FileSaver.js implements the same approach as the answer by Jonathan Amend.
see: http://www.henryalgus.com/reading-binary-files-using-jquery-ajax/
It'll return a blob as the response, which can then be passed to FileSaver.
Here is my solution, gathered from different sources:
Server side implementation :
String contentType = MediaType.APPLICATION_OCTET_STREAM_VALUE;
// Set headers
response.setHeader("content-disposition", "attachment; filename =" + fileName);
response.setContentType(contentType);
// Copy file to output stream
ServletOutputStream servletOutputStream = response.getOutputStream();
try (InputStream inputStream = new FileInputStream(file)) {
IOUtils.copy(inputStream, servletOutputStream);
} finally {
servletOutputStream.flush();
Utils.closeQuitely(servletOutputStream);
fileToDownload = null;
}
Client side implementation (using jquery):
$.ajax({
type: 'POST',
contentType: 'application/json',
url: <download file url>,
data: JSON.stringify(postObject),
error: function(XMLHttpRequest, textStatus, errorThrown) {
alert(errorThrown);
},
success: function(message, textStatus, response) {
var header = response.getResponseHeader('Content-Disposition');
var fileName = header.split("=")[1];
var blob = new Blob([message]);
var link = document.createElement('a');
link.href = window.URL.createObjectURL(blob);
link.download = fileName;
link.click();
}
});
Below is my solution for downloading multiple files based on a list of ids; the files are looked up in the database and made ready for download, if they exist.
I am calling a C# MVC action for each file using Ajax.
And yes, like others said, it is possible to do it with jQuery Ajax.
I did it in the Ajax success handler, and I always send a 200 response.
So, this is the key:
success: function (data, textStatus, xhr) {
And this is my code:
var i = 0;
var max = 0;
function DownloadMultipleFiles() {
if ($(".dataTables_scrollBody>tr.selected").length > 0) {
var list = [];
showPreloader();
$(".dataTables_scrollBody>tr.selected").each(function (e) {
var element = $(this);
var orderid = element.data("orderid");
var iscustom = element.data("iscustom");
var orderlineid = element.data("orderlineid");
var folderPath = "";
var fileName = "";
list.push({ orderId: orderid, isCustomOrderLine: iscustom, orderLineId: orderlineid, folderPath: folderPath, fileName: fileName });
});
i = 0;
max = list.length;
DownloadFile(list);
}
}
Then calling:
function DownloadFile(list) {
$.ajax({
url: '#Url.Action("OpenFile","OrderLines")',
type: "post",
data: list[i],
xhrFields: {
responseType: 'blob'
},
beforeSend: function (xhr) {
xhr.setRequestHeader("RequestVerificationToken",
$('input:hidden[name="__RequestVerificationToken"]').val());
},
success: function (data, textStatus, xhr) {
// check for a filename
var filename = "";
var disposition = xhr.getResponseHeader('Content-Disposition');
if (disposition && disposition.indexOf('attachment') !== -1) {
var filenameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/;
var matches = filenameRegex.exec(disposition);
if (matches != null && matches[1]) filename = matches[1].replace(/['"]/g, '');
var a = document.createElement('a');
var url = window.URL.createObjectURL(data);
a.href = url;
a.download = filename;
document.body.append(a);
a.click();
a.remove();
window.URL.revokeObjectURL(url);
}
else {
getErrorToastMessage("Production file for order line " + list[i].orderLineId + " does not exist");
}
i = i + 1;
if (i < max) {
DownloadFile(list);
}
},
error: function (XMLHttpRequest, textStatus, errorThrown) {
},
complete: function () {
if(i===max)
hidePreloader();
}
});
}
C# MVC:
[HttpPost]
[ValidateAntiForgeryToken]
public IActionResult OpenFile(OrderLineSimpleModel model)
{
byte[] file = null;
try
{
if (model != null)
{
//code for getting file from api - part is missing here as not important for this example
file = apiHandler.Get<byte[]>(downloadApiUrl, token);
var contentDispositionHeader = new System.Net.Mime.ContentDisposition
{
Inline = true,
FileName = fileName
};
// Response.Headers.Add("Content-Disposition", contentDispositionHeader.ToString() + "; attachment");
Response.Headers.Add("Content-Type", "application/pdf");
Response.Headers.Add("Content-Disposition", "attachment; filename=" + fileName);
Response.Headers.Add("Content-Transfer-Encoding", "binary");
Response.Headers.Add("Content-Length", file.Length.ToString());
}
}
catch (Exception ex)
{
this.logger.LogError(ex, "Error getting pdf", null);
return Ok();
}
return File(file, System.Net.Mime.MediaTypeNames.Application.Pdf);
}
As long as you return a 200 response, the Ajax success handler can work with it; you can check whether the file actually exists, since in that case the check below would be false, and inform the user about it:
if (disposition && disposition.indexOf('attachment') !== -1) {
To get Jonathan Amend's answer to work in Edge, I made the following changes:
var blob = typeof File === 'function'
? new File([this.response], filename, { type: type })
: new Blob([this.response], { type: type });
to this
var f = typeof File+"";
var blob = f === 'function' && Modernizr.fileapi
? new File([this.response], filename, { type: type })
: new Blob([this.response], { type: type });
I would rather have posted this as a comment but I don't have enough reputation for that
There is another solution for downloading a web page with Ajax, but I am referring to a page that must first be processed and then downloaded.
First you need to separate the page processing from the results download.
1) Only the page calculations are made in the Ajax call.
$.post("CalculusPage.php", { calculusFunction: true, ID: 29, data1: "a", data2: "b" },
function(data, status)
{
if (status == "success")
{
/* 2) In the answer the page that uses the previous calculations is downloaded. For example, this can be a page that prints the results of a table calculated in the ajax call. */
window.location.href = "DownloadPage.php?ID=" + 29;
}
}
);
// For example: in the CalculusPage.php
if ( !empty($_POST["calculusFunction"]) )
{
$ID = $_POST["ID"];
$query = "INSERT INTO ExamplePage (data1, data2) VALUES ('".$_POST["data1"]."', '".$_POST["data2"]."') WHERE id = ".$ID;
...
}
// For example: in the DownloadPage.php
$ID = $_GET["ID"];
$sede = "SELECT * FROM ExamplePage WHERE id = ".$ID;
...
$filename="Export_Data.xls";
header("Content-Type: application/vnd.ms-excel");
header("Content-Disposition: inline; filename=$filename");
...
I hope this solution can be useful for many, as it was for me.
If the response is an ArrayBuffer, try this in the Ajax success handler:
if (event.data instanceof ArrayBuffer) {
var binary = '';
var bytes = new Uint8Array(event.data);
for (var i = 0; i < bytes.byteLength; i++) {
binary += String.fromCharCode(bytes[i])
}
$("#some_id").append("<li><img src=\"data:image/png;base64," + window.btoa(binary) + "\"/></span></li>");
return;
}
where event.data is the response received in the success handler of the XHR request.
I needed a solution similar to #alain-cruz's, but in Nuxt/Vue with multiple downloads. I know browsers block multiple file downloads, and I also have an API which returns a set of CSV-formatted data. I was going to use JSZip at first, but I needed IE support, so here is my solution. If anyone can help me improve this, that would be great, but it's working for me so far.
API returns:
data : {
body: {
fileOne: ""col1", "col2", "datarow1.1", "datarow1.2"...so on",
fileTwo: ""col1", "col2"..."
}
}
page.vue:
<template>
<b-link @click.prevent="handleExport">Export</b-link>
</template>
export default {
data() {
return {
fileNames: ['fileOne', 'fileTwo'],
}
},
computed: {
...mapState({
fileOne: (state) => state.exportFile.fileOne,
fileTwo: (state) => state.exportFile.fileTwo,
}),
},
methods: {
handleExport() {
//exportFileAction in store/exportFile needs to return promise
this.$store.dispatch('exportFile/exportFileAction', paramsToSend)
.then(async (response) => {
const downloadPrep = this.fileNames.map(async (fileName) => {
// using lodash to get computed data by the file name
const currentData = await _.get(this, `${fileName}`);
const currentFileName = fileName;
return { currentData, currentFileName };
});
const prepared = await Promise.all(downloadPrep);
return prepared;
})
.then(async (data) => {
data.forEach(({ currentData, currentFileName }) => {
this.forceFileDownload(currentData, currentFileName);
});
})
.catch(console.error);
},
forceFileDownload(data, fileName) {
const url = window.URL
.createObjectURL(new Blob([data], { type: 'text/csv;charset=utf-8;' }));
const link = document.createElement('a');
link.href = url;
link.setAttribute('download', `${fileName}.csv`);
document.body.appendChild(link);
link.click();
},
}
}
I used Naren Yellavula's solution and got it working with a few changes to the script, after trying several other solutions using jQuery. jQuery would not download a zip file properly; I couldn't unzip the file after downloading it.
In my use case, I have to upload a zip file, which is unzipped in the servlet; the files are processed and zipped again before the zip file is downloaded to the client. This is what you need to do on the client side.
$('#fileUpBtn').click(function (e){
e.preventDefault();
var file = $('#fileUpload')[0].files[0];
var formdata = new FormData();
formdata.append('file', file);
// Use XMLHttpRequest instead of Jquery $ajax to download zip files
xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (xhttp.readyState === 4 && xhttp.status === 200) {
var a = document.createElement('a');
a.href = window.URL.createObjectURL(xhttp.response);
a.download = "modified_" + file.name;
a.style.display = 'none';
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
window.URL.revokeObjectURL(a.href);
}
};
xhttp.open("POST", "<URL to Servlet>", true);
xhttp.responseType = 'blob';
xhttp.send(formdata);
});
<div class="form-group">
<label id="fileUpLabel" for="fileUpload"></label>
<input type="file" class="form-control" id="fileUpload" name="file" accept="" required/>
</div>
<button class="btn" type="submit" id="fileUpBtn"></button>

REST server responds with javascript browser error (HTML) instead of json

I'm using Node.js (on AWS Lambda, for an Alexa skill) to request a JSON file from my web server, but my server responds with a 'JavaScript not supported' error HTML page.
This is my code:
function httpsGet(myData, callback) {
var http = require('http'); // note: despite the function name, this request goes over plain HTTP on port 80
// Update these options with the details of the web service you would like to call
var options = {
host: 'alexa0788.byethost24.com',
port: 80,
path: '/sample.php',
method: 'GET',
};
var req = http.request(options, res => {
res.setEncoding('utf8');
var returnData = "";
res.on('data', chunk => {
returnData = returnData + chunk;
});
res.on('end', () => {
console.log(returnData);
var pop = JSON.parse(returnData).firstName + JSON.parse(returnData).lastName;
callback(pop); // this will execute whatever function the caller defined, with one argument
});
});
req.end();
}
How can I make my server respond with the intended JSON and not force the client to support JavaScript? At the moment, I'm having a PHP file output the JSON. I also tried calling a .json file directly, instead of having a PHP file output the JSON, but I see the same error.

FileTransfer is not sending with parameters

I am using Appgyver to try and upload an image to my custom REST server using the built-in Cordova FileTransfer plugin.
(ON ANDROID) This is pretty much all of my code for trying to upload an image to a REST API. I can get the image and display it, and, worse, the upload actually succeeds and fires the success callback (and tells me how many bytes were sent), but for some reason the server is not getting any parameters with the file. It currently returns an empty array/object as the list of parameters it receives.
(ON IOS) Nothing happens as far as callbacks and I have no idea if the server is receiving anything.
I am using Laravel 4.2 with the Dingo API, Intervention Image, and JWT-Auth plugins.
Client code
$scope.addPost = function(title, price, description, id, uri)
{
var fileURL = uri;
//$scope.encoded = toInternalURL(uri);
var options = new FileUploadOptions();
options.fileKey = "file";
options.fileName = fileURL.substr(fileURL.lastIndexOf('/') + 1);
options.httpMethod = "POST";
//options.mimeType = "image/png";
options.chunkedMode = false;
//options.trustAllHosts = true;
var myToken = JSON.parse(localStorage.getItem("token"));
var headers =
{
'Authorization': 'Bearer ' + myToken + '',
'Content-Type': 'multipart/form-data'
};
options.headers = headers;
var params =
{
item_title: title,
item_price: price,
item_description: description,
category_id: id
};
options.params = serialize(params);
var ft = new FileTransfer();
// name of api is redacted
ft.upload(fileURL, encodeURI("name of api"), $scope.success, $scope.fail, options);
};
$scope.success = function (r)
{
supersonic.logger.log("Code = " + r.responseCode);
supersonic.logger.log(r.response);
//$scope.encoded = r.response;
supersonic.logger.log("Sent = " + r.bytesSent);
$scope.encoded = "SUCCEED";
};
$scope.fail = function (error)
{
$scope.encoded = error.body.toString();
//supersonic.logger.log(error.body.toString());
$scope.encoded = "FAIL";
};
Server Code
public function store() {
return Input::all();
}
The function is this simple because when I use the Intervention syntax Image::make('file')..... there is no input called 'file', and it therefore just returns nothing.
Any help would be greatly appreciated.

Can't send post to PHP script from nodejs and receive echo

I got the following code from here
I've tried this code with the variable data equal to 'test=yes' and with data equal to {test:'yes'}.
Here is the php script
if(isset($_POST['test'])){
error_log("inside");
echo "and I'd also like to return this";
};
And here is the nodejs piece of code:
function postToPHP(data, path){
var httpreq = require('http');
var querystring = require("querystring");
var data = querystring.stringify(data);
var options = {
host : 'localhost',
path : 'www' + path, //path is well defined in the actual code
method : 'POST',
headers : {
'Content-Type' : 'application/x-www-form-urlencoded',
'Content-Length' : data.length
}
};
var buffer = "";
var reqPost = httpreq.request(options, function(res) {
res.on('data', function(d) {
buffer = buffer+data;
});
res.on('end', function() {
return buffer;
});
});
reqPost.write(data);
reqPost.end();
}
And the call of postToPhp
//treat message?
var message = "test=yes";
//OR
var message = "{test:'yes'}";
var buffer = postToPHP(message,"path");
console.log("buffer from PHP",buffer);
buffer is undefined
Nothing is shown in the error log, and I assume something in the code isn't working that I can't figure out, so I hope someone can help me figure this one out.
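Two things stand out in the snippet above (offered as observations, not a tested fix): http.request is asynchronous, so postToPHP returns before the response arrives, which is why buffer is undefined at the call site; and the 'data' handler appends the request payload (data) instead of the response chunk (d). A callback-based sketch of the same function, under those assumptions:
// Sketch only: deliver the PHP response through a callback because the request is asynchronous.
function postToPHP(data, path, callback) {
    var http = require('http');
    var querystring = require('querystring');
    var body = querystring.stringify(data);
    var options = {
        host: 'localhost',
        path: path,
        method: 'POST',
        headers: {
            'Content-Type': 'application/x-www-form-urlencoded',
            'Content-Length': Buffer.byteLength(body)
        }
    };
    var req = http.request(options, function (res) {
        var buffer = '';
        res.on('data', function (chunk) { buffer += chunk; }); // append the response chunk
        res.on('end', function () { callback(buffer); });      // hand the result back
    });
    req.write(body);
    req.end();
}
// Usage: postToPHP({ test: 'yes' }, '/path', function (reply) { console.log(reply); });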
