I'm using Node.js (on AWS Lambda for an Alexa skill) to request a JSON file from my web server. But my server responds with a 'JavaScript not supported' error HTML page.
This is my code:
/**
 * Fetch JSON from the web service and hand the parsed full name to the caller.
 *
 * NOTE(review): despite the name, this issues a plain HTTP request on port 80
 * (the host does not serve TLS here) — rename or switch to `https` once
 * the server supports it.
 *
 * @param {*} myData - currently unused; kept for interface compatibility.
 * @param {function(string)} callback - invoked with firstName + lastName
 *   parsed from the JSON response body.
 */
function httpsGet(myData, callback) {
    // Update these options with the details of the web service you would like to call
    var options = {
        host: 'alexa0788.byethost24.com',
        port: 80,
        path: '/sample.php',
        method: 'GET',
    };
    var req = http.request(options, res => {
        res.setEncoding('utf8');
        var returnData = "";
        res.on('data', chunk => {
            returnData += chunk;
        });
        res.on('end', () => {
            console.log(returnData);
            // Parse once instead of twice, and guard against the host
            // returning a non-JSON error page (e.g. an anti-bot HTML page),
            // which would otherwise crash the Lambda with a SyntaxError.
            try {
                var parsed = JSON.parse(returnData);
                callback(parsed.firstName + parsed.lastName);
            } catch (e) {
                console.error('Response was not valid JSON:', e);
            }
        });
    });
    // Surface network-level failures instead of letting them crash the process.
    req.on('error', e => console.error(e));
    req.end();
}
How can I make my server respond with the intended json and not force the client to support javascript? At the moment, I'm having a php file output the json. I tried calling a .json file too directly, instead of making a php file output json, but I see the same error.
Related
I have a chat application created using flutter dart and php mysql as backend and also node js for socket.io
So what I want is: if I send a message via Flutter/Dart, I want to encrypt it and send it to Node.js via socket.io, which will forward it to the other client's Flutter app, where it is decrypted for the user to see. Node.js will also send it to my PHP script in JSON format, which stores it in my database via a POST request. I will also soon create a website where those messages will be decrypted and displayed in the user's browser, and when I send a message from the browser I also want to encrypt it and store it in the database, so that both Flutter and web users can see the decrypted form.
const https = require("https");
const http = require("http");
const qs = require("querystring");
/**
 * POST a chat message to the PHP backend over HTTPS.
 *
 * @param {Object} msg - flat key/value message fields, sent form-urlencoded.
 * @param {function(string)=} callback - optional; receives the full response
 *   body once the request completes. Use this instead of the return value:
 *   the request is asynchronous, so the synchronous return is always "".
 * @returns {string} always "" (kept only for backward compatibility).
 */
function send_to_db(msg, callback) {
    console.log(msg);
    var postData = qs.stringify(msg);
    var options = {
        hostname: "*****.com",
        port: 443,
        path: "/src/chats/post.php",
        method: "POST",
        rejectUnauthorized: true,
        headers: {
            "Content-Type": "application/x-www-form-urlencoded",
            // Content-Length must be the BYTE length, not the JS string
            // length, or messages containing multi-byte UTF-8 characters
            // get truncated by the server.
            "Content-Length": Buffer.byteLength(postData),
        },
        checkServerIdentity: function (host, cert) {
            return undefined;
        },
    };
    var buffer = "";
    var req = https.request(options, (res) => {
        res.on("data", function (chunk) {
            buffer += chunk;
        });
        res.on("end", function () {
            console.log(buffer);
            // Deliver the response asynchronously — this is the only point
            // at which `buffer` actually contains the server's reply.
            if (callback) callback(buffer);
        });
    });
    req.on("error", (e) => {
        console.error(e);
    });
    req.write(postData);
    req.end();
    // NOTE: the response has not arrived yet; buffer is still "" here.
    return buffer;
}
Right now I am only encrypting and decrypting using PHP, and I am afraid of a man-in-the-middle attack, since my data can be intercepted between the Flutter app and Node.js before reaching my PHP side.
Please what do I do to achieve this
I'm trying to download a phar (PHP Archive) with my program in node.js. After some testing, I've discovered that the files are not the same when downloaded from a browser and from node.js.
I've tried a few things:
"binary" encoding
"utf8" encoding
None of these works.
Do anyone know which encoding should I use to download a phar?
My code:
exports.download = function(urlStr, dest, cb) { // urlStr is the url link of the phar, dest is the destination file, and cb is the callback.
var options = {
headers: {
"User-Agent": "PSM (Pocketmine Server Manager, https://psm.mcpe.fun) User Requester"
}
}
var data = "";
var url = new URL(urlStr);
options.hostname = url.hostname;
options.path = url.pathname;
var request = http.get(options, function(response) {
// check if response is success
if (response.statusCode == 302 || response.statusCode == 301) {
exports.download(response.headers["location"], dest, cb);
return;
}
response.on("data", function(chunk) {
data += chunk.toString("binary");
})
response.on('end', function() {
fs.writeFileSync(dest, data, "binary");
cb();
});
}).on('error', function(err) { // Handle errors
fs.unlink(dest); // Delete the file async. (But we don't check the result)
if (cb) cb(err.message);
});
};
Phar used (valid): https://psm.mcpe.fun/download/PSMCore/1.1.phar
I noticed a few issues in the original code:
The main issue is that the binary data is being implicitly converted to a UTF-8 string, which will not preserve the content. Keep the data in Buffer form or just pipe the response to disk using streams instead of buffering the entire response in memory first.
When using asynchronous callbacks in node, it is by convention that you pass an actual Error object as the first argument instead of a string. Many times these objects provide much more information either not contained in the message itself, or is not as easily parseable from the error message (e.g. stack trace, libuv error code, contextual information such as http URIs, file paths, hostnames, etc.).
No 200 status code is being checked before saving the response to disk. You could end up saving error html pages (e.g. 400, 404, etc.) to disk instead of what you're expecting. You should also check that the Content-Type header is what you're expecting to further ensure the response is what you think it is.
An example with the first two items fixed is:
// Shared response reference so the error handler can drain a partially
// consumed response before aborting.
var res;
function onError(err) {
    if (res) {
        res.resume(); // Ensure response is drained
        res = null;
    }
    if (cb) {
        fs.unlink(dest, function() {});
        cb(err);
        cb = null;
    }
}
http.get(options, function(response) {
    // Check if response is success
    if (response.statusCode === 302 || response.statusCode === 301) {
        exports.download(response.headers["location"], dest, cb);
        // BUG FIX: on this path `res` has not been assigned yet (it is still
        // undefined), so calling `res.resume()` would throw. Drain the
        // redirect response itself instead.
        response.resume();
        return;
    }
    res = response;
    var out = fs.createWriteStream(dest);
    out.on('error', onError);
    response.pipe(out).on('close', function() {
        if (cb) {
            cb();
            cb = null;
        }
    });
}).on('error', onError);
I'm working on an AWS Lambda function in Node.js to call a PHP
URL
that will return a JSON encoded response so that Alexa (Amazon Dot, etc.) can reply to a user query. I've got all my intents set up properly and Alexa (online test) replies to the question, but the tag contains "Unchanged" (see code), indicating it isn't getting anything from the http.get() function in Node.js.
Here's the function to get the text that Alexa should speak:
/**
 * Fetch the response body of mypath from www.gypsysticks.com.
 *
 * http.get() is asynchronous, so the result CANNOT be returned
 * synchronously — pass a callback to receive the body. (This is why the
 * original always produced "Unchanged": the return statement ran before
 * the response arrived, and the nonexistent "response" event never fired.)
 *
 * @param {string} mypath - URL path to request.
 * @param {function(?Error, string=)=} callback - receives (null, body) on
 *   success or (err) on failure.
 * @returns {string} always "Unchanged" (kept for backward compatibility);
 *   the real data arrives via the callback.
 */
function getData(mypath, callback) {
    var http = require('http');
    var options = {
        host: 'www.gypsysticks.com',
        port: 80,
        path: mypath
    };
    var mydata = "Unchanged";
    http.get(options, function(res) {
        var body = "";
        // The response emits 'data' chunks followed by 'end' —
        // there is no 'response' event on the response object.
        res.on("data", function(chunk) {
            body += chunk;
        });
        res.on("end", function() {
            if (callback) callback(null, body);
        });
    }).on("error", function(err) {
        if (callback) callback(err);
    });
    return mydata; // still "Unchanged": the request has not completed yet
}
I'm building this "Skill" for Alexa for my band, Gypsy Sticks. Currently the url and path points to www.gypsysticks.com/echo/tonight.php which is supposed to return the location and time of the show for tonight. Right now I've just got it returning a JSON string with { "response" : "Test Success" }
I'm not familiar with Node.js, or the http.get() function.
Problem:
Modifying a Gzipped HTML response gives a white-screen when using node-http-proxy to proxy requests to a vagrant box.
What I have so far:
My method of intercepting the requests & modifying the html (seen below) is working well for all requests that are NOT Gzipped. (I've tested using node, ruby, PHP & apache servers).
The confusing part:
I have a test suite that starts the proxy & makes a request to it. If I console.log the response I can clearly see that the HTML has been modified - it's just that it won't display in the browser at all...
The proxy setup
// Proxy every request to the vagrant box, injecting a <script> tag before
// </body> in each response body chunk that passes through res.write.
// NOTE(review): this only works on uncompressed responses — gzipped bodies
// must be unzipped before the string replace (see answer below).
var server = httpProxy.createServer(function (req, res, proxy) {
    var forward = function () {
        proxy.proxyRequest(req, res, {
            host: "172.22.22.22",
            port: 80,
            changeOrigin: true
        });
    };
    // Wrap res.write so each chunk can be rewritten before it is sent.
    var write = res.write;
    res.write = function (string, encoding) {
        var body = string instanceof Buffer ? string.toString() : string;
        body = body.replace(/<\/body>/, function (w) {
            return "<script>console.log('injected')</script>\n" + w;
        });
        if (string instanceof Buffer) {
            // Buffer.from() — the new Buffer() constructor is deprecated
            // (DEP0005, unsafe allocation semantics).
            string = Buffer.from(body);
        } else {
            string = body;
        }
        write.call(res, string, encoding);
    };
    forward();
}).listen(3002);
// Remove content-length, defaults to 'chunked'
server.proxy.on("proxyResponse", function (req, res, response) {
if (response.headers.hasOwnProperty("content-length")) {
delete response.headers["content-length"];
}
});
My test case showing correctly modified HTML
// Regression test: the proxied HTML should contain the injected snippet.
it("can modify the html response", function (done) {
    var data;
    http.get(proxyHost, function (res) {
        var pieces = [];
        res.on("data", function (piece) {
            pieces.push(piece.toString());
        });
        res.on("end", function () {
            data = pieces.join("");
            console.log(data); // snippet can be seen in this response
            done();
        });
    });
});
Any ideas?
I have solved this problem.
Can't upload the source, but I can give you some hints on this.
use server.proxy.on("proxyResponse" to check content-encoding
do nothing but gather chunk on res.write
on res.end, zlib.unzip gathered chunks if content-encoding was "gzip" then oldWrite.call(res,decoded_string), oldEnd.apply(res,arguments)
if you want to change something do it while #3
Hope this help you out.
I'm trying to send a POST request, but it is not being sent. I don't get any output in the browser's console log.
My node server.js is running in x.x.x.x:8000 then I connect it with my client.html. x.x.x.x:8000/client.html
Here is my node.js server.
/**
 * Minimal static-file handler: serves files relative to the current
 * directory, defaulting to client.html for "/", with a content type
 * derived from the file extension (html unless .js or .css).
 *
 * @param {http.IncomingMessage} req - only req.url is read.
 * @param {http.ServerResponse} res - receives 200 + file, 404, or 500.
 */
function handler (req, res) {
    var filepath = '.' + req.url;
    if (filepath == './')
        filepath = './client.html';
    var extname = path.extname(filepath);
    var contentType = 'text/html';
    switch (extname) {
        case '.js':
            contentType = 'text/javascript';
            break;
        case '.css':
            contentType = 'text/css';
            break;
    }
    // path.exists() was removed from node core (and fs.exists() is
    // deprecated because check-then-read races with the filesystem).
    // Read the file directly and branch on the error code instead.
    fs.readFile(filepath, function (error, content) {
        if (error) {
            if (error.code === 'ENOENT') {
                // Missing file -> 404, matching the old exists() branch.
                res.writeHead(404);
                res.end();
            } else {
                res.writeHead(500);
                res.end();
            }
        } else {
            res.writeHead(200, { 'Content-Type': contentType });
            res.end(content, 'utf-8');
        }
    });
}
JAVASCRIPT CODE - I'm using ajax call and sending request to COMMAND.php
// Send { cmd: "OUT" } and log whatever the server replies with.
var onReply = function (data) {
    console.log(data);
};
$.post('/var/www/COMMAND.php', { cmd: "OUT" }, onReply);
PHP COMMAND.php - This writes to the the named pipe in linux. When it is done writing it echo success.
<?php
// Only the "OUT" command is supported: write it to the named pipe,
// then acknowledge the caller.
if ($_POST['cmd'] === 'OUT') {
    $fifo = fopen("/tmp/myFIFO", "w");
    fwrite($fifo, "OUT\n");
    fclose($fifo);
    echo "SUCCESS";
    exit;
}
?>
Why is it not sending any post requests to COMMAND.php? Is there any way for me to call COMMAND.php and execute the commands in it?
Because NodeJS runs JS, not PHP. Also, unlike Apache which has a built-in file handling, in NodeJS, you need to build code or use an existing library to route your urls to files.
As for your question, it's either you:
Setup another server to execute that PHP. Your AJAX is calling to your NodeJS server. You could route that request from NodeJS to your PHP server (Apache or whatever), basically making NodeJS act like a proxy.
Or create code in JavaScript for NodeJS that runs a similar routine as your PHP script, and you won't need PHP or another server anymore.