I use PHP (Laravel 5.8) to broadcast, and laravel-echo-server works fine. But recently I needed to do something myself, so I wrote a socket service in Node.js. This is my code:
const jwt = require('jsonwebtoken');
const server = require('http').Server();
const io = require('socket.io')(server);
...
// ==== The question is here =====================
const Redis = require('ioredis');
const redis = new Redis({
    port: REDIS_PORT,
    host: REDIS_HOST,
    password: REDIS_PASSWORD
});
redis.psubscribe('myTestChannel.*');
redis.on('pmessage', function(pattern, channel, message) {
    console.log(channel, message);
    const object_message = JSON.parse(message);
    io.sockets.emit(channel, object_message.data);
});
// ==== The question is here =====================
io.sockets.use(function (socket, next) {
    if (socket.handshake.query && socket.handshake.query.token) {
        // auth
    } else {
        next(new Error('Authentication error'));
    }
}).on('connection', function(socket) {
    console.log('==========connection============');
    console.log('Socket Connect with ID: ' + socket.id);
    socket.on('join', (data) => {
        ... do something
    });
    socket.on('disconnect', () => {
        ... do something
    });
});
server.listen(LISTEN_PORT, function () {
    console.log(`Start listen ${LISTEN_PORT} port`);
});
It works, but after running for a long time my PHP side gets an error: phpredis read error on connection. I'm not sure of the real cause, but I suspect my socket service, because everything was fine when I used laravel-echo-server.
Is the position of my redis.psubscribe call correct? Maybe it keeps a connection open for a long time and that causes the PHP read error on connection?
Should I move redis.psubscribe into on('connection') and unsubscribe on disconnect?
I want to know whether redis.psubscribe is the main cause of the problem. Thanks for your help.
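Keeping one long-lived subscriber connection outside on('connection') is the usual pattern (it is also how laravel-echo-server handles it), so the placement itself is unlikely to be the cause; phpredis "read error on connection" often points to a timeout or dropped connection on the PHP side rather than to this script. Either way, the Node subscriber can be hardened with an error handler and an explicit retry policy. A minimal sketch using standard ioredis options (REDIS_* constants as in the code above):

const Redis = require('ioredis');

const redis = new Redis({
    port: REDIS_PORT,
    host: REDIS_HOST,
    password: REDIS_PASSWORD,
    // reconnect with a capped backoff instead of giving up
    retryStrategy: function (times) {
        return Math.min(times * 100, 2000); // delay in ms before the next attempt
    }
});

// Without an error listener, a dropped connection becomes an unhandled error
redis.on('error', function (err) {
    console.error('Redis subscriber error:', err);
});

redis.on('reconnecting', function () {
    console.log('Redis subscriber reconnecting...');
});

redis.psubscribe('myTestChannel.*');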
Please help me! Sorry, my English is not very good. My server runs fine for a while and then this problem appears. I have tried everything, but Redis won't work the way I want.
Log:
Shutdown uncaughtException ReplyError: Ready check failed: NOAUTH Authentication required.
at parseError (/usr/src/app/node_modules/socket.io-redis/node_modules/redis-parser/lib/parser.js:179:12)
at parseType (/usr/src/app/node_modules/socket.io-redis/node_modules/redis-parser/lib/parser.js:302:14) {
command: 'INFO',
code: 'NOAUTH'
}
my code:
'use strict';
const redis = require('redis');
const config = require('../config');

exports.getClient = function (storeDb) {
    const store = redis.createClient({
        host: config.REDIS_HOST,
        port: config.REDIS_PORT,
        auth_pass: null
    });
    store.select(storeDb);

    store.on('error', function (err) {
        console.error('Redis error: ' + err);
    });

    process.on('exit', function (code) {
        console.log('Exit with code:', code);
        // store.flushdb(); //TODO Why we need this 1?
    });

    return store;
};
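The NOAUTH error means the Redis server requires a password but the client never sends one, and the stack trace points into socket.io-redis, which suggests the adapter's own Redis connections are the ones failing. A minimal sketch of an authenticated setup, assuming a config.REDIS_PASSWORD value exists (it is not part of the code shown):

'use strict';
const http = require('http');
const redis = require('redis');
const redisAdapter = require('socket.io-redis');
const config = require('../config');

const server = http.createServer();
const io = require('socket.io')(server);

// Pre-authenticated pub/sub clients for the adapter; the NOAUTH stack
// trace originates there, so these connections need the password too.
const pub = redis.createClient(config.REDIS_PORT, config.REDIS_HOST, { auth_pass: config.REDIS_PASSWORD });
const sub = redis.createClient(config.REDIS_PORT, config.REDIS_HOST, { auth_pass: config.REDIS_PASSWORD });
io.adapter(redisAdapter({ pubClient: pub, subClient: sub }));

// The client from getClient above, now authenticated as well:
const store = redis.createClient({
    host: config.REDIS_HOST,
    port: config.REDIS_PORT,
    password: config.REDIS_PASSWORD // instead of auth_pass: null
});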
I tried to use socket.io on the live server and I got this error:
polling-xhr.js:264 GET http://sub.domain.com:3000/socket.io/?EIO=3&transport=polling&t=MFUVMS5 net::ERR_TIMED_OUT
But on my local server, the files worked perfectly. I am working with socket.io and PHP.
Here is my code:
server.js
var socket = require('./node_modules/socket.io');
var express = require('./node_modules/express');
var app = express();
var server = require('http').createServer(app);
var io = socket.listen(server);
var port = process.env.PORT || 3000;

// server active console view
server.listen(port, function () {
    console.log('Server listening at port %d', port);
});

io.on('connection', function (socket) {
    // show new added online user
    socket.on('now_online', function (data) {
        io.sockets.emit('now_online', {
            id: data.id,
            name: data.name
        });
    });
});
main.js
var socket = io.connect('http://'+window.location.hostname+':3000');
socket.on('new_online_user', function (data) {
    if (login_id != data.online_user) {
        $('#contacts-list .contact[data-chat='+data.online_user+']'+' .contact-status').addClass('online');
    }
});
package.json
{
    "private": true,
    "dependencies": {
        "gulp": "^3.9.1",
        "express": "^4.16.2",
        "socket.io": "^2.0.4"
    }
}
I searched Google and Stack Overflow for this issue, but the solutions I found didn't work for me.
Thanks so much.
Try connecting using only the domain, without http, like:
var socket = io.connect(window.location.hostname+':3000', {transports: ["websocket", "xhr-polling", "htmlfile", "jsonp-polling"]});
It will automatically become ws://sub.domain.com:3000/socket.io/?EIO=3&transport=polling&t=MFUVMS5. I'm using something similar to this and it works fine.
I'm using Laravel 5.3 + Passport for authorization; Laravel is my back-end API, which is RESTful.
The front end is written in Angular.js, which communicates with the API through REST requests.
For real-time notifications I've used Laravel broadcasting events + Redis, with socket.io as the socket server and a socket client in Angular.js.
I want to authorize these events, and I've done it as far as I could:
BroadcastServiceProvider:
public function boot()
{
    Broadcast::routes(['middleware' => ['auth:api']]);

    Broadcast::channel('App.User.*', function ($user, $userId) {
        return (int) $user->id === (int) $userId;
    });

    Broadcast::channel('notifs.*', function ($user, $userId) {
        return $user->id === (int) $userId;
    });
}
This is my socket.js code, which runs my socket server:
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var Redis = require('ioredis');
var redis = new Redis();

redis.psubscribe('*', function(err, count) {});

redis.on('pmessage', function(subscribed, channel, message) {
    console.log(channel);
    message = JSON.parse(message);
    io.emit(channel + ':' + message.event, message.data);
});

http.listen(3000, function () {
    console.log('Listening on Port 3000');
});

redis.on("error", function (err) {
    console.log(err);
});
The problem is that I don't know how to authenticate these broadcast events on the socket server, nor how to authorize the user in Angular.js (SPA) to listen to these events.
I'd appreciate any help.
I'd definitely take a look at socketio-auth.
This module provides hooks to implement authentication in socket.io
without using querystrings to send credentials, which is not a good
security practice.
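For reference, a minimal sketch of how socketio-auth's authenticate hook is typically wired up; the token check is only a placeholder here (verifyTokenWithLaravel is a hypothetical helper, not part of the module):

var io = require('socket.io')(3000);

require('socketio-auth')(io, {
    // The client emits an 'authentication' event with its credentials,
    // e.g. socket.emit('authentication', { token: '...' });
    authenticate: function (socket, data, callback) {
        // verifyTokenWithLaravel is a placeholder for whatever check you run
        // against your Laravel/Passport API (e.g. an HTTP call with the token).
        verifyTokenWithLaravel(data.token, function (err, user) {
            if (err || !user) return callback(new Error('Not authorized'));
            socket.user = user;
            callback(null, true);
        });
    },
    timeout: 1000 // disconnect clients that never authenticate
});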
Another approach I took recently was simple token-based authentication using JWT tokens (njwt).
I did not want to recreate the authentication code that checks user credentials within Node.js (which in my case can't even connect to the database anyway). Rather, I let the PHP application that was using the socket leverage its already established authentication system, passing a signed token with the socket connect requests.
Your Node.js code might look something like...
primus.on('connection', function (spark) {
    logger.debug('primus event connection. spark id: ' + spark.id);
    spark.on('data', function(data) {
        var action = data.action;
        njwt.verify(data.token, JWT_SECRET, function(err, verifiedJwt) {
            if (err) {
                logger.warn('Bad JWT Token! ' + spark.id + ' Error: ' + err);
                spark.user = {id: null, is_authed: false, is_admin: false, application_ini: null};
                spark.end('Bad Token Request');
                return; //->
            }
            spark.user = {
                'id': verifiedJwt.body['user_id'],
                'is_authed': verifiedJwt.body['is_authed'],
                'application_ini': verifiedJwt.body['application_ini'],
                'is_admin': verifiedJwt.body['is_admin']
            };
            sockoasRooms.connect(spark.id, spark.user.application_ini, spark.user.id);
            switch (action) {
                ...
And then on the PHP side you'll need some code for generating the JWT tokens, but usage is very simple. Something like:
<?php
$tokenPayload = [
    'user_id'         => ($this->currentUser) ? $this->currentUser->getId() : 0,
    'is_authed'       => ($this->currentUser) ? true : false,
    'application_ini' => (string) APPLICATION_INI,
    'is_admin'        => (bool) ($this->currentUser) ? $this->currentUser->isAdministrator() : false,
    'now'             => time()
];
$jwtToken = \OAS_JWT::encode($tokenPayload, SOCK_OAS_JWT_KEY);
?>
$(document).ready(function() {
    primus = Primus.connect('ws://<?=SOCK_OAS_IP?>:<?=SOCK_OAS_PORT?>/_admin');

    primus.on('open', function () {
        showConnected();
        // Send request to subscribe
        primus.write({action: 'dashboard-dump', token: '<?=$jwtToken?>'});
        consoleWrite('Connected to dashboard.');
    });
You can evaluate the time component to avoid replay attacks. Anyway, sounds like this approach might meet your needs.
Off topic, but I'd also suggest taking a look at primus. It acts as a "universal wrapper for real-time frameworks", which lets you abstract things so you can swap out the socket libraries with no hassle. It might be a little lower level (engine.io) than what you are using, though.
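For a sense of what that wrapper looks like, here is a minimal Primus sketch; 'websockets' is just one of the supported transformers and can be swapped (e.g. for 'engine.io' or 'sockjs') without touching the connection-handling code:

var http = require('http');
var Primus = require('primus');

var server = http.createServer();

// Pick a transformer; the rest of the code stays the same if you change it.
var primus = new Primus(server, { transformer: 'websockets' });

primus.on('connection', function (spark) {
    console.log('connection from spark ' + spark.id);
    spark.on('data', function (data) {
        spark.write({ echo: data }); // echo back whatever the client sent
    });
});

server.listen(8080);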
I am developing a website with a private messaging system using PHP + socket.io.
From the beginning I passed the sender_id, recipient_id and text to socket.io using socket.emit, but I later realized that this could easily be tampered with, and I want to use my PHP sessions in some way to be sure that the sender_id really is the sender's id.
I have the following setup right now, but I don't really understand how to pass the session from index.php to app.js and then connect to the Redis server in app.js to get the PHPSESSID that holds the user_id.
Server 1 running nginx + php-fpm (index.php)
Server 2 running node.js with socket.io (app.js)
Server 3 running redis for session management
My code right now looks like the following, but it is obviously missing the Redis part, which I would really appreciate some help with.
Thanks!
index.php
<?php
session_start();
if ($_SESSION['user_id'] == false) {
    header("Location:login.php"); die;
}
?>
<script>
    var socket = io('https://app01.dev.domain.com:8895');

    socket.on('connect', function(){
        console.log("Connected to websockets");
    });
    socket.on('event', function(data){});
    socket.on('disconnect', function(){});

    $('.chat-message').keypress(function (e) {
        if (e.which == 13) {
            console.log("send message");
            var friend_id = $(this).attr('id');
            friend_id = friend_id.split("-");
            friend_id = friend_id[3];
            var obj = {
                recipient_id: friend_id,
                text: $(this).val()
            };
            socket.emit('chat_message', obj);
            $(this).val('');
            return false;
        }
    });
</script>
app.js
var https = require("https"), fs = require("fs");

var options = {
    key: fs.readFileSync('/etc/letsencrypt/live/domain/privkey.pem'),
    cert: fs.readFileSync('/etc/letsencrypt/live/domain/cert.pem'),
    ca: fs.readFileSync('/etc/letsencrypt/live/domain/chain.pem')
};

var app = https.createServer(options);
var io = require("socket.io")(app);
var redis = require("redis");

// This I want to fill with e.g. PHPSESSID: user_id fetched from Redis, to later use as the sender
// var all_clients = {};

io.set("transports", ["websocket", "polling"]);

io.on("connection", function(client){
    console.log("Client connected");

    // Here I would like to connect to Redis in some way and get the user_id, but I don't really understand how
    //all_clients[USER_ID_FROM_REDIS] = client.id;
    //var user_id = USER_ID_FROM_REDIS;

    client.on("chat_message", function(data){
        var obj = {
            to: data.recipient_id,
            text: data.text
        };
        console.log("Message inbound from socket: "+client.id+" from: "+data.user_id+" to: "+data.recipient_id+" with text: "+data.text);
    });

    client.on("disconnect", function(){
        console.log("Client disconnected");
        //delete all_clients[USER_ID_FROM_REDIS];
    });
});

app.listen(8895, function(){
    console.log("listening on *:8895");
});

var recursive = function () {
    //console.log("Connected clients: "+Object.keys(all_clients).length);
    //console.log(JSON.stringify(all_clients));
    setTimeout(recursive, 2000);
};
recursive();
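Here is one sketch of how that Redis lookup could be wired into app.js. It assumes PHP stores its sessions in Redis with the phpredis session handler's default "PHPREDIS_SESSION:" key prefix and that the browser actually sends the PHPSESSID cookie with the handshake; the Redis host name is a placeholder. Parsing the PHP-serialized payload is only noted in a comment:

var cookie = require("cookie"); // npm install cookie
// "redis" and "io" are the module and server already created in app.js above
var sessionStore = redis.createClient({ host: "redis03.dev.domain.com", port: 6379 }); // placeholder host

io.use(function (socket, next) {
    var cookies = cookie.parse(socket.handshake.headers.cookie || "");
    var sessId = cookies.PHPSESSID;
    if (!sessId) {
        return next(new Error("No PHP session cookie"));
    }
    sessionStore.get("PHPREDIS_SESSION:" + sessId, function (err, raw) {
        if (err || !raw) {
            return next(new Error("Session not found"));
        }
        // raw is the PHP-serialized session payload; extract user_id with a
        // PHP session parser, or have PHP also store the user_id under its
        // own plain key (e.g. "socket_user:" + sessId) to avoid parsing.
        socket.phpSession = raw;
        next();
    });
});

After this middleware runs, the connection handler can map the user id to client.id the way the commented-out all_clients object suggests. If the session cookie is not available across hosts, the JWT-token approach from the earlier answer is an alternative.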
HTTP by itself does not protect against MITM attacks; to protect against MITM, the server certificate needs to be pinned.
To protect against a user being spoofed, you need authentication, such as logging in or a secret token like Dropbox uses.
Add certificate pinning. That is just jargon for validating that you are connecting to the correct server and not a MITM, by verifying the certificate sent by the server. MITM used to be harder, but WiFi has made it easy to connect to the wrong endpoint at hotspots; even at home I have seen this.
My application stack:
A Redis server runs on my server. The PHP backend communicates with it through the Predis library and publishes messages. These messages are fetched by my Redis client (node.js) and pushed to the connected WebSocket clients (with SockJS).
My problem:
It runs well, at least for broadcast messages. Now I have reached the point where I need to send a unicast message and I'm stuck... How do I connect the user on the backend side (the sender of messages) with the connected client of the WebSocket?
Code snippets:
PHP
$redis = new Client();
$redis->publish('updates', Random::getUniqueString());
Redis client on node.js server
redis.subscribe('updates');

redis.on('message', function(channel, data) {
    for (var id in sockets) {
        if (sockets.hasOwnProperty(id)) {
            sockets[id].write(data);
        }
    }
});
SockJS client
mySocketFactory.setHandler('message', function(event) {
    console.log(event.data);
});
Like I said, it works well, but the id used for the socket connection is not known by the PHP backend.
Edit: One idea I have in mind is to use cookies.
I found a way to solve my problem. When the socket connection is established, I send a request to my PHP backend and ask for the user id, which is then stored on the node.js server. When messages come in, I check whether they are meant for a specific user and handle them only for that user.
So, what do I store exactly on my node server?
var sockets = {};        // {connection_id: socket_connection}
var connIdToUser = {};   // {connection_id: user_id}
var connIdsForUser = {}; // {user_id: [connection_id_1, connection_id_2, ...]}

socketServer.on('connection', function(conn) {
    sockets[conn.id] = conn;

    var options = {
        host: os.hostname(),
        port: 80,
        path: '/user/id',
        method: 'GET'
    };
    var req = http.request(options, function(res) {
        res.setEncoding('utf8');
        res.on('data', function (chunk) {
            var userId = JSON.parse(chunk).id;
            connIdToUser[conn.id] = userId;
            if (!connIdsForUser.hasOwnProperty(userId)) {
                connIdsForUser[userId] = [];
            }
            connIdsForUser[userId].push(conn.id);
            console.log('connection id ' + conn.id + ' related to user id ' + userId);
        });
    });
    req.end();

    conn.on('close', function() {
        console.log('connection lost ' + conn.id);
        // remove the connection id from the user's connection stack
        var connections = connIdsForUser[connIdToUser[conn.id]];
        var index = connections.indexOf(conn.id);
        if (index > -1) {
            connections.splice(index, 1);
        }
        // remove the connection entirely
        delete sockets[conn.id];
        // remove the relation between connection id and user
        delete connIdToUser[conn.id];
    });
});
The reason for storing the relation between user id and connection id twice is that I need different lookups for sending a message and for removing the connection on the close event; otherwise I would have to use a nested loop.
As you can see, deleting a socket is fairly easy, although deleting the connection from a user's connection stack is a little more involved.
Let's continue with sending a message. Here is the structure of the message I get from the Redis server:
{
    targets: [], // array of user ids (can be empty)
    data: <mixed> // the real data
}
Sending the data to the sockets looks like:
redis.on('message', function(channel, message) {
    message = JSON.parse(message);

    // unicast/multicast
    if (message.targets.length > 0) {
        message.targets.forEach(function(userId) {
            if (connIdsForUser[userId] !== undefined) {
                connIdsForUser[userId].forEach(function(connId) {
                    sockets[connId].write(message.data);
                });
            }
        });
    // broadcast
    } else {
        for (var id in sockets) {
            if (sockets.hasOwnProperty(id)) {
                sockets[id].write(message.data);
            }
        }
    }
});
Since I store the connection stack per user, it is quite easy to send the data to all sockets related to a specific user. So what I can do now is unicast (an array with one user id), multicast (an array with more than one user id) and broadcast (an empty array).
It's working well for my use case.
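For completeness, a sketch of what the matching publish call could look like on the PHP side with Predis; the user id 42 and the payload text are placeholders:

<?php
use Predis\Client;

$redis = new Client();

// Unicast/multicast: list the target user ids
$redis->publish('updates', json_encode([
    'targets' => [42], // placeholder user id
    'data'    => 'Hello, user 42'
]));

// Broadcast: empty targets array
$redis->publish('updates', json_encode([
    'targets' => [],
    'data'    => 'Hello, everyone'
]));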