Authentication in a Node.js REST API using JWT & Passport.js

In this article I will discuss basic authentication / authorization for a Node.js REST API. A few good npm modules help us do this:

jsonwebtoken: a Node.js wrapper for handling JSON Web Tokens
passport: the standard module for authentication / authorization
passport-jwt: a JWT-based Passport strategy, used here for authorization
passport-local: a password-based Passport strategy, used here for authentication

So first install them in your Express-based app:

$ npm install jsonwebtoken passport passport-jwt passport-local

Also install bcrypt for password hashing / comparison.
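For example, using the standard bcrypt package from npm:

$ npm install bcrypt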

First we should have our model ready (I am using Mongoose-based models). Let us call it models/user.js.

var mongoose = require('mongoose');
const bcrypt = require('bcrypt');

var UserSchema = new mongoose.Schema({
  firstName: { type: String },
  lastName: { type: String },
  email: { type: String },
  password: { type: String }
});

// Hash the password before saving, but only when it has been set or changed;
// otherwise an already hashed password would be re-hashed on every save.
UserSchema.pre('save', function (next) {
  var user = this;
  if (!user.isModified('password')) {
    return next();
  }
  bcrypt.hash(user.password, 10, function (err, hashedPassword) {
    if (err) {
      return next(err);
    }
    user.password = hashedPassword;
    next();
  });
});

// Compare a plain-text password against the stored hash.
UserSchema.methods.comparePassword = function (password, callback) {
  bcrypt.compare(password, this.password, function (err, isMatch) {
    if (err) {
      return callback(err);
    }
    callback(null, isMatch);
  });
};

module.exports = mongoose.model('User', UserSchema);

We have a pre-save hook in the user model, so when a password is saved into MongoDB it is bcrypt-hashed automatically. There is also a comparePassword method, which we will use to compare passwords during authentication.
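For example (a minimal sketch, assuming a MongoDB connection is already open):

var User = require('./models/user');

var user = new User({ firstName: 'John', lastName: 'Doe', email: 'john@example.com', password: 'secret' });
user.save(function (err) {
  if (err) return console.error(err);
  // The stored password is now a bcrypt hash, not the plain text 'secret'
  user.comparePassword('secret', function (err, isMatch) {
    console.log(isMatch); // true
  });
});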

Now we create a library file to hold the Passport.js specific logic. Let us create services/passport.js.

const passport = require('passport');
const LocalStrategy = require('passport-local').Strategy;
const userModel = require('../models/user');

passport.use('local', new LocalStrategy({
  usernameField: 'email',
  passwordField: 'password'
}, function (email, password, callback) {
  return userModel.findOne({ email: email }).then(user => {
    if (!user) {
      // No error occurred, but authentication failed.
      return callback(null, false, { msg: 'Incorrect email' });
    }

    return user.comparePassword(password, function (err, isMatch) {
      if (err) {
        return callback(err);
      }
      if (isMatch) {
        return callback(null, user, { msg: 'Success' });
      }
      return callback(null, false, { msg: 'Authentication failed. Wrong password.' });
    });
  }).catch(err => callback(err));
}));

In the above file we defined a local Passport strategy for password authentication (the JWT strategy for token authorization is added later in this article). In the verify function we first look the user up by email and then check the password using the model's comparePassword method.

Let us see how our Express controller handles the authentication request. Let us call it controllers/auth.js.

var express = require('express');
var router = express.Router();
var jwt = require('jsonwebtoken');
var passport = require('passport');
require('../services/passport');
var User = require('../models/user');
var config = require('../config'); // assumed config file holding config.jwt.secret

router.post('/signIn', function (req, res, next) {
  passport.authenticate('local', {session: false}, (err, user, info) => {
    if (err || !user) {
      res.status(401).send({success: false, msg: 'Login Failed'});
    } else {
      req.login(user, {session: false}, function(err) {
        if (err) {
          res.status(401).send({success: false, msg: err});
        } else {
          var token = jwt.sign({ id: user.id, email: user.email, fullName: user.firstName + ' ' + user.lastName }, config.jwt.secret, {
            expiresIn: 86400 // expires in 24 hours
          });
          return res.json({success: true, token: token});
        }
      });
    }
  })(req, res, next);
});

module.exports = router;

Here we are using Passport authentication with the local strategy to log the user in. After a successful login, the code generates a JWT token as our API session token using the sign method and sends it along with the response.
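For example (a hedged sketch: it assumes this router is mounted at the application root and that URL-encoded request bodies are parsed, neither of which is shown above):

$ curl -X POST -d 'email=kuldeep@gmail.com&password=kamboj' http://localhost:3000/signIn

On a correct email/password pair the response contains {"success": true, "token": "..."}.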

Now the next part is authorization. Add the following code to services/passport.js for the JWT strategy.

const passportJWT = require('passport-jwt');
const extractJwt = passportJWT.ExtractJwt;
const jwtStrategy = passportJWT.Strategy;
// Must be the same secret used to sign the token in the auth controller.
const jwtSecret = 'SOMETOKEN';

passport.use('jwt', new jwtStrategy({
  jwtFromRequest: extractJwt.fromAuthHeaderAsBearerToken(),
  secretOrKey: jwtSecret
}, function (jwtPayload, callback) {
  return userModel.findById(jwtPayload.id)
    .then(user => {
      return callback(null, user);
    })
    .catch(err => {
      return callback(err);
    });
}));

 

This strategy uses passport-jwt's fromAuthHeaderAsBearerToken extractor to read the token from the Authorization header of the REST request.
The request header must look like this for authorization to succeed:

Authorization: Bearer TOKEN

It will be used as middleware in our app.js. So let us change our controller registration from

app.use('/users', require('./controllers/UserController'));

to

const passport = require('passport');
require('./services/passport');
app.use('/users', passport.authenticate('jwt', {session: false}), require('./controllers/UserController'));

This middleware invokes our JWT strategy and verifies the token from the HTTP Authorization header. So all requests that have no header, a wrongly formatted header, or invalid token data will be rejected as unauthenticated requests.
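As a quick check (the token value being whatever /signIn returned), a protected request is sent like this:

$ curl -H "Authorization: Bearer TOKEN" http://localhost:3000/users

Requests without the header, or with an invalid token, receive a 401 Unauthorized response from the Passport middleware.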

Mongoose ORM basic usage

Mongoose is an ODM (Object Document Mapper) for Node.js and MongoDB. It maps MongoDB collections to Node.js models to provide an object-oriented way of performing DB operations. It can be installed using npm in the current project directory.

$ npm install mongoose --save

A connection can be made in our app/controller files with:

var mongoose = require('mongoose');
mongoose.connect('mongodb://HOSTNAME/DBNAME');

Now we need to add a model file for our collection.

We will create the following model:

models/test.js

Let us see the test model code:

const mongoose = require('mongoose');
let Schema = mongoose.Schema;

const TestSchema = new Schema({
  string_col: { type: String, default: '' },
  number_col: { type: Number, default: 0 },
  bool_col: { type: Boolean, default: null },
  mixed_col: { type: Schema.Types.Mixed, default: '' },
  oid_col: { type: Schema.Types.ObjectId, default: null },
  array_col: [{ type: String, default: '' }],
  // Pass the function reference (Date.now, not Date.now()) so the default
  // is evaluated at save time rather than once when the schema is defined.
  date_col: { type: Date, default: Date.now }
});

const testModel = mongoose.model('test', TestSchema);
module.exports = testModel;

Here we describe a schema for a MongoDB collection. The key column _id is added automatically and need not be described. Fields are described by their types. The following types are supported.

a) String
The String type is used for string fields.

b) Number
The Number type is used for numeric fields (both integers and floats).

c) Boolean
The Boolean type can hold the boolean values true & false.

d) Mixed
The Mixed type can hold any type of value. It is mainly used when the shape of the data is uncertain.

e) ObjectId
The MongoDB object id type. Mainly used for foreign-key style references to keys in the same or another collection.

f) Date
The Date type handles ISO-formatted dates like ISODate("2018-03-19T12:51:01.585Z").

g) Array
This type is used to store array data. We can define the type of the array's values, for example:
[{type: String}]
[{type: Number}]
[{type: Schema.Types.Mixed}]
[{type: Date}]

We can create multi-level arrays by using the following syntax:

[[{type: String}]]

If we do not define a type and use syntax like [] or [[]], the field can hold array values of any type.
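For example (a small sketch with field names of my own choosing):

const ListSchema = new Schema({
  tags: [{ type: String }],       // array of strings
  matrix: [[{ type: Number }]],   // two-dimensional array of numbers
  anything: []                    // untyped array: values of any type
});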

We can use Mongoose's save API call to save data into the collection.

var mongoose = require('mongoose');
var testModel = require('./models/test');

var testObj = new testModel();
testObj.string_col = 'Kuldeep';
testObj.number_col = 1234;
testObj.mixed_col = {'age': 45};
testObj.array_col = ['John', 'Doe'];
testObj.bool_col = true;
testObj.oid_col = mongoose.Types.ObjectId("5906d4017362098740b23eeb");
testObj.date_col = new Date();
testObj.save((err, res) => {
  if (err) return console.error(err);
  console.log('SAVED;');
  mongoose.disconnect();
});

This code creates a new object of the test model with the required properties, and the save call inserts the data into the collection.

If we need to update data we can use either a findOneAndUpdate call or a findOne + save call.

USING findOneAndUpdate

testModel.findOneAndUpdate(
  { _id: mongoose.Types.ObjectId("5aafb50bd1443f49a873784c") },
  {
    $set: {
      string_col: "New Str"
    }
  },
  {
    upsert: false
  },
  (err, res) => {
    if (err) return console.error(err);
    console.log('SAVED;');
    mongoose.disconnect();
  }
);

The first argument is the condition used to select the single record to update.
The second argument contains the fields to update.
The third argument is optional. If upsert is set to true, the data is inserted as a new record when no record matches the condition.
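For illustration (a minimal sketch; the _id value is only an example), the same call with upsert enabled inserts a new document when nothing matches:

testModel.findOneAndUpdate(
  { _id: mongoose.Types.ObjectId("5aafb50bd1443f49a873784c") },
  { $set: { string_col: "New Str" } },
  { upsert: true, new: true }, // new: true returns the updated (or freshly inserted) document
  (err, doc) => {
    if (err) return console.error(err);
    console.log(doc);
  }
);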

findOne + Save Method

testModel.findOne({ _id: mongoose.Types.ObjectId("5aafb50bd1443f49a873784c") }, (err, testObj) => {
  if (err) return console.error(err);
  testObj.string_col = 'NEW DATA';
  testObj.save((err, res) => {
    if (err) return console.error(err);
    console.log('SAVED;');
    mongoose.disconnect();
  });
});

This method is similar to the insert example, since we use the same save call to update the data; the difference is that we do not create a new model instance but fetch the object from the DB using findOne.

Node JS Promises, Practical uses

In the last article I discussed Node.js callbacks & native (ES6) promises.

There are more cases where promises come in handy.

Consider a case where we need to get a user's information for a given id, but the information is spread across different SQL tables / MongoDB collections.

// A promise passes its resolved value down the chain, so each step receives
// the previous step's result and merges its own result into it.

function getUserInfo(userId) {
  // Returns a promise for the user document
  return userModel.findOne({_id: userId}).exec();
}

function getUserAddressInfo(user) {
  // Returns a promise for [user, address]
  return userAddrModel.findOne({_id: user._id}).exec().then((address) => {
    return [user, address];
  });
}

function getUserExperienceInfo(preResults) {
  // preResults is [user, address]; append the experience info
  const user = preResults[0];
  return userExpModel.findOne({_id: user._id}).exec().then((experience) => {
    return [...preResults, experience];
  });
}

function getUserEducationInfo(preResults) {
  // preResults is [user, address, experience]; append the education info
  const user = preResults[0];
  return userEduModel.findOne({_id: user._id}).exec().then((education) => {
    return [...preResults, education];
  });
}

getUserInfo(userId)
  .then(getUserAddressInfo)
  .then(getUserExperienceInfo)
  .then(getUserEducationInfo)
  .then((results) => {
    console.log(results);
  }).catch(err => {
    console.error('Getting error', err);
  });

The chained approach above is not very efficient here: the queries are independent of each other, yet each one waits for the previous one to finish, which increases the total response time.

We need parallel execution of the functions, but we still need all of their output in one final place. We need to change our code to something like this:

function getUserInfo(userId) {
  return userModel.findOne({_id: userId}).exec();
}

function getUserAddressInfo(userId) {
  return userAddrModel.findOne({_id: userId}).exec();
}

function getUserExperienceInfo(userId) {
  return userExpModel.findOne({_id: userId}).exec();
}

function getUserEducationInfo(userId) {
  return userEduModel.findOne({_id: userId}).exec();
}

NOTE: exec() is a Mongoose API method that returns a promise for the query result.

Promise.all([
  getUserInfo(userId),
  getUserAddressInfo(userId),
  getUserExperienceInfo(userId),
  getUserEducationInfo(userId)
]).then((results) => {
  console.log(results);
}).catch(err => {
  console.error('Getting error', err);
});

Here all the functions are started in parallel and combined by the Promise.all API method, which takes an iterable (array) of promises. The result is an array in which the first element is the result of getUserInfo, the second of getUserAddressInfo, and so on…
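Since the order is guaranteed, the results can also be picked up by position with array destructuring (a small sketch under the same assumptions as above):

Promise.all([
  getUserInfo(userId),
  getUserAddressInfo(userId),
  getUserExperienceInfo(userId),
  getUserEducationInfo(userId)
]).then(([info, address, experience, education]) => {
  console.log(info, address, experience, education);
}).catch(err => {
  console.error('Getting error', err);
});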

If we need to apply a function to every value of an array, we can still use promises, with map (or any other iteration method you prefer). Say we need some user profile content from a public URL for a group of users held in an array:

const http = require('http');

const promises = userIds.map((userId) => {
  return new Promise((resolve, reject) => {
    http.get('http://www.example.com/user' + userId, function (response) {
      resolve(response);
    }).on('error', reject);
  });
});

Promise.all(promises).then((results) => {
  console.log(results);
}).catch(err => {
  console.error(err);
});

Another Promise API function besides Promise.all is Promise.race, which settles with the result of the first promise that completes:

Promise.race([
  getSite1Feeds(),
  getSite2Feeds(),
  getSite3Feeds(),
  getSite4Feeds()
]).then((result) => {
  console.log(result); // Whichever feed resolves first
}).catch(err => {
  console.error('Getting error', err);
});

Mozilla Promises API Documentation

 

 

Callbacks & Promise in node js

Today I will share the Node.js callback and promise system. Node.js is asynchronous and event-based, which means it does not wait for a task to complete before moving on to the next instruction. So, as an example, if we try to read a file from the file system and then print its contents, it may not work:

const fs = require("fs");
let content;
fs.readFile("file.txt", "utf8", function (err, data) {
  content = data;
});
console.log(content); // undefined: the callback has not run yet

In the above code, the interpreter jumps to console.log even before the file read has completed, due to the asynchronous behaviour.
Node.js resolves this by making all of its core APIs async-friendly through callbacks. The above example actually works like this:

const fs = require("fs");
fs.readFile("file.txt", "utf8", function (err, data) {
  if (err) {
    return console.log(err);
  }
  console.log(data);
});

Here the readFile API takes a callback function which is executed after the file read operation finishes. It has two arguments, err and data; data holds the success response, which for readFile is the file contents.
Node.js core APIs generally work like this: they take a callback with error and response parameters. We can implement our own callbacks in the same style for asynchronous tasks.

const http = require("http");

function asyncfunction(inp, callback) {
  // do some async task; here we treat the input as the URL to fetch
  http.get(inp, function (response) {
    callback(response);
  });
}

The above function can be used like this:

asyncfunction(data, function (httpResponse) {
  console.log(httpResponse);
});

When such callbacks are nested, we can end up with an interesting pattern like the one below.

 func1(data, function() {
    data.mod1 = 1;
    func2(data, function() {
        data.mod2 = 2;
        func3(data, function() {
            data.mod3 = 3;
            func4(data, function() {
                 data.mod4 = 4;
                 console.log( data);
            })
        });
    });
});

The above code shows a hierarchy of callbacks, famously known as Callback Hell. It is problematic because of poor readability and poor maintainability. There are multiple ways to avoid callback hell.
The first is to use modular code, as I did earlier with asyncfunction.

The second approach is to use promises. A promise gives us better control over asynchronous code.
A promise settles into one of two states: resolved or rejected. In core Node.js we can use a promise like this:

const http = require("http");

function MyPromisedFunc() {
  return new Promise(function (resolve, reject) {
    http.get(url, function (response) {
      resolve(response);
    }).on('error', function (err) {
      reject('Error in http response');
    });
  });
}

return MyPromisedFunc().then(function (response) {
  console.log(response);
}).catch(function (err) {
  console.log(err);
});

The above code is more readable and manageable than the callback version. At whatever point in our async code we get the actual response, we simply resolve the promise; if we get an error, we reject it.
This is then handled by the then & catch blocks.
It looks even cleaner with multiple async functions that need to run one after another:

return MyPromisedFunc1()
     .then(MyPromisedFunc2)
     .then(MyPromisedFunc3)
     .then(MyPromisedFunc4)
     .then(function(response) {
          console.log(response);
     }).catch(function(err) {
          console.log(err);
     });

Now we have executed four asynchronous functions in sequence with much better manageability.
The core Promise API is still quite limited in functionality compared to third-party promise libraries such as Bluebird, Q and others.

For example, Bluebird supports parallel execution with better control:

const bluebirdPromise = require('bluebird');

return bluebirdPromise.all([
  MyPromisedFunc1(),
  MyPromisedFunc2(),
  MyPromisedFunc3(),
  MyPromisedFunc4()
]).then(function (responseArray) {
  console.log(responseArray[0]);
  console.log(responseArray[1]);
  console.log(responseArray[2]);
  console.log(responseArray[3]);
}).catch(function (err) {
  console.log(err);
});

Node REST API Testing with Jasmine

In the last article, REST using Node/Express & Mongoose, I talked about setting up REST APIs with Node.js, Express & Mongoose. Today I will discuss unit testing those REST APIs using Jasmine.

Jasmine is a behaviour-driven testing framework for JavaScript applications. It provides test rules that are easy to write and understand. While it can be downloaded from https://github.com/jasmine/jasmine, in Node.js we will use the npm package named jasmine-node.

$ npm install --save jasmine-node

We also install the request package for easy HTTP calls.

$ npm install --save request

Now create a test folder; let us name it spec. We will define our test cases in this folder. Create a file named hello.spec.js and put the following code into it:

describe("Hello World Suite", function() {
    it("check if text is 'Hello World'", function(done) {
        let text = "Hello World";
        expect(text).toBe("Hello World");
        done();
    });
   it("check if another text is 'Hello World'", function(done) {
        let text = "Not Hello World";
        expect(text).toBe("Hello World");
        done();
    });
});

Now we run the command:

$ node_modules/.bin/jasmine-node spec

This command reports an assertion failure for the second test, like below:

Failures:

1) Hello World Suite check if another text is 'Hello World'
 Message:
 Expected 'Not Hello World' to be 'Hello World'.

2 tests, 2 assertions, 1 failure, 0 skipped

If we change the text in the second test from "Not Hello World" to "Hello World" and re-run the command, we get assertion success like below.

2 tests, 2 assertions, 0 failures, 0 skipped
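Optionally (my own addition, not something jasmine-node requires), you can wire the runner into npm scripts so that a plain npm test runs the suite:

"scripts": {
    "test": "jasmine-node spec"
}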

In the above tests, "describe" works as a test suite, so it can contain many related tests. For example, for a particular form field's data validations, all checks like not-empty, data type and duplicate can be part of a single suite. describe takes a description that states the purpose of the suite.

Individual tests inside a suite are defined via "it", which takes the test description as its first argument and a callback as its second.

We need two statements to complete a test: an expect statement, which is the test assertion, and the done function, which notifies the framework that the test has finished. We can make many types of assertions:

expect().toBe()
expect().toBeDefined() AND expect().toBeUndefined()
expect().toBeFalsy() AND expect().toBeTruthy()
expect().toBeNull()
expect().toEqual()
expect().toMatch()
expect().toThrow()

And more…
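As a quick illustration of a few of these matchers (a standalone sketch, not tied to our API):

it("demonstrates a few common matchers", function(done) {
    expect(1 + 1).toBe(2);                                 // strict equality
    expect({a: 1}).toEqual({a: 1});                        // deep equality
    expect("hello world").toMatch(/world/);                // regular expression match
    expect(undefined).toBeUndefined();
    expect(null).toBeNull();
    expect(function() { JSON.parse("{bad"); }).toThrow();  // expects the function to throw
    done();
});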

Let us write tests for the REST APIs from the previous article. Create a file called users.spec.js in the spec folder.

var request = require("request");

var base_url = "http://localhost:3000/users";

let User = {
    _id: 1,
    name: '',
    email: '',
    password: ''
};

describe("Users List API Exists", function() {
    describe("GET /users", function() {
        it("returns status code 200", function(done) {
            request.get(base_url, function(error, response, body) {
                expect(response.statusCode).toBe(200);
                done();
            });
        });
    });
});

The above test checks whether the list API is available; it verifies this by checking that the response status code equals 200. Add another test to the inner describe:

it("API Response should be valid json", function(done) {
    request.get(base_url, function(error, response, body) {
        expect(() => {
            JSON.parse(body);
        }).not.toThrow();
        done();
    });
});

The above test checks whether the response is valid JSON. When the response is invalid JSON, the native JSON.parse function throws an error, which is caught by expect(...).not.toThrow(), so the assertion fails; for valid JSON the assertion passes.

it("API Response should be valid array of user objects", function(done) {
    request.get(base_url, function(error, response, body) {
        let users = JSON.parse(body);
        users.forEach((userRow) => {
            expect(JSON.stringify(Object.keys(User).sort()) === JSON.stringify(Object.keys(userRow).sort())).toBeTruthy();
        });
        done();
    });
});

The above test goes a step further and checks that the returned fields are valid. It can be skipped if there is no restriction on which fields are returned in the response.

I will discuss more about Jasmine and API testing in further articles.

All code is hosted on Github

REST using Node/Express & Mongoose

Node.js is a fast-growing platform in today's environment. For the web there are several frameworks, but Express is the most used framework for web development in Node.js.

So how do we make a sample REST API with Express / Node.js?

First set up Express using npm. We use the following commands to run npm init and install the required packages:

# add package json
$ npm init

$ npm install --save express
$ npm install --save mongoose body-parser

The above commands install the Express framework into node_modules, then the HTTP request body parser package, plus the ODM package for MongoDB called Mongoose. Now it is time to do some coding. First create the main application file app.js and put in the following code:

var express = require('express');
var app = express();
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/test');


// var UserController = require('./controllers/UserController');
// app.use('/users', UserController);

var port = process.env.PORT || 3000;
var server = app.listen(port, function() {
      console.log('Express server listening on port ' + port);
});

module.exports = app;

Update package.json to tell npm about the main script:

"scripts": {
       "start": "node app.js"
 }

You can run this code by

$ npm start

which starts the server on port 3000. You can open http://localhost:3000, which shows "Cannot GET /" because we have not set up a router yet. So we add a User module. For this we need a controller and a model, so create two directories, controllers and models, for better code organization. Add the following Mongoose-based code in the models/User.js file:

var mongoose = require('mongoose');
var UserSchema = new mongoose.Schema({
    name: String,
    email: String,
    password: String
});
mongoose.model('User', UserSchema);
module.exports = mongoose.model('User');

Now set up the controller file controllers/UserController.js, which also works as the router for the User APIs:

var express = require('express');
var router = express.Router();
var bodyParser = require('body-parser');
router.use(bodyParser.urlencoded({ extended: true }));

var User = require('../models/User');

router.post('/', function (req, res) {

    User.create({
        name : req.body.name,
        email : req.body.email,
        password : req.body.password
    },
    function (err, user) {
        if (err) return res.status(500).send("There was a problem adding the information to the database.");
        res.status(200).send(user);
    });

});

// RETURNS ALL THE USERS IN THE DATABASE
router.get('/', function (req, res) {

    User.find({}, function (err, users) {
        if (err) return res.status(500).send("There was a problem finding the users.");
        res.status(200).send(users);
     });

});

// RETURNS USER DETAILS IN THE DATABASE
router.get('/:id', function (req, res) {

    User.findOne({"_id":req.params.id}, function (err, users) {
        if (err) return res.status(500).send("There was a problem finding the users.");
        res.status(200).send(users);
    });

});

// UPDATE USER DETAILS IN THE DATABASE
router.put('/:id', function (req, res) {

    User.update({"_id":req.params.id}, {
        name : req.body.name,
        email : req.body.email,
        password : req.body.password
    }, function (err, users) {
        if (err) return res.status(500).send("There was a problem finding the users.");
        res.status(200).send(users);
    });

});

// DELETE USER FROM THE DATABASE
router.delete('/:id', function (req, res) {
    User.remove({"_id":req.params.id}, function (err, users) {
        if (err) return res.status(500).send("There was a problem finding the users.");
        res.status(200).send(users);
     });

});

module.exports = router;

Now we have our user APIs ready: insert a single record (POST), get all records (GET), get one record (GET /:id), update a record (PUT /:id) and delete a record (DELETE /:id). But we still need to register the router with the app, so uncomment these lines in app.js:

var UserController = require('./controllers/UserController');
app.use('/users', UserController);

Now http://localhost:3000/users works with the POST & GET methods, and /users/:id with GET, PUT & DELETE. We can test the endpoints by invoking the respective methods via the curl CLI:

$ curl http://localhost:3000/users
$ curl -X POST -d 'email=kuldeep@gmail.com&name=Kuldeep&password=kamboj' http://localhost:3000/users

$ curl http://localhost:3000/users/5a47d942cb4f00c95de8f511

$ curl -X PUT -d 'email=kuldeepk@gmail.com&name=KuldeepK&password=kamboj' http://localhost:3000/users/5a47d942cb4f00c95de8f511

$ curl -X DELETE  http://localhost:3000/users/5a47d942cb4f00c95de8f511

 

Leaflet maps with WebSocket / MQTT integration

Leaflet (http://leafletjs.com) is a JavaScript library for maps, with a lot of options. Let us set it up and try it.

You can download and configure it directly as shown in its examples; I chose the yarn option.

$ mkdir client

$ cd client

$ yarn add leaflet

The above command creates a node_modules folder in the current directory, downloads Leaflet and adds it there.

Then write the HTML/JavaScript code for the Leaflet map:

<link rel="stylesheet" href="node_modules/leaflet/dist/leaflet.css">
<script src="node_modules/leaflet/dist/leaflet.js" type="text/javascript"></script>

<div id="map" style="height: 400px;"></div>

<script type="text/javascript">

var map = L.map('map').setView([28.7041, 77.1025], 12);

L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
  attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
}).addTo(map);

L.marker([28.7041, 77.1025]).addTo(map)
  .bindPopup('Marker Here')
  .openPopup();

</script>

The above code shows a map centered on New Delhi, India. It also has a marker with an info popup.

This is the simple case of a map with a marker and an info popup, but sometimes we need to show real-time data, for example adding a marker on the fly when a new location update arrives. There are several ways to achieve this; one good solution is to use the WebSocket protocol to receive the updates.

Let us first set up our data sender. I use the Mosquitto MQTT broker and a PHP script for this. The MQTT protocol works on a pub/sub model, so our server-side PHP script acts as the publisher while the JS application acts as the subscriber.

But first we need an MQTT broker; Mosquitto is a recommended one. On Ubuntu (or other deb-based systems) the following command installs Mosquitto.

$ sudo apt-get install mosquitto mosquitto-clients

Also create a custom Mosquitto config file at /etc/mosquitto/conf.d/mosquitto.conf and put the following settings into it.

port 1883
protocol mqtt

listener 9001
protocol websockets

autosave_interval 10
autosave_on_changes false

The above settings set the plain MQTT port (used for publishing) to 1883 and open port 9001 for WebSocket subscribers. Restart the mosquitto service.

$ service mosquitto restart

We can test subscribing/publishing with the built-in commands:

# Test Subscriber
$ mosquitto_sub -h localhost -t test

Here we are subscribing on localhost to the topic "test".

# Test Publisher
$ mosquitto_pub -h localhost -t test -m "hello world"

Here we are publishing the message "hello world" to the broker on localhost on the topic "test".

Now we set up the PHP script. We use the dependency manager Composer to install the script's dependencies. You can download composer.phar from https://getcomposer.org.

$ mkdir server

$ cd server

$ php composer.phar init

$ php composer.phar require bluerhinos/phpmqtt dev-master

The above commands create a vendor folder and install the phpmqtt package into it. This is a PHP package for MQTT.

Then create a PHP script that publishes MQTT data; let us call it publish.php and put the following code into it.

<?php
require("vendor/autoload.php");
use Bluerhinos\phpMQTT;

$server = "localhost";          // change if necessary
$port = 1883;                   // change if necessary
$username = "";                 // set your username
$password = "";                 // set your password
$client_id = "mqtt-publisher";  // make sure this is unique for connecting to server - you could use uniqid()

$coords = [
  [ 28.6442033, 77.1118256, 'Location 1' ],
  [ 28.5501396, 77.1882317, 'Location 2' ],
  [ 28.6359866, 77.2608032, 'Location 3' ],
  [ 28.6805603, 77.1991786, 'Location 4' ]
];

$mqtt = new phpMQTT($server, $port, $client_id);
if ($mqtt->connect(true, NULL, $username, $password)) {
    for ($i = 0; $i < count($coords); $i++) {
        $mqtt->publish("map/coordinates", json_encode($coords[$i]), 0);
        sleep(5);
    }
    $mqtt->close();
} else {
    echo "Time out!\n";
}

The above script connects to the MQTT broker and then publishes a latitude/longitude pair every 5 seconds. Real applications can plug in their own logic to publish data at the actual moment a location changes.

Now we make changes in our client script to subscribe to MQTT, but first install the MQTT package for JavaScript.

$ cd client

$ yarn add paho-mqtt

Now it is time to change the client script.

<link rel="stylesheet" href="node_modules/leaflet/dist/leaflet.css">
<script src="node_modules/leaflet/dist/leaflet.js" type="text/javascript"></script>
<script src="node_modules/paho-mqtt/paho-mqtt-min.js" type="text/javascript"></script>

<div id="map" style="height: 400px;"></div>

<script type="text/javascript">

var map = L.map('map').setView([28.7041, 77.1025 ], 12);

L.tileLayer('http://{s}.tile.osm.org/{z}/{x}/{y}.png', {
 attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
 }).addTo(map);

document.addEventListener('DOMContentLoaded', function() {
   mqtt_init();
}, false);

function mqtt_init() {
    // Create a client instance (connects to the broker's websockets listener on port 9001)
    var client = new Paho.MQTT.Client('localhost', 9001, "mqtt-subscriber");

   // set callback handlers
   client.onConnectionLost = onConnectionLost;
   client.onMessageArrived = onMessageArrived;

   // connect the client
    client.connect({onSuccess:onConnect});

   // called when the client connects
   function onConnect() {
    // Once a connection has been made, make a subscription and send a message.
      console.log("onConnect");
      client.subscribe("map/coordinates");
   }

   // called when the client loses its connection
   function onConnectionLost(responseObject) {
       if (responseObject.errorCode !== 0) {
          console.log("onConnectionLost:"+responseObject.errorMessage);
       }
    }

    // called when a message arrives
    function onMessageArrived(message) {
       console.log("onMessageArrived:" + message.payloadString);
       msg = JSON.parse(message.payloadString);
       L.marker([msg[0], msg[1] ]).addTo(map)
          .bindPopup(msg[2])
          .openPopup();

    }
 }

</script>

 

When we run our server publish script, we see a new marker appear after each interval (5 seconds), which can become an effective real-time solution for maps.

Full code can be found at https://github.com/kuldeep-k/leaflet-mqtt

 

colorShades jQuery Plugin

colorShades is a jQuery plugin I developed. It generates an HTML/CSS component that shows color shades for a selected color, attached to an HTML input container.

The following code is needed to show the widget:

$(field).colorShades({
   l_items: LIGHTER_SHADES_COUNT,
   r_items: DARKER_SHADES_COUNT,
   step: STEPS,
   base_color: BASE_COLOR,
   onSelect : CALLBACK
});

Example:

$('#fieldId').colorShades({
   l_items: 10,
   r_items: 10,
   step: 3,
   base_color: '#FF0000',
   onSelect : function(color) {
       console.log('Selected Color '  + color);
   }
});

BASE_COLOR is the hex color code of the color whose shades are needed.
LIGHTER_SHADES_COUNT is the number of shades lighter than the base color.
DARKER_SHADES_COUNT is the number of shades darker than the base color.
STEPS defines how much the color changes between adjacent palette colors; more steps means a bigger change.
CALLBACK sets custom functionality to run when a color is selected.

Screenshot: color-shades-demo (image)
Project Home Page : https://github.com/kuldeep-k/colorShades

Note : Plugin requires following 3rd party libraries.

jQuery
TinyColor

Composite key type duplicate key check with ZF2 Doctrine

There are instances when we need composite keys in our databases. Then we also need a corresponding check on the application side, which is essentially a duplicate check across multiple fields.

In ZF2 we know the Zend\Validator\Db\RecordExists & Zend\Validator\Db\NoRecordExists combo for duplicate checks on the database side (Zend DB). There are respective relatives in Doctrine called DoctrineModule\Validator\ObjectExists & DoctrineModule\Validator\NoObjectExists. They do the same job as their Zend DB counterparts, with one caveat.

They do not support multiple fields when working with InputFilters. You can pass an array, but that gives the error "zf2 Provided values count is 1, while expected number of fields to be matched is 2". The solution is to write your own custom validator.

Here I wrote a validator plugin which extends DoctrineModule\Validator\NoObjectExists and adds support for multiple fields. See the usage code below.

$inputFilter->add($factory->createInput(array(
    'name' => 'studentId',
    'required' => true,
    'filters' => array(
        array('name' => 'StripTags'),
        array('name' => 'StringTrim'),
    ),
    'validators' => array(
        array(
            'name' => 'Student\Validate\NoObjectExists',
            'options' => array(
                'object_repository' => $this->getObjectManager()->getRepository('Student\Entity\Student'),
                'fields' => array('studentId', 'class'),
            )
        )
    ),
)));

The validator plugin code is as follows:

namespace Student\Validate;

use DoctrineModule\Validator\NoObjectExists as DoctrineModuleNoObjectExists;

class NoObjectExists extends DoctrineModuleNoObjectExists
{
  protected $messageTemplates = array(
   self::ERROR_OBJECT_FOUND => "An object matching combination of fields was found",
  );

  public function isValid($value, $context = null)
  {
    $valueArray = array();
    foreach($this->fields as $name => $val)
    {
       $valueArray[] = $context[$val];
    }
    $value = $this->cleanSearchValue($valueArray);

    $match = $this->objectRepository->findOneBy($value);

    if (is_object($match)) {
       $this->error(self::ERROR_OBJECT_FOUND, $value);
       return false;
    }

    return true;
  }
}

The full code can be found at: Github link

PHP 5.4 new features

PHP has introduced a lot of interesting features in its last few major versions. Some are obviously inspired by recent programming trends and by other languages, but they make PHP more elegant and pleasant to code in. Below I discuss some of the PHP 5.4 features I liked.

  • Short Array Syntax
  • Binary Numbers
  • Class::{expr}() and (new foo)->bar() syntax
  • Traits
  • Array Dereferencing

Short Array Syntax

This is my personal favorite feature. PHP has a long history of defining arrays through a language construct like this:

[code lang=php]
$single_level = array(1, 2, "a" => "bcd");
$multi_level = array('first' => array(1, 2), 'second' => array(3,4), 'third' => array('3f' => array(5,6), '3s' => array(7,8)));
[/code]

Now we have a short syntax which eliminates the array keyword, using the more mainstream bracket notation instead.

[code lang=php]
$single_level = [1, 2, "a" => "bcd"];
$multi_level = ['first' => [1, 2], 'second' => [3,4], 'third' => ['3f' => [5,6], '3s' => [7,8]]];
[/code]

Binary Numbers

PHP has had support for octal and hexadecimal numbers for a long time, but the same privilege was not available for binary numbers; functions like decbin & bindec only deal with string representations of binary numbers. Now we have binary number support, and they can be defined like this:

[code lang=php]
$bin = 0b11011;
[/code]

Note the 0b prefix which marks a binary number. We already had the prefixes 0 and 0x for octal and hexadecimal respectively.

Class::{expr}()

If we needed to call a dynamically named method on a class in earlier versions, we wrote something like the following code:

[code lang=php]
class A
{
    public static function functA()
    {
        echo "call A".PHP_EOL;
    }
    public static function functB()
    {
        echo "call B".PHP_EOL;
    }
}

$vars = array('A', 'B');
foreach ($vars as $var)
{
    $c = 'funct'.$var;
    A::$c();
}
[/code]

Now in 5.4, instead of building a variable for the method name, we can use the Class::{expr}() syntax directly, e.g. A::{'funct'.$var}();

(new foo)->bar()

If we needed to call a method on a newly created object in earlier versions, we wrote something like this:

[code lang=php]
class A
{
    public $var = 10;
    public function methd()
    {
        echo $this->var.PHP_EOL;
    }
}

$a = new A();
$a->methd();
[/code]

Now in 5.4, instead of creating a variable just to call one method, we can write:

[code lang=php]
(new A)->methd();
[/code]

Like the previous example, this one too eliminates the need for a temporary variable just to make a single method call.

Traits

I wrote a separate blog post introducing traits at https://kuldeep15.wordpress.com/2015/03/17/php-traits/

Array De-referencing

PHP now provides a way to avoid creating a temporary variable when we need to access a particular member of an array, especially when we are accessing data returned directly from a function.

In previous versions :

[code lang=php]
function foo()
{
    return array("a" => "First", "b" => "Second");
}

$a = foo();
echo $a["b"];
[/code]

Using array de-referencing

[code lang=php]
function foo()
{
    return array("a" => "First", "b" => "Second");
}

echo foo()["b"];
[/code]

You can also use it for some special cases like the one below.

[code lang=php]
$str = "A,B,C,D,E";
echo explode(',',$str)[2];
[/code]

In further articles I will explore interesting features from PHP 5.5 & 5.6.