NodeJS / Express on Azure Web Sites with an Azure Blob Session Store

Getting NodeJS and Express running on Azure Web Sites is really easy.  The Node/Express/Azure community is fantastic and provides wonderful guidance.  However, I ran into a few challenges and I wanted to document them here in case they help anyone else.

Express Sessions

Out of the box, Express gives you in-memory sessions.  When you scale your sites in Azure (love that feature), in-memory sessions don’t work so well anymore.  I needed a central place to store the sessions.  Azure Cache or Redis makes a fast but expensive session provider, and I really wanted something cheap.  So I chose Azure Blob Storage and needed to write a custom provider.

The connect session documentation was a huge help, and I used TJ Holowaychuk’s implementation of a Redis session store as a guide.  To get started I created an object with three functions: get, set, and destroy.  Each function implements a call to Azure Blob Storage for basic CRUD operations on the session object.


//Implemented from https://github.com/expressjs/session
//                 https://github.com/tj/connect-redis

var azure = require("azure-storage");

function azureSessionStoreFactory(session) {
    var Store = session.Store;
    function AzureSessionStore(options) {
        options = options || {};  // default the options before using them
        this.config = options;
        this.blobService = azure.createBlobService(options.storageAccountKey);
        Store.call(this, options);
    }
    AzureSessionStore.prototype.__proto__ = Store.prototype;
    
    var azureSessionPrototype = AzureSessionStore.prototype;
    azureSessionPrototype.get = function (sid, callback) {
        var self = this;
        self.blobService.getBlobToText('websessions', sid, function (err, result) {
            if (err) {
                // A missing blob just means there is no session yet, not a failure
                if (err.statusCode === 404) {
                    return callback(null, null);
                }
                return callback(err, null);
            }
            callback(null, JSON.parse(result));
        });
    };
    
    azureSessionPrototype.set = function (sid, session, callback) {
        var self = this;
        self.blobService.createBlockBlobFromText('websessions', sid, JSON.stringify(session), function (err, results) {
            if (err) {
                callback(err, null);
            } else {
                callback(null, session);
            }
        });
    };
    
    azureSessionPrototype.destroy = function (sid, callback) {
        var self = this;
        self.blobService.deleteBlobIfExists('websessions', sid, function (err) {
            if (err) {
                callback(err, null);
            }
            else {
                callback();
            }
        });
    };

    return AzureSessionStore;
}

module.exports = azureSessionStoreFactory;
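
One assumption baked into the store above: the websessions container must already exist.  A minimal sketch of creating it at startup, using azure-storage’s createContainerIfNotExists:

// Run once at startup so the store has a container to write to.
// 'websessions' matches the container name hard-coded in the store above.
// connectionString is a placeholder for your storage credentials.
var azure = require("azure-storage");
var blobService = azure.createBlobService(connectionString);
blobService.createContainerIfNotExists('websessions', function (err) {
    if (err) {
        console.error('Could not create session container', err);
    }
});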

To use our new Azure Blob session store we can plug it into Express with the following code.

var express = require('express');
var session = require('express-session');
// the factory we exported above; the require path is an assumption
var AzureSessionStore = require('./azure-session-store')(session);

var app = express();
var sessionStore = new AzureSessionStore({ storageAccountKey: cloudStorage.primaryStorageAccount });

app.use(session({
    store: sessionStore,
    secret: '5up3r s3cret p@ssw0rd',
    resave: true,
    saveUninitialized: true
}));

While this is not the fastest session store, it is a whole lot cheaper than Azure Cache or hosting Redis on a virtual machine.

Check out the code on GitHub https://github.com/rocketcoder/connect-session-azure-blob

Getting NodeJS/Express on Azure Web Sites to only serve https

Azure has done a fantastic job of making it easy to deploy a NodeJS application to Azure Web Sites.  However, one thing that took a bit of searching was how to force Azure Web Sites to serve only https requests.  Fortunately this is really easy.  I found the answer on Scott Hanselman’s blog, but I am also posting it here.

Add the following to your web.config, inside the system.webServer section.  (In Azure Web Sites, NodeJS is hosted behind IIS, and the web.config controls how IIS behaves.)

    <rewrite>
      <rules>
        <clear />
        <rule name="Redirect to https">
          <match url="(.*)"/>
          <conditions>
            <add input="{HTTPS}" pattern="Off"/>
            <add input="{REQUEST_METHOD}" pattern="^get$|^head$|^post$" />
          </conditions>
          <action type="Redirect" url="https://{HTTP_HOST}/{R:1}"/>
        </rule>
        <rule name="app" enabled="true" patternSyntax="ECMAScript" stopProcessing="true">
          <match url="iisnode.+" negate="true" />
          <conditions logicalGrouping="MatchAll" trackAllCaptures="false" />
          <action type="Rewrite" url="bin\www" />
        </rule>
      </rules>
    </rewrite>
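
If you would rather enforce this in Node itself, Azure Web Sites terminates SSL at its front-end load balancer and adds an x-arr-ssl header to requests that arrived over https.  A minimal Express middleware sketch based on that header (register it before your routes):

// Redirect any request that did not arrive over https.
// Azure Web Sites sets the x-arr-ssl header only on https requests.
app.use(function (req, res, next) {
    if (req.headers['x-arr-ssl']) {
        return next(); // already https
    }
    res.redirect('https://' + req.headers.host + req.url);
});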

NodeJS / Express / PassportJS building a local security strategy

Getting security set up in Express with PassportJS had a slight learning curve for me.  PassportJS uses a variety of strategies to implement security.  I needed a custom security provider backed by Azure Table Storage.  Azure Table Storage is cheap and lightweight, so it was a great fit for a developer on a budget.  With Passport, the key object to implement is the User object.  User implements a few key functions: findOne, save, and isValidPassword.  I also used the bcrypt package to hash the passwords.  If you are looking for a more in-depth tutorial on how to use PassportJS, check out http://scotch.io/tutorials/javascript/easy-node-authentication-setup-and-local

var azure = require("azure-storage");
var q = require('q');
var config = require('../config.js');
var bCrypt = require('bcrypt-nodejs');

function User() { }
   
User.UserFactory = function() {
    var user = {
        userName : "",
        password : "",
        enabled : true,
        email : "",
        validated : false
    };
    user.isValidPassword = User.isValidPassword;
    return user;
}
   

User.findOne = function (userName) {
    var tableService = azure.createTableService(config.storageAccountKey);
    var deferred = q.defer();   
    tableService.retrieveEntity('CloudUser', userName, 'true', function (err, result) {
        if (err) {
            if (err.statusCode === 404)
                deferred.resolve(undefined);
            else
                deferred.reject(err);
        }
        else {
            deferred.resolve(User.toUser(result));               
        }
    });
    return deferred.promise;
}
 
User.toUser = function (userEntity) {
    var user = User.UserFactory();
    user.userName = userEntity.PartitionKey._;
    user.enabled = userEntity.RowKey._;
    user.password = userEntity.password._;
    user.email = userEntity.email._;
    user.validated = userEntity.validated._;
    return user;
};
   
User.toUserEntity = function (user) {
    return {
        PartitionKey: { '_': user.userName },
        RowKey: { '_': 'true' },
        password: { '_': user.password },
        email: { '_': user.email },
        validated: { '_': user.validated }
    };
};
   
User.createUser = function (user) {
    var deferred = q.defer();
    var tableService = azure.createTableService(config.storageAccountKey);
    function create(user) {                       
        tableService.insertEntity('CloudUser', User.toUserEntity(user), function (err, result) {
            if (err) {
                deferred.reject({ status: false, reason: "error creating user", err: err });
            }
            else {
                deferred.resolve({ status: true, reason: "" });
            }
        });
    }

    User.findOne(user.userName).then(function (result) {
        // findOne resolves with a user object (or undefined), not an array
        if (result) {
            deferred.resolve({ status: false, reason: "username exists" });
        }
        else {
            create(user);
        }
    }).
    fail(function (err) {
        deferred.reject({ status: false, reason: "error creating user", err: err });
    });

    return deferred.promise;
}
   
User.save = function (user) {
    var deferred = q.defer();
    var tableService = azure.createTableService(config.storageAccountKey);
    // convert the user back into a table entity before updating
    tableService.updateEntity('CloudUser', User.toUserEntity(user), function (err, result) {
        if (err) {
            deferred.reject({ status: false, reason: "error saving user" });
        }
        else {
            deferred.resolve({ status: true, reason: "" });
        }
    });
    return deferred.promise;
}
   
User.isValidPassword = function (user, password) {
    return bCrypt.compareSync(password, user.password);
}
   
// Generates hash using bCrypt
User.createHash = function (password) {
    return bCrypt.hashSync(password);
}

module.exports = User;
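
A quick sketch of how the hashing helpers fit together (the require path, user name, and password here are made up):

var User = require('./user'); // path is an assumption

var user = User.UserFactory();
user.userName = 'jdoe';
user.password = User.createHash('correct horse battery staple');

// compareSync hashes the candidate password and compares it to the stored hash
console.log(User.isValidPassword(user, 'correct horse battery staple')); // true
console.log(User.isValidPassword(user, 'wrong password'));               // false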

Now that we have a User object that can find, save, and validate passwords, we need to plug it into a local strategy.

var LocalStrategy = require('passport-local').Strategy;
var User = require('./user'); // the User module above; path is an assumption

module.exports = function (passport) {

    passport.use('login', new LocalStrategy({
        passReqToCallback: true
    },
    function (req, username, password, done) {
        // check whether a user with this username exists
        User.findOne(username).then(function (user) {
            if (!user) {
                console.log('User Not Found with username ' + username);
                return done(null, false, req.flash('message', 'User Not found.'));
            }
            // User exists but the password is wrong, log the error
            if (!User.isValidPassword(user, password)) {
                console.log('Invalid Password');
                return done(null, false, req.flash('message', 'Invalid Password')); // redirect back to login page
            }
            // User and password both match, return the user from the done method,
            // which is treated as success
            return done(null, user);
        }).fail(function (err) {
            // In case of any error, return using the done method
            return done(err);
        });
    }));
}

Of course we will also need a strategy to sign up new users.

var LocalStrategy = require('passport-local').Strategy;
var User = require('./user'); // the User module above; path is an assumption

module.exports = function (passport) {
    passport.use('signup', new LocalStrategy({
        passReqToCallback: true // allows us to pass the entire request to the callback
    },
    function (req, userName, password, done) {

        var findOrCreateUser = function () {
            // look for an existing user with the provided username
            User.findOne(userName).then(function (user) {
                // already exists
                if (user) {
                    console.log('User already exists with username: ' + userName);
                    return done(null, false, req.flash('message', 'User Already Exists'));
                } else {
                    // if there is no user with that username, create one
                    var newUser = User.UserFactory();

                    // set the user's local credentials
                    newUser.userName = userName;
                    newUser.password = User.createHash(password);
                    newUser.email = req.param('email');
                    newUser.firstName = req.param('firstName');
                    newUser.lastName = req.param('lastName');
                    // save the user
                    User.createUser(newUser).then(function (result) {
                        console.log('User Registration successful');
                        return done(null, newUser);
                    }).fail(function (err) {
                        console.log('Error in Saving user: ' + err);
                        return done(err);
                    });
                }
            }).fail(function (err) {
                // In case of any error, return using the done method
                console.log('Error in SignUp: ' + err);
                return done(err);
            });
        };
        // Delay the execution of findOrCreateUser and execute it
        // on the next tick of the event loop
        process.nextTick(findOrCreateUser);
    }));
}

And finally plug the strategies into Passport with the following code.


// Initialize Passport (passport here is require('passport'); login and
// signup are the strategy modules defined above)
app.use(passport.initialize());
app.use(passport.session());

// Passport needs to be able to serialize and deserialize users to support persistent login sessions
passport.serializeUser(function (user, done) {
    done(null, user.userName);
});

passport.deserializeUser(function (id, done) {
    User.findOne(id).then(function (user) {
        done(null, user);
    }).fail(function (err) {
        done(err);
    });
});

// Setting up Passport Strategies for Login and SignUp/Registration
login(passport);
signup(passport);
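
With the strategies registered, the routes just delegate to passport.authenticate.  A sketch of the wiring (the redirect paths are made up):

// Hypothetical route wiring for the two strategies registered above.
app.post('/login', passport.authenticate('login', {
    successRedirect: '/home',
    failureRedirect: '/login',
    failureFlash: true
}));

app.post('/signup', passport.authenticate('signup', {
    successRedirect: '/home',
    failureRedirect: '/signup',
    failureFlash: true
}));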

You can check out the code at https://github.com/rocketcoder/passportjs-AzureTableStorge

Sharepoint 2013 App with an External List sourced from WebAPI and OData

Sharepoint 2013 Apps

Sharepoint has gotten a bad reputation over the years. And it is well deserved. I have always hated working with it. But Sharepoint 2013 is different. The new App model is developer friendly and easy to pick up. To completely oversimplify a Sharepoint 2013 App: it allows you to write a modern web app using jQuery, Knockout, Angular, or any other client-side JavaScript framework and plug it into Sharepoint 2013 or Office 2013. So all the goodness of Sharepoint 2013 plus all the goodness of the modern web.

Sharepoint 2013 also offers External Lists. An External List is a Sharepoint 2013 abstraction over an external data source. In this case that external data source is an OData service that surfaces data from our SQL Azure database.

These two Sharepoint 2013 features provide an excellent opportunity to integrate your web application’s data into your Sharepoint 2013 / Office 365 sites.  Sharepoint 2013 becomes a hub for your back office, integrating data from multiple cloud platforms into a single portal. And OData is the pipe used to connect the data sinks.

Creating an OData service in 5 easy steps

  1. In Visual Studio 2012 create a new ASP.NET MVC 4 project. Update all of the NuGet packages and add the Microsoft.AspNet.WebApi.OData package.
  2. Add a new Empty WebApi Controller.
  3. Open the controller and change the class to inherit from EntitySetController instead of ApiController. EntitySetController takes two type arguments: the type of the entity being served and the type of its primary key.
  4. Add methods for retrieving a list of items.

(See http://msdn.microsoft.com/en-us/magazine/dn201742.aspx by Julie Lerman for more information. The following code on setting up an OData service on WebApi was influenced by her article.)

public class AirlinesController : EntitySetController<Airline, int>
{
    private AirlineContext db = new AirlineContext();
    List<Airline> airlines = new List<Airline>();

    public AirlinesController() : base()
    {
        airlines.Add(new Airline() { ID = 1, Description = "southwest flight 1", Name = "flight 1" });
        airlines.Add(new Airline() { ID = 2, Description = "southwest flight 2", Name = "flight 2" });
    }

    public override IQueryable<Airline> Get()
    {
        return airlines.AsQueryable();
    }
}
  5. Add routes to the WebApiConfig.cs:
public static void Register(HttpConfiguration config)
{
  ODataModelBuilder modelBuilder = new ODataConventionModelBuilder();
  modelBuilder.EntitySet<Airline>("Airlines");
  Microsoft.Data.Edm.IEdmModel model = modelBuilder.GetEdmModel();
  config.Routes.MapODataRoute("ODataRoute", "odata", model);
  config.Routes.MapHttpRoute( name: "DefaultApi", routeTemplate: "api/{controller}/{id}", defaults: new { id = RouteParameter.Optional } );
}
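
Before wiring Sharepoint up, it is worth smoke-testing the endpoint.  A quick sketch from Node (the host name is a placeholder, and the exact shape of the payload depends on the OData format your service returns):

// Fetch the Airlines entity set and dump the raw response.
var https = require('https');

https.get('https://yourapp.azurewebsites.net/odata/Airlines', function (res) {
    var body = '';
    res.on('data', function (chunk) { body += chunk; });
    res.on('end', function () {
        // OData JSON responses wrap the entities in a "value" (or "d.results") array.
        console.log(body);
    });
}).on('error', console.error);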

You will need to deploy this somewhere that the Sharepoint server can access. Azure is a great cloud platform and works well. Another option is to set up a Service Bus relay to your local machine. Either way, Sharepoint needs to be able to reach your newly created OData service.

Sharepoint 2013 App

So now we have an OData service that will allow Sharepoint to integrate with our awesome airline web application. (The remainder of this walkthrough assumes you have an Office 365 developer account.  If you have MSDN you get a free Office 365 developer account, or you can sign up for one at http://msdn.microsoft.com/en-us/library/fp179924.aspx)

  1. Open Visual Studio 2012 and create a new Sharepoint App (select the Sharepoint-hosted option).
  2. Right-click on the project and add a new content type for an external data source, aka an External Content Type. Point the external content type at your deployed OData service.
  3. Now create a new page in the Pages folder.
  4. Add the following code:
<script
    type="text/javascript"
    src="//ajax.aspnetcdn.com/ajax/jQuery/jquery-1.7.2.min.js">
</script>
<script
    type="text/javascript"
    src="/_layouts/15/sp.runtime.js">
</script>
<script
    type="text/javascript"
    src="/_layouts/15/sp.js">
</script>
<script>
    $(document).ready(function () {
        // Get the URI decoded URLs.
        var hostweburl = decodeURIComponent(getQueryStringParameter("SPHostUrl"));

        // Get the URI decoded URLs.
        var appweburl = decodeURIComponent(getQueryStringParameter("SPAppWebUrl"));

        jQuery.ajax({
            url: appweburl + "/_api/web/lists/getbytitle('Airlines')/items",
            type: "GET",
            headers: { "Accept": "application/json;odata=verbose" },
            success: function (data, textStatus, xhr) {
                var dataResults = data.d.results;
                // use a new name here; redeclaring "data" would shadow the response
                var name = dataResults[0].Name;
                $("body").append("<p> the data is " + name + "</p>");
            },
            error: function (xhr, textStatus, errorThrown) {
                alert("error:" + JSON.stringify(xhr));
            }
        });

        // Function to retrieve a query string value.
        // For production purposes you may want to use
        // a library to handle the query string.
        function getQueryStringParameter(paramToRetrieve) {
            var params =
                document.URL.split("?")[1].split("&");
            var strParams = "";
            for (var i = 0; i < params.length; i = i + 1) {
                var singleParam = params[i].split("=");
                if (singleParam[0] == paramToRetrieve)
                    return singleParam[1];
            }
        }
    });
</script>

Now start debugging the solution.  You should be prompted for your Sharepoint user name and password.  Visual Studio will automatically deploy the application to Sharepoint.  And that is it: Sharepoint 2013 + OData.

Why ignoring the cloud is killing your profits

Your IT guy is lying to you. There. I said it. The truth is, the cloud is not bad for your business. The cloud is bad for him. Email, servers, web sites, even databases can all be put in the cloud and require little to no maintenance. And it can be done with significant savings over your existing on-premise solutions. And it comes with better service. Does your IT guy offer a 99.9% uptime guarantee?

The cloud puts your IT guy out of a job and he knows it. Oh sure, he could study and work hard and drive innovative solutions to improve your competitiveness. But that is really hard, and most IT pros aren’t going to do that. Instead they lie and insist that on-premise is better and more secure. For most small businesses nothing could be further from the truth.

Let me prove my point:

Let’s look at a basic office setup: email, document management, calendars, project management, and collaboration.

Microsoft Office 365 for small business
  • Exchange for email and calendars
  • Sharepoint for document management and project management
  • Lync for collaboration
  • $5.00 per month per user
  • 99.9% uptime guarantee
  • disaster recovery
  • 25 employees: annual cost $1,500
Microsoft Small Business (on premise)
  • Server Hardware
  • Exchange Licensing
  • Windows Server Licensing
  • Sharepoint Licensing
  • SQL Server Licensing
  • Server Administrator
  • no uptime guarantee
  • limited disaster recovery
  • 25 employees: total cost is a lot more than the cloud!

Did you notice one very large cost missing from the cloud column? Yep: server administrators. That job just got outsourced to Microsoft.

Not the worst of it

The financial cost is not the worst of it. The worst costs come from decreased competitiveness. With on-premise solutions you have to keep the hardware and software for 3 to 5 years to justify the return on investment. 3 to 5 years is a prison sentence.

Think about the business climate five years ago. Did you need a mobile app? Smart phones and tablets were not mainstream in 2008, but they are today. With a cloud solution you pay as you go. No long-term ROI. You pay for what your business needs. Period.

The other critical disadvantage is flexibility. If your business doubled or even tripled next year, could your existing technology investment support that? At the very least you would need more licensing and possibly more hardware. The cloud is made to scale. And you pay as you go.

What next?

You need experts. Hire some talented folks to come in and do an analysis of your business. Ask them to deliver an ROI analysis for moving to the cloud. For most businesses this will be a very simple decision.

As for your IT guy: you will still need him, but in new ways. He doesn’t need to do upgrades or security patches or maintenance anymore. What he needs to be doing is becoming an expert on the cloud and applying that technology to solve business problems. And that is a very valuable person to have on your team.

NodeJS – more than just a totally awesome web app platform.

So I needed to create a monitoring app that would hit a RESTful interface that required authentication. The only way to hit this RESTful service was to POST to a login page and then manage the authentication cookies for the remainder of the session. And I needed to get this done fast.

This is why I love NodeJS. NodeJS is more than a totally awesome web app platform. It makes for a great client application too.

NodeJS for the command line

The following is the basic code for kicking off an HTTPS request. The options argument contains the URL, method, and any headers (including cookies). The data argument is for any data that needs to be POSTed with the request. (Make sure you set the appropriate headers for the data, like Content-Type.)


var cookie = "";
function processRequest(options, data){
  var req = https.request(options, function(res) {
    res.on('data', function(data) {
      cookie = processResponse(data);
    });
  });
  if(data){
    //POST data get written here.
    //Not included in the options like Jquery
    req.write(data);
  }
  req.end();
  req.on('error', function(e) {
    console.error(e);
  });
}

The magic is in the processResponse function. This is where we capture the cookie and store it for the next request.


function processResponse(res){
  var cookies = null;
  if(res.headers && res.headers["set-cookie"]){
    cookies = res.headers["set-cookie"].join(";");
    cookies = cookies.replace(/path=\/; HttpOnly;/g, "");
    cookies = cookies.replace(/ path=\/; HttpOnly/g, "");
    cookies = cookies.trim();
    cookies = cookies + " otherCookie=oatmealraisen";
  }
  return cookies;
}

Finally we can create the options objects and pass them to the processRequest function.


var loginData = "UserName=XXXXXXXXX&Password=XXXXXXXX&RememberMe=false";
function login(){
  var headers = { 'Content-Type': 'application/x-www-form-urlencoded', 'Content-Length': loginData.length };
  var options = { hostname: 'ACME.COM', port: 443, path: '/LogOn', method: 'POST', headers: headers };
  return options;
}
function dataSummary(cookies){
  var headers = { Cookie: cookies };
  var options = { hostname: 'ACME.com', port: 443, path: '/DataSummary', method: 'GET', headers: headers };
  return options;
}
processRequest(login(), loginData); // the POST body has to be passed along
processRequest(dataSummary(cookie));
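
One caveat with those last two lines: processRequest is asynchronous, so the cookie will not be populated yet when the second call fires. In practice you would chain the authenticated request off the login response; a sketch:

// Chain the data request off the login response so the session
// cookie is captured before we try to use it.
var req = https.request(login(), function (res) {
  var cookies = processResponse(res);
  res.on('data', function () { /* drain the login response body */ });
  res.on('end', function () {
    processRequest(dataSummary(cookies));
  });
});
req.write(loginData);
req.end();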

NodeJS works nicely as an HTTPS client. The next step is to hook up a send-email function, but that is for another blog.

How to use nitrous.io to create a quick and easy web server in less than 5 minutes

Nitrous.io is a cloud service provider that gives you a Linux VM running NodeJS with an IDE and terminal, all from your browser.   In less than 5 minutes you can have a NodeJS web server up and running in the cloud.  Here is how:

1.  Sign up for a free Nitrous.io account.

2.  Create a new box.  Be sure to select the NodeJS stack.

3.  From the terminal type:

mkdir web

cd web

npm install -g express

express

npm install

4.  Now you can start uploading your static content files to the public folder.

5.  Start the node server (make sure you are in the web folder):

cd ~/web

node app.js

Now your web site is running.  On the main menu click Preview and then select port 3000 to browse to your web page.
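
The generated app.js already serves anything in the public folder through Express’s static middleware.  If all you need is static hosting, the whole server boils down to something like this sketch:

// Minimal static file server, roughly what the express generator scaffolds.
var express = require('express');
var app = express();

// serve files out of ./public on the port the Preview menu expects
app.use(express.static(__dirname + '/public'));
app.listen(3000);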


Why?  Why not use a real IDE?

Nitrous.IO provides a full web app development environment from your browser.  Try running NodeJS on your iPad, Android, or Windows RT tablet; it’s not an option.  This is a powerful platform because we can be anywhere and, within minutes, be hacking on a new idea from a mobile device.