In this article I will teach you how to create a simple API that, once called via a URL like `http://localhost:5100/api/?url=https://example.com`, returns the Open Graph metadata of the linked page as JSON.

Throughout the article I will assume that you know how to properly install Node and have some working knowledge of npm.

First of all you may want to create a file named package.json and place these contents inside of it:

  "name": "richlink-middleman",
  "version": "1.0.0",
  "description": "",
  "main": "app.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  "author": "",
  "license": "ISC",
  "dependencies": {
    "express": "^4.14.1",
    "mongoose": "^4.8.4",
    "open-graph-scraper": "^2.4.2",
    "rxjs": "^5.2.0"

After that you can run npm install
This will install all the needed dependencies.

At this point we are ready to create our main file named app.js

// app.js — Express server exposing GET /api/?url=<target>.
// It scrapes the target page's Open Graph metadata and responds with JSON.
var express = require('express');

var Scraper = require('./scraper.js');

var scraper = new Scraper();

// RxJS 5: the documented full-bundle path is 'rxjs/Rx' (no '.js' suffix).
var Rx = require('rxjs/Rx');

var app = express();

// GET /api/?url=<target> — scrape <target> and return its Open Graph data.
app.get('/api/', function (req, res) {

    // req.param() is deprecated in Express 4; read the query string directly.
    var url = req.query.url;

    // Guard: without a target URL there is nothing to scrape.
    if (!url) {
        return res.status(400).json({ error: 'Missing required "url" query parameter' });
    }

    // scraper.scrape() returns a Promise (see scraper.js).
    var scrapedData = scraper.scrape(url);

    var requestStream = Rx.Observable.of(scrapedData);

    // Lift the Promise held by the stream into an Observable of its result.
    var responseStream = requestStream
        .flatMap(function (scrapedPromise) {
            return Rx.Observable.fromPromise(scrapedPromise);
        });

    responseStream.subscribe(
        function (response) { res.json(response); },   // next: send scraped data
        function (err) {                               // error: log and report failure
            console.log('Error: %s', err);
            res.status(500).json({ error: String(err) });
        },
        function () { console.log('Completed'); }      // complete
    );
});

app.listen(5100, function () {
  console.log('Example app listening on port 5100!');
});

module.exports = app;

At this point we need to create another file, named scraper.js. It wraps the open-graph-scraper library (one of the dependencies installed earlier by npm install) and passes it the URL to scrape.

// scraper.js — thin wrapper around the open-graph-scraper library.
var ogs = require('open-graph-scraper');

/**
 * Scraper fetches Open Graph metadata for a page.
 * Usage: new Scraper().scrape('https://example.com') -> Promise
 */
var scraper = function () {

    var self = this;

    /**
     * Scrape Open Graph data from the given URL.
     *
     * open-graph-scraper returns a Promise when called without a callback,
     * which is exactly what app.js wraps with Rx.Observable.fromPromise().
     * (The original callback form discarded the result via the comma
     * operator and returned nothing usable.)
     *
     * @param {string} input_url - absolute URL of the page to scrape
     * @returns {Promise<Object>} resolves with the scraped Open Graph data
     */
    self.scrape = function (input_url) {
        return ogs({ url: input_url });
    };
};

module.exports = scraper;

What pushed me to do this is seeing how tricky something like this is with PHP. You have to use cURL, and if you get many concurrent requests of this kind — with a user base of thousands of users posting their rich links — server performance takes a heavy hit.



Need a custom made app?

I can help you automate your company’s workflow.

Want a quote? Now is the right moment!

Related Post