
Commit

Merge pull request #183 from dadi/feature/provider-rewrite
Provider rewrite
jimlambie authored Jun 19, 2017
2 parents 9f6851b + 476ad5a commit 6664b28
Showing 17 changed files with 700 additions and 370 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,11 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

# [1.11.2] - 2017-06-19

## Changed
* modify the controller to make it initialise each datasource's data provider just before it is used, then destroy it after the data is returned
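
A minimal, illustrative sketch of the lifecycle this entry describes, assuming the Providers map and datasource shape used elsewhere in this commit (the ds, req and done names are hypothetical):

// Illustrative only: the provider is created just before loading and destroyed afterwards
var provider = new Providers[ds.source.type]()
provider.initialise(ds, ds.schema)

provider.load(req.url, function (err, result) {
  if (provider.destroy) provider.destroy() // release the provider once the data is returned
  provider = null

  if (err) return done(err)
  return done(null, result)
})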

# [1.11.0] - 2017-06-11

## Added
2 changes: 1 addition & 1 deletion dadi/lib/auth/index.js
@@ -10,7 +10,7 @@ var log = require('@dadi/logger')
// This attaches middleware to the passed in app instance
module.exports = function (server) {
server.app.use(function (req, res, next) {
log.info({module: 'auth'}, 'Retrieving access token for "' + req.url + '"')
log.debug({module: 'auth'}, 'Retrieving access token for "' + req.url + '"')
help.timer.start('auth')

return help.getToken().then(function (bearerToken) {
176 changes: 101 additions & 75 deletions dadi/lib/cache/datasource.js
@@ -7,16 +7,19 @@ var merge = require('deepmerge')
var path = require('path')
var url = require('url')

var Cache = require(path.join(__dirname, '/index.js'))
// var Cache = require(path.join(__dirname, '/index.js'))
var DadiCache = require('@dadi/cache')
var config = require(path.join(__dirname, '/../../../config.js'))
var log = require('@dadi/logger')

/**
* Creates a new DatasourceCache singleton for caching datasource results
* @constructor
*/
var DatasourceCache = function () {
this.cache = Cache().cache
// this.cache = Cache().cache
this.cacheOptions = config.get('caching')
this.cache = new DadiCache(this.cacheOptions)

DatasourceCache.numInstances = (DatasourceCache.numInstances || 0) + 1
// console.log('DatasourceCache:', DatasourceCache.numInstances)
@@ -27,32 +30,112 @@ var DatasourceCache = function () {
this.enabled = !(directoryEnabled === false && redisEnabled === false)
}

/**
* Get datasource data from the cache if it exists
* @param {object} datasource - a datasource schema object containing the datasource settings
* @param {fn} done - the method to call when finished, accepts 1 arg:
* if the cache key was found, returns {Buffer} data
* if the cache key was not found, returns false
*/
DatasourceCache.prototype.getFromCache = function (opts, done) {
debug('get (%s)', opts.name)

if (!this.cachingEnabled(opts)) {
return done(false)
}

if (this.stillCaching) {
return done(false)
}

var filename = this.getFilename(opts)
var options = this.getOptions(opts)

var buffers = []

// attempt to get from the cache
this.cache.get(filename, options).then((stream) => {
debug('serving %s from cache (%s)', opts.name, filename)
log.info('serving %s from cache (%s)', opts.name, filename)

stream.on('data', (chunk) => {
if (chunk) {
buffers.push(chunk)
}
})

stream.on('end', () => {
return done(Buffer.concat(buffers))
})
}).catch(() => {
// key doesn't exist in cache
return done(false)
})
}

/**
* Cache the supplied data if caching is enabled for the datasource
*
* @param {Object} datasource - the datasource instance
* @param {Buffer} data - the body of the response as a Buffer
* @param {fn} done - the method to call when finished, accepts args (Boolean written)
*/
DatasourceCache.prototype.cacheResponse = function (opts, data, done) {
var enabled = this.cachingEnabled(opts)

if (!enabled) {
return done(false)
}

if (this.stillCaching) {
// console.log('stillCaching...')
return done(false)
}

debug('write to cache (%s)', opts.name)

var filename = this.getFilename(opts)
var options = this.getOptions(opts)

// console.log('> CACHE RESPONSE')
// console.log('is Buffer?', Buffer.isBuffer(data))
// console.log(filename, opts.endpoint)

this.stillCaching = true

this.cache.set(filename, data, options).then(() => {
// console.log('< CACHE RESPONSE', filename)
this.stillCaching = false
return done(true)
})
}
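
As an illustration of the new opts-based API, a hypothetical write-then-read round trip; the datasource name, endpoint and caching values are invented, and the example assumes config.debug is off:

var dsCache = new DatasourceCache()

var opts = {
  name: 'articles',                          // hypothetical datasource name
  endpoint: '/1.0/library/articles?count=5', // hypothetical endpoint, used to build the cache key
  caching: { directory: { enabled: true } }  // per-datasource caching overrides
}

dsCache.cacheResponse(opts, Buffer.from('{"results":[]}'), function (written) {
  // written is true once the data has been flushed to the cache
  dsCache.getFromCache(opts, function (data) {
    // data is a Buffer when the key exists, false otherwise
    if (data !== false) console.log(data.toString())
  })
})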

/**
*
* @param {object} datasource - a datasource schema object containing the datasource settings
*/
DatasourceCache.prototype.cachingEnabled = function (datasource) {
DatasourceCache.prototype.cachingEnabled = function (opts) {
var enabled = this.enabled

// check the querystring for a no cache param
if (typeof datasource.provider.endpoint !== 'undefined') {
var query = url.parse(datasource.provider.endpoint, true).query
if (typeof opts.endpoint !== 'undefined') {
var query = url.parse(opts.endpoint, true).query
if (query.cache && query.cache === 'false') {
enabled = false
}
}

if (datasource.source.type === 'static') {
enabled = false
}
// if (datasource.source.type === 'static') {
// enabled = false
// }

if (config.get('debug')) {
enabled = false
}

var options = this.getOptions(datasource)
var options = this.getOptions(opts)

debug('options (%s): %o', datasource.name, options)
debug('options (%s): %o', opts.name, options)

// enabled if the datasource caching block says it's enabled
return enabled && (options.directory.enabled || options.redis.enabled)
@@ -66,13 +149,13 @@ DatasourceCache.prototype.cachingEnabled = function (datasource) {
* a unique cacheKey instead
* @param {object} datasource - a datasource schema object containing the datasource settings
*/
DatasourceCache.prototype.getFilename = function (datasource) {
var filename = crypto.createHash('sha1').update(datasource.name).digest('hex')
DatasourceCache.prototype.getFilename = function (opts) {
var filename = crypto.createHash('sha1').update(opts.name).digest('hex')

if (datasource.provider.cacheKey) {
filename += '_' + crypto.createHash('sha1').update(datasource.provider.cacheKey).digest('hex')
if (opts.cacheKey) {
filename += '_' + crypto.createHash('sha1').update(opts.cacheKey).digest('hex')
} else {
filename += '_' + crypto.createHash('sha1').update(datasource.provider.endpoint).digest('hex')
filename += '_' + crypto.createHash('sha1').update(opts.endpoint).digest('hex')
}

return filename
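
For illustration, the scheme above joins two SHA-1 hex digests with an underscore; the name and endpoint below are made up:

var crypto = require('crypto')

// With a hypothetical { name: 'articles', endpoint: '/1.0/library/articles' } and no cacheKey,
// the cache filename is sha1('articles') + '_' + sha1('/1.0/library/articles'):
var exampleFilename = crypto.createHash('sha1').update('articles').digest('hex') +
  '_' + crypto.createHash('sha1').update('/1.0/library/articles').digest('hex')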
@@ -83,73 +166,16 @@ DatasourceCache.prototype.getFilename = function (datasource) {
* @param {object} datasource - a datasource schema object containing the datasource settings
* @returns {object} options for the cache
*/
DatasourceCache.prototype.getOptions = function (datasource) {
var options = merge(this.cacheOptions, datasource.schema.datasource.caching || {})
DatasourceCache.prototype.getOptions = function (opts) {
var options = merge(this.cacheOptions, opts.caching || {})

options.directory.extension = 'json'

return options
}
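
A rough sketch of the merge behaviour, using invented values for the global caching config and a datasource's own caching block:

var merge = require('deepmerge')

// Hypothetical global config and per-datasource overrides
var globalCaching = { directory: { enabled: true, path: './cache/web', extension: 'html' }, redis: { enabled: false } }
var datasourceCaching = { directory: { ttl: 300 } }

var options = merge(globalCaching, datasourceCaching)
options.directory.extension = 'json' // always forced to json for datasource results

// options.directory => { enabled: true, path: './cache/web', extension: 'json', ttl: 300 }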

/**
*
* @param {object} datasource - a datasource schema object containing the datasource settings
*/
DatasourceCache.prototype.getFromCache = function (datasource, done) {
debug('get (%s)', datasource.name)

if (!this.cachingEnabled(datasource)) {
return done(false)
}

var filename = this.getFilename(datasource)
var options = this.getOptions(datasource)

var data = ''

// attempt to get from the cache
this.cache.get(filename, options).then((stream) => {
debug('serving %s from cache (%s)', datasource.name, filename)

stream.on('data', (chunk) => {
if (chunk) data += chunk
})

stream.on('end', () => {
return done(data)
})
}).catch(() => {
// key doesn't exist in cache
return done(false)
})
}

/**
*
*/
DatasourceCache.prototype.cacheResponse = function (datasource, data, done) {
var enabled = this.cachingEnabled(datasource)

if (!enabled) {
return done(false)
}

debug('write to cache (%s)', datasource.name)

var filename = this.getFilename(datasource)
var options = this.getOptions(datasource)

this.cache.set(filename, data, options).then(() => {
return done(true)
})
}
module.exports._reset = function () {}

module.exports = function () {
return new DatasourceCache()
}

module.exports._reset = function () {

}

module.exports.DatasourceCache = DatasourceCache
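
A short consumption sketch for the exports above; the require path and variable names are illustrative:

var path = require('path')

var DatasourceCache = require(path.join(__dirname, '/../cache/datasource'))

var dsCache = DatasourceCache()                   // factory: each call returns a fresh instance
var Constructor = DatasourceCache.DatasourceCache // the raw constructor is also attached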
54 changes: 40 additions & 14 deletions dadi/lib/controller/index.js
@@ -14,6 +14,7 @@ var log = require('@dadi/logger')

var Datasource = require(path.join(__dirname, '/../datasource'))
var Event = require(path.join(__dirname, '/../event'))
var Providers = require(path.join(__dirname, '/../providers'))
var View = require(path.join(__dirname, '/../view'))

// helpers
@@ -293,9 +294,9 @@ Controller.prototype.loadData = function (req, res, data, done) {

_.each(self.datasources, function (ds, key) {
if (ds.chained) {
chainedDatasources[key] = ds
chainedDatasources[key] = _.clone(ds)
} else {
primaryDatasources[key] = ds
primaryDatasources[key] = _.clone(ds)
}
})

@@ -328,23 +329,39 @@ Controller.prototype.loadData = function (req, res, data, done) {
})
}

processSearchParameters(ds.schema.datasource.key, ds, req)

help.timer.start('datasource: ' + ds.name)

ds.provider = new Providers[ds.source.type]()
ds.provider.initialise(ds, ds.schema)

// var requestUrl = processSearchParameters(ds.schema.datasource.key, ds, req)
processSearchParameters(ds.schema.datasource.key, ds, req)

/**
* Call the data provider's load method to obtain data
* for this datasource
* @returns err, {Object} result, {Object} dsResponse
*/
// ds.provider.load(requestUrl, function (err, result, dsResponse) {
ds.provider.load(req.url, function (err, result, dsResponse) {
help.timer.stop('datasource: ' + ds.name)
if (err) return done(err)

if (dsResponse) {
return done(null, result, dsResponse)
if (ds.provider.destroy) {
ds.provider.destroy()
}

ds.provider = null

if (err) return done(err)

if (dsResponse) return done(null, result, dsResponse)

// TODO: simplify this, doesn't require a try/catch
if (result) {
try {
data[ds.schema.datasource.key] = (typeof result === 'object' ? result : JSON.parse(result))
data[ds.schema.datasource.key] = result
} catch (e) {
console.log('Provider Load Error:', ds.name, ds.provider.endpoint)
console.log('Provider Load Error:', ds.name, req.url)
console.log(e)
}
}
@@ -460,17 +477,25 @@ Controller.prototype.processChained = function (chainedDatasources, data, req, d
chainedDatasource.schema.datasource.filter = JSON.parse(filter)
}

chainedDatasource.provider.buildEndpoint(chainedDatasource.schema, function () {})
chainedDatasource.provider = new Providers[chainedDatasource.source.type]()
chainedDatasource.provider.initialise(chainedDatasource, chainedDatasource.schema)

// var requestUrl = chainedDatasource.provider.buildEndpoint(chainedDatasource.schema.datasource)
chainedDatasource.provider.buildEndpoint(chainedDatasource.schema.datasource)

// debug('datasource (load): %s %s', chainedDatasource.name, requestUrl)
debug('datasource (load): %s %s', chainedDatasource.name, chainedDatasource.provider.endpoint)

debug('datasource (load): %s %o', chainedDatasource.name, chainedDatasource.schema.datasource.filter)
chainedDatasource.provider.load(req.url, function (err, result) {
// chainedDatasource.provider.load(requestUrl, (err, chainedData) => {
chainedDatasource.provider.load(req.url, (err, chainedData) => {
if (err) log.error({module: 'controller'}, err)

help.timer.stop('datasource: ' + chainedDatasource.name + ' (chained)')

if (result) {
// TODO: simplify this, doesn't require a try/catch
if (chainedData) {
try {
data[chainedKey] = (typeof result === 'object' ? result : JSON.parse(result))
data[chainedKey] = chainedData
} catch (e) {
log.error({module: 'controller'}, e)
}
@@ -488,6 +513,7 @@ Controller.prototype.processChained = function (chainedDatasources, data, req, d
function processSearchParameters (key, datasource, req) {
// process each of the datasource's requestParams, testing for their existence
// in the querystring's request params e.g. /car-reviews/:make/:model
// return datasource.processRequest(key, req)
datasource.processRequest(key, req)
}
