diff --git a/Dockerfile b/Dockerfile
index 4f02923a5ee7b504d9d9f57f0d3ebbad4a90d72f..a20f2d36324fee46ce3a90f029ba9361839ab550 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -18,7 +18,6 @@ FROM node:8-alpine
 ARG PORT
 ENV PORT=$PORT
 ENV NODE_ENV=production
-ENV DOCKER_BUILD=true
 
 RUN apk --no-cache add ca-certificates
 RUN mkdir /iv-app
diff --git a/deploy/datasets/index.js b/deploy/datasets/index.js
index 70d2d38bc516f8b71f4015d7cf0211b3c7894dce..1d9ac9c01a25faba6f28e75a1bc519f04fe71922 100644
--- a/deploy/datasets/index.js
+++ b/deploy/datasets/index.js
@@ -1,111 +1,37 @@
 const express = require('express')
-const fs = require('fs')
-const path = require('path')
 const datasetsRouter = express.Router()
+const { init, getDatasets } = require('./query')
 
-const cachedFilePath = path.join(__dirname, '..', 'res', 'cachedKgDS.20190225.json')
-let cachedData = null
-let cachedSpaces = null
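+/* populate the dataset cache at startup */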
+init().catch(console.error)
 
-fs.readFile(cachedFilePath, 'utf-8', (err, data) => {
-  if (err)
-    throw err
-  const json = JSON.parse(data)
-  cachedData = json.results.filter(ds => ds.embargoStatus.some(s => s === 'Free'))
-
-  const allPNames = cachedData.filter(v => v.parcellationRegion.length > 0).map(v => v.parcellationRegion[0].name)
-  const noPNameNoRefSpace = cachedData.filter(v => v.parcellationRegion.length === 0 && v.referenceSpaces.length === 0)
-})
-
-datasetsRouter.get('/templateName/:templateName', (req, res) => {
+datasetsRouter.get('/templateName/:templateName', (req, res, next) => {
   const { templateName } = req.params
-  /**
-   * temporary use cached data. in future, live fetch data and/or apply caching
-   */
-  const filteredData = cachedData.filter(ds => {
-    return templateName === 'undefined'
-      ? ds.referenceSpaces.length === 0
-      : ds.referenceSpaces.some(rs => rs.name === templateName)
-  })
-  res.status(200).send(JSON.stringify(filteredData))
-})
-
-
-const readConfigFile = (filename) => new Promise((resolve, reject) => {
-  const filepath = path.join(__dirname, '..', 'res', filename)
-  fs.readFile(filepath, 'utf-8', (err, data) => {
-    if(err) reject(err)
-    resolve(data)
-  })
+  getDatasets({ templateName })
+    .then(ds => {
+      res.status(200).send(JSON.stringify(ds))
+    })
+    .catch(error => {
+      next({
+        code: 500,
+        error
+      })
+    })
 })
 
-const flattenArray = (array) => {
-  return array.filter(item => item.children.length === 0).concat(
-    ...array.filter(item => item.children.length > 0).map(item => flattenArray(item.children))
-  )
-}
-
-let juBrain = null
-let shortBundle = null
-let longBundle = null
-
-readConfigFile('colin.json')
-  .then(data => JSON.parse(data))
-  .then(json => {
-    juBrain = flattenArray(json.parcellations[0].regions)
-  })
-  .catch(console.error)
 
-readConfigFile('MNI152.json')
-  .then(data => JSON.parse(data))
-  .then(json => {
-    longBundle = flattenArray(json.parcellations[0].regions)
-    shortBundle = flattenArray(json.parcellations[1].regions)
-  })
-  .catch(console.error)
 
-datasetsRouter.get('/parcellationName/:parcellationName', (req, res) => {
+datasetsRouter.get('/parcellationName/:parcellationName', (req, res, next) => {
   const { parcellationName } = req.params
-  let returnArr
-  switch (parcellationName) {
-    case 'JuBrain Cytoarchitectonic Atlas':
-      returnArr = juBrain
-        ? cachedData
-          .filter(ds => !/infant/i.test(ds.name))
-          .filter(ds =>  
-            ds.parcellationRegion.length > 0 &&
-            ds.parcellationRegion.some(pr => {
-              const regex = new RegExp(pr.name)
-              return juBrain.some(juBR => regex.test(juBR.name))
-            }))
-        : []
-      break;
-    case 'Fibre Bundle Atlas - Long Bundle':
-      returnArr = longBundle
-        ? cachedData
-            .filter(ds =>  
-              ds.parcellationRegion.length > 0 &&
-              ds.parcellationRegion.some(pr => {
-                const regex = new RegExp(pr.name)
-                return longBundle.some(lbr => regex.test(lbr.name))
-              }))
-        : []
-      break;
-    case 'Fibre Bundle Atlas - Short Bundle':
-      returnArr = shortBundle
-      ? cachedData
-          .filter(ds =>  
-            ds.parcellationRegion.length > 0 &&
-            ds.parcellationRegion.some(pr => {
-              const regex = new RegExp(pr.name)
-              return shortBundle.some(sbr => regex.test(sbr.name))
-            }))
-      : []
-      break;
-    default:
-      returnArr = []
-  }
-  res.status(200).send(JSON.stringify(returnArr))
+  getDatasets({ parcellationName })
+    .then(ds => {
+      res.status(200).send(JSON.stringify(ds))
+    })
+    .catch(error => {
+      next({
+        code: 500,
+        error
+      })
+    })
 })
 
 module.exports = datasetsRouter
\ No newline at end of file
diff --git a/deploy/datasets/query.js b/deploy/datasets/query.js
new file mode 100644
index 0000000000000000000000000000000000000000..213dee9b3972bc847524d62dda91329c12ea56a4
--- /dev/null
+++ b/deploy/datasets/query.js
@@ -0,0 +1,118 @@
+const fs = require('fs')
+const request = require('request')
+const path = require('path')
+
+let cachedData = null
+let otherQueryResult = null
+const queryUrl = process.env.KG_DATASET_QUERY_URL || `https://kg-int.humanbrainproject.org/query/minds/core/dataset/v1.0.0/interactiveViewerKgQuery/instances/public?size=450&vocab=https%3A%2F%2Fschema.hbp.eu%2FmyQuery%2F`
+const timeout = process.env.TIMEOUT || 5000
+
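+/**
+ * Fetch the publicly released datasets from the Knowledge Graph query endpoint
+ * and resolve with the parsed JSON response.
+ */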
+const fetchDatasetFromKg = () => new Promise((resolve, reject) => {
+  request(queryUrl, (err, resp, body) => {
+    if (err)
+      return reject(err)
+    try {
+      resolve(JSON.parse(body))
+    } catch (parseError) {
+      /* a malformed body would otherwise throw inside the callback and leave the promise unsettled */
+      reject(parseError)
+    }
+  })
+})
+
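+/**
+ * Race the live KG fetch against a timer: if the fetch has not completed within
+ * `timeout` ms and a cached copy exists, serve the cache. A successful fetch
+ * always refreshes the cache.
+ */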
+const getDs = () => Promise.race([
+  new Promise((rs, rj) => {
+    setTimeout(() => {
+      if (cachedData) {
+        rs(cachedData)
+      } else {
+        /**
+         * cached data not yet available: leave this promise pending,
+         * so the race is settled by the live KG fetch instead
+         */
+      }
+    }, timeout)
+  }),
+  fetchDatasetFromKg()
+    .then(({results, ...rest}) => {
+      cachedData = results
+      otherQueryResult = rest
+      return cachedData
+    })
+])
+
+/**
+ * Region hierarchies needed for filtering datasets by parcellation
+ */
+
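+/* recursively flatten a region hierarchy into its leaf regions */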
+const flattenArray = (array) => {
+  return array.filter(item => item.children.length === 0).concat(
+    ...array.filter(item => item.children.length > 0).map(item => flattenArray(item.children))
+  )
+}
+
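+/* read a bundled viewer config from deploy/res */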
+const readConfigFile = (filename) => new Promise((resolve, reject) => {
+  const filepath = path.join(__dirname, '..', 'res', filename)
+  fs.readFile(filepath, 'utf-8', (err, data) => {
+    if (err) return reject(err)
+    resolve(data)
+  })
+})
+
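+/**
+ * Pre-compute the flattened region lists used to match dataset parcellationRegion
+ * names against the JuBrain and fibre bundle atlases.
+ */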
+let juBrain = null
+let shortBundle = null
+let longBundle = null
+
+readConfigFile('colin.json')
+  .then(data => JSON.parse(data))
+  .then(json => {
+    juBrain = flattenArray(json.parcellations[0].regions)
+  })
+  .catch(console.error)
+
+readConfigFile('MNI152.json')
+  .then(data => JSON.parse(data))
+  .then(json => {
+    longBundle = flattenArray(json.parcellations[0].regions)
+    shortBundle = flattenArray(json.parcellations[1].regions)
+  })
+  .catch(console.error)
+
+const filterByPRs = (prs, atlasPr) => atlasPr
+  ? prs.some(pr => {
+      /* escape regex special characters in the region name, mirroring filterDataEntriesByRegion.pipe.ts */
+      const regex = new RegExp(pr.name.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'))
+      return atlasPr.some(aPr => regex.test(aPr.name))
+    })
+  : false
+
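+/**
+ * Keep only the datasets that match the requested template (by reference space name)
+ * or parcellation (by matching parcellationRegion names against the atlas regions).
+ */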
+const filter = (datasets, { templateName, parcellationName }) => datasets.filter(ds => {
+  if (templateName) {
+    return templateName === 'undefined'
+      ? ds.referenceSpaces.length === 0
+      : ds.referenceSpaces.some(rs => rs.name === templateName)
+  }
+  if (parcellationName) {
+    if (ds.parcellationRegion.length === 0)
+      return false
+    const atlasPr = parcellationName === 'JuBrain Cytoarchitectonic Atlas' && juBrain && !/infant/i.test(ds.name)
+      ? juBrain
+      : parcellationName === 'Fibre Bundle Atlas - Long Bundle' && longBundle
+        ? longBundle
+        : parcellationName === 'Fibre Bundle Atlas - Short Bundle' && shortBundle
+          ? shortBundle
+          : null
+    return filterByPRs(ds.parcellationRegion, atlasPr)
+  }
+
+  return false
+})
+
+/**
+ * on init, populate the cached data
+ */
+exports.init = () => fetchDatasetFromKg()
+  .then(({ results, ...rest }) => {
+    cachedData = results
+    otherQueryResult = rest
+  })
+
+exports.getDatasets = ({ templateName, parcellationName }) => getDs()
+  .then(datasets => filter(datasets, { templateName, parcellationName }))
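+
+/**
+ * Example usage (illustrative):
+ *   getDatasets({ parcellationName: 'JuBrain Cytoarchitectonic Atlas' })
+ *     .then(datasets => { ... })
+ */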
diff --git a/deploy/package.json b/deploy/package.json
index fe22fd3a98bad52e51abf25f7763a6e9105293f0..0dfc574576eb6939f44b3e00b6c5d6bd91e17603 100644
--- a/deploy/package.json
+++ b/deploy/package.json
@@ -11,7 +11,8 @@
   "license": "ISC",
   "dependencies": {
     "express": "^4.16.4",
-    "openid-client": "^2.4.5"
+    "openid-client": "^2.4.5",
+    "request": "^2.88.0"
   },
   "devDependencies": {
     "cors": "^2.8.5",
diff --git a/deploy/server.js b/deploy/server.js
index 6dd4b914fbb81239e450c1561a6219cc44eb0f18..d740f148761158f4dbb514b61a30e2a82b0295b0 100644
--- a/deploy/server.js
+++ b/deploy/server.js
@@ -14,7 +14,7 @@ const nehubaConfigRouter = require('./nehubaConfig')
 const datasetRouter = require('./datasets')
 const catchError = require('./catchError')
 
-const publicPath = process.env.DOCKER_BUILD
+const publicPath = process.env.NODE_ENV === 'production'
   ? path.join(__dirname, 'public')
   : path.join(__dirname, '..', 'dist', 'aot')
 
diff --git a/src/components/pagination/pagination.component.ts b/src/components/pagination/pagination.component.ts
index fc962a2ae61f8423b9b898a7ad40bca1e7e5de0d..1723d4540cef7683c09f8ca7f5e81986c6003f80 100644
--- a/src/components/pagination/pagination.component.ts
+++ b/src/components/pagination/pagination.component.ts
@@ -8,12 +8,13 @@ import { Component, Input, Output, EventEmitter } from '@angular/core'
   ]
 })
 
-export class PaginationComponent{
+export class PaginationComponent {
   @Input() total : number = 0
   @Input() hitsPerPage : number = 15
   @Input() currentPage : number = 0
 
   @Output() paginationChange : EventEmitter<number> = new EventEmitter()
+  @Output() outOfBound: EventEmitter<number> = new EventEmitter()
 
   goto(pgnum:number){
     const emitValue = pgnum < 0 ? 
diff --git a/src/ui/databrowser/databrowser.component.ts b/src/ui/databrowser/databrowser.component.ts
index bee9c53cf554c960d200b4d33d0e0dc657572ef5..aa889e93b111861b12ee659466452047d9653cec 100644
--- a/src/ui/databrowser/databrowser.component.ts
+++ b/src/ui/databrowser/databrowser.component.ts
@@ -3,7 +3,7 @@ import { Store, select } from "@ngrx/store";
 import { DataStateInterface, Property, safeFilter, DataEntry, File, SELECT_REGIONS, getLabelIndexMap, isDefined, SPATIAL_GOTO_PAGE, CHANGE_NAVIGATION, UPDATE_SPATIAL_DATA_VISIBLE, DESELECT_REGIONS, DESELECT_LANDMARKS, SELECT_LANDMARKS } from "../../services/stateStore.service";
 import { map, filter, distinctUntilChanged } from "rxjs/operators";
 import { HasPathProperty } from "../../util/pipes/pathToNestedChildren.pipe";
-import { Observable, Subscription, combineLatest } from "rxjs";
+import { Observable, Subscription, combineLatest, Subject } from "rxjs";
 import { FileViewer } from "../fileviewer/fileviewer.component";
 import { WidgetServices } from "../../atlasViewer/widgetUnit/widgetService.service";
 import { AtlasViewerConstantsServices } from "../../atlasViewer/atlasViewer.constantService.service";
@@ -47,6 +47,12 @@ export class DataBrowserUI implements OnDestroy,OnInit{
   private spatialDataEntries$ : Observable<any[]>
   private spatialPagination$ : Observable<{spatialSearchPagination:number,spatialSearchTotalResults:number}>
 
+  /**
+   * TODO filter types
+   */
+  public filterApplied$: Observable<any>
+  private typeVisibility$: Subject<Set<string>> = new Subject()
+
   private subscriptions : Subscription[] = []
 
   get showDataTypes(){
@@ -86,6 +92,10 @@ export class DataBrowserUI implements OnDestroy,OnInit{
       map(results => [...results[0], ...results[1]])
     )
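+    /**
+     * filterApplied$ emits whenever the selected regions or the visible data
+     * types change; it is subscribed to further down to reset currentPage to 0
+     */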
     
+    this.filterApplied$ = combineLatest(
+      this.selectedRegions$,
+      this.typeVisibility$
+    )
 
     this.metadataMap$ = this.store.pipe(
       select('dataStore'),
@@ -150,6 +160,10 @@ export class DataBrowserUI implements OnDestroy,OnInit{
     this.subscriptions.push(
       this.spatialPagination$.subscribe(this.handleSpatialPaginationChange.bind(this))
     )
+
+    this.subscriptions.push(
+      this.filterApplied$.subscribe(() => this.currentPage = 0)
+    )
   }
 
   ngOnDestroy(){
@@ -285,6 +299,8 @@ export class DataBrowserUI implements OnDestroy,OnInit{
     this.hideDataTypes = new Set(
       [...this.hideDataTypes]
     )
+
+    this.typeVisibility$.next(new Set([...this.hideDataTypes]))
   }
 
   gothere(event:MouseEvent,position:any){
diff --git a/src/util/pipes/filterDataEntriesByRegion.pipe.ts b/src/util/pipes/filterDataEntriesByRegion.pipe.ts
index 9236d20bd695bc7e59e55a52cc3265febe7122d7..1924a87ccae557fbf348092dad369e1c7e67ce68 100644
--- a/src/util/pipes/filterDataEntriesByRegion.pipe.ts
+++ b/src/util/pipes/filterDataEntriesByRegion.pipe.ts
@@ -14,10 +14,11 @@ export class FilterDataEntriesByRegion implements PipeTransform{
             /**
              * TODO: temporary hack, some dataset region name is not exactly the same as region
              */
-            const regex = new RegExp(pr.name)
+            /* https://stackoverflow.com/a/9310752/6059235 */
+            const regex = new RegExp(pr.name.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'))
             return selectedRegions.some(sr => regex.test(sr.name))
             /**
-             * more correct
+             * more correct, but probably should use UUID in the future
              */
             return selectedRegions.some(sr => sr.name === pr.name)
           })