diff --git a/README.md b/README.md
index 8deba5e6a1d2e2bb2b9e38b325e75c2338bf8d69..b42183d872a0d4fdafd0b1704c27c7ef6e8618bf 100644
--- a/README.md
+++ b/README.md
@@ -115,14 +115,9 @@
 It is recommended to manage your environments with `.env` file.
 
 | name | description | default | example |
 | --- | --- | --- | --- |
-| `OBJ_STORAGE_AUTH_URL` |
-| `OBJ_STORAGE_IDP_NAME` |
-| `OBJ_STORAGE_IDP_PROTO` |
-| `OBJ_STORAGE_IDP_URL` |
-| `OBJ_STORAGE_USERNAME` |
-| `OBJ_STORAGE_PASSWORD` |
-| `OBJ_STORAGE_PROJECT_ID` |
 | `OBJ_STORAGE_ROOT_URL` |
+| `HBP_V2_REFRESH_TOKEN` |
+| `HBP_V2_ACCESS_TOKEN` |
 
 ##### Test deploy denvironments
diff --git a/deploy/auth/hbp-oidc-v2.js b/deploy/auth/hbp-oidc-v2.js
index 655dfe504471156f0ec9a11e3cd2f1dd866ccdbd..25ac50606a1a835bc247d8b6e51d12323e2972eb 100644
--- a/deploy/auth/hbp-oidc-v2.js
+++ b/deploy/auth/hbp-oidc-v2.js
@@ -8,6 +8,7 @@ const clientSecret = process.env.HBP_CLIENTSECRET_V2 || 'no hbp client secret'
 const discoveryUrl = 'https://iam.ebrains.eu/auth/realms/hbp'
 const redirectUri = `${HOSTNAME}${HOST_PATHNAME}/hbp-oidc-v2/cb`
 const cb = (tokenset, {sub, given_name, family_name, ...rest}, done) => {
+  console.log({ tokenset })
   return done(null, {
     id: `hbp-oidc-v2:${sub}`,
     name: `${given_name} ${family_name}`,
@@ -17,9 +18,15 @@ const cb = (tokenset, {sub, given_name, family_name, ...rest}, done) => {
   })
 }
 
-module.exports = async (app) => {
-  try {
-    const { oidcStrategy } = await configureAuth({
+let oidcStrategy, client, pr
+
+const memoizedInit = () => {
+  if (pr) return pr
+  pr = (async () => {
+    if (client) {
+      return
+    }
+    const re = await configureAuth({
       clientId,
       clientSecret,
       discoveryUrl,
@@ -31,14 +38,29 @@
       response_types: [ 'code' ]
     }
   })
-
-    passport.use('hbp-oidc-v2', oidcStrategy)
-    app.get('/hbp-oidc-v2/auth', passport.authenticate('hbp-oidc-v2'))
-    app.get('/hbp-oidc-v2/cb', passport.authenticate('hbp-oidc-v2', {
-      successRedirect: `${HOST_PATHNAME}/`,
-      failureRedirect: `${HOST_PATHNAME}/`
-    }))
-  } catch (e) {
-    console.error('oidcv2 auth error', e)
+    oidcStrategy = re.oidcStrategy
+    client = re.client
+  })()
+  return pr
+}
+
+module.exports = {
+  bootstrapApp: async (app) => {
+    try {
+      await memoizedInit()
+      passport.use('hbp-oidc-v2', oidcStrategy)
+      app.get('/hbp-oidc-v2/auth', passport.authenticate('hbp-oidc-v2'))
+      app.get('/hbp-oidc-v2/cb', passport.authenticate('hbp-oidc-v2', {
+        successRedirect: `${HOST_PATHNAME}/`,
+        failureRedirect: `${HOST_PATHNAME}/`
+      }))
+      return { client }
+    } catch (e) {
+      console.error('oidcv2 auth error', e)
+    }
+  },
+  getClient: async () => {
+    await memoizedInit()
+    return client
   }
 }
diff --git a/deploy/auth/index.js b/deploy/auth/index.js
index 732c522b16ac31ad55a74deee9c65ccc90961a65..ebb1172d85e9abc8b553f2cc0c31f453f238c74f 100644
--- a/deploy/auth/index.js
+++ b/deploy/auth/index.js
@@ -11,7 +11,7 @@ const ready = async () => isReady
 
 const configureAuth = async (app) => {
   console.log('configure Auth')
   const hbpOidc = require('./hbp-oidc')
-  const hbpOidc2 = require('./hbp-oidc-v2')
+  const { bootstrapApp: bootstrapOidcV2 } = require('./hbp-oidc-v2')
 
   const { initPassportJs, objStoreDb } = require('./util')
@@ -21,7 +21,7 @@ const configureAuth = async (app) => {
     await hbpOidc(app)
   }, { timeout: 1000, retries: 3 })
   await retry(async () => {
-    await hbpOidc2(app)
+    await bootstrapOidcV2(app)
   }, { timeout: 1000, retries: 3 })
 
   isReady = true
diff --git a/deploy/auth/oidc.js b/deploy/auth/oidc.js
index dbe9a4f6eaf11eacdb4845ac3de9b281b3af20ef..c55a09bd4d4d9f926df9e4e42b6caaa970a692f3 100644
--- a/deploy/auth/oidc.js
+++ b/deploy/auth/oidc.js
@@ -1,5 +1,4 @@
 const { Issuer, Strategy } = require('openid-client')
-const jwtDecode = require('jwt-decode')
 
 const defaultCb = (tokenset, {id, ...rest}, done) => {
   return done(null, {
@@ -8,7 +7,14 @@ const defaultCb = (tokenset, {id, ...rest}, done) => {
   })
 }
 
-exports.jwtDecode = jwtDecode
+exports.jwtDecode = input => {
+  if (!input) throw new Error(`jwtDecode must have an input!`)
+  const payload = input.split('.')[1]
+  if (!payload) {
+    throw new Error(`jwt token does not have enough components`)
+  }
+  return JSON.parse(Buffer.from(payload, 'base64').toString())
+}
 
 exports.configureAuth = async ({ discoveryUrl, clientId, clientSecret, redirectUri, clientConfig = {}, cb = defaultCb, scope = 'openid' }) => {
   if (!discoveryUrl)
diff --git a/deploy/csp/index.js b/deploy/csp/index.js
index 1b702ad034f2d9dde5adab17bea2584011e98e3f..b02e7fede98bef36b4940c360d4668083c8167bd 100644
--- a/deploy/csp/index.js
+++ b/deploy/csp/index.js
@@ -1,5 +1,5 @@
 const csp = require('helmet-csp')
-const bodyParser = require('body-parser')
+const express = require('express')
 const crypto = require('crypto')
 
 let WHITE_LIST_SRC, CSP_CONNECT_SRC, SCRIPT_SRC
@@ -103,15 +103,19 @@ module.exports = (app) => {
   }))
 
   if (!CSP_REPORT_URI) {
-    app.post('/report-violation', bodyParser.json({
-      type: ['json', 'application/csp-report']
-    }), (req, res) => {
-      if (req.body) {
-        console.warn(`CSP Violation: `, req.body)
-      } else {
-        console.warn(`CSP Violation: no data received!`)
+    app.post(
+      '/report-violation',
+      express.json({
+        type: ['json', 'application/csp-report']
+      }),
+      (req, res) => {
+        if (req.body) {
+          console.warn(`CSP Violation: `, req.body)
+        } else {
+          console.warn(`CSP Violation: no data received!`)
+        }
+        res.status(204).end()
       }
-      res.status(204).end()
-    })
+    )
   }
 }
\ No newline at end of file
diff --git a/deploy/datasets/index.js b/deploy/datasets/index.js
index 4f63e80b831696154b557f37fc2cb81c56aedc39..b1897f45cb28568fcc4712c8ba878b4c15d3c2b4 100644
--- a/deploy/datasets/index.js
+++ b/deploy/datasets/index.js
@@ -10,11 +10,10 @@ const archiver = require('archiver')
 const { getHandleErrorFn } = require('../util/streamHandleError')
 const { IBC_SCHEMA } = require('./importIBS')
 
-const bodyParser = require('body-parser')
 const { getIdFromFullId } = require('../../common/util')
 
-datasetsRouter.use(bodyParser.urlencoded({ extended: false }))
-datasetsRouter.use(bodyParser.json())
+datasetsRouter.use(express.urlencoded({ extended: false }))
+datasetsRouter.use(express.json())
 
 let readyFlag = false
 
@@ -229,7 +228,7 @@ datasetsRouter.get('/downloadKgFiles', checkKgQuery, async (req, res) => {
   }
 })
 
-datasetsRouter.post('/bulkDownloadKgFiles', bodyParser.urlencoded({ extended: false }), async (req, res) => {
+datasetsRouter.post('/bulkDownloadKgFiles', express.urlencoded({ extended: false }), async (req, res) => {
   try{
     const { body = {}, user } = req
     const { kgIds } = body
diff --git a/deploy/package.json b/deploy/package.json
index 26715ffd65b659399a60a1fcbd5e489c9e42c62f..29c7b1ba225785e12e677f2eb5d81f2c914d4c47 100644
--- a/deploy/package.json
+++ b/deploy/package.json
@@ -13,20 +13,19 @@
   "license": "ISC",
   "dependencies": {
     "archiver": "^3.0.0",
-    "body-parser": "^1.19.0",
     "cookie-parser": "^1.4.5",
     "express": "^4.16.4",
     "express-rate-limit": "^5.1.1",
     "express-session": "^1.15.6",
-    "hbp-seafile": "0.0.6",
+    "hbp-seafile": "^0.1.0",
     "helmet-csp": "^2.8.0",
-    "jwt-decode": "^2.2.0",
     "lru-cache": "^5.1.1",
     "memorystore": "^1.6.1",
     "nomiseco": "0.0.2",
     "openid-client": "^2.4.5",
     "passport": "^0.4.0",
     "rate-limit-redis": "^1.7.0",
+    "redis": "^3.0.2",
     "request": "^2.88.0",
     "showdown": "^1.9.1",
     "soswrap": "^0.0.2",
diff --git a/deploy/saneUrl/index.js b/deploy/saneUrl/index.js
index b3adb89077db2b1fbf110f9183c95f30d3ca4d28..521782b39de95f8e46201cdbf5ae47091541e8e1 100644
--- a/deploy/saneUrl/index.js
+++ b/deploy/saneUrl/index.js
@@ -1,8 +1,8 @@
-const router = require('express').Router()
+const express = require('express')
+const router = express.Router()
 const RateLimit = require('express-rate-limit')
 const RedisStore = require('rate-limit-redis')
 const { Store, NotFoundError } = require('./store')
-const bodyParser = require('body-parser')
 const { readUserData, saveUserData } = require('../user/store')
 
 const store = new Store()
@@ -109,7 +109,7 @@ router.post('/:name',
       else return res.status(500).send(e)
     }
   },
-  bodyParser.json(),
+  express.json(),
   async (req, res) => {
     const { name } = req.params
     const { body, user } = req
diff --git a/deploy/saneUrl/store.js b/deploy/saneUrl/store.js
index 9765a6181053223b891e05d5d1c04172b7ffa1b9..dd3cafd145beb1afc62cf330f327d922ff35a6e7 100644
--- a/deploy/saneUrl/store.js
+++ b/deploy/saneUrl/store.js
@@ -1,58 +1,257 @@
-const { SamlOpenstackWrapper } = require('soswrap')
 const request = require('request')
+const redis = require('redis')
+const { promisify } = require('util')
+const { Seafile } = require('hbp-seafile')
+const { getClient } = require('../auth/hbp-oidc-v2')
+const { jwtDecode } = require('../auth/oidc')
+const { Readable } = require('stream')
+
+const HBP_OIDC_V2_REFRESH_TOKEN_KEY = `HBP_OIDC_V2_REFRESH_TOKEN_KEY` // only insert a valid refresh token. needs to be monitored to ensure we always get new refresh token(s)
+const HBP_OIDC_V2_ACCESS_TOKEN_KEY = `HBP_OIDC_V2_ACCESS_TOKEN_KEY` // only insert valid access token. if expired, get new one via refresh token, then pop & push
+const HBP_SEAFILE_TOKEN_KEY = `HBP_SEAFILE_TOKEN_KEY` // only insert valid seafile token
+const HBP_SEAFILE_UPDATE_KEY = `HBP_SEAFILE_UPDATE_KEY` // stringified JSON key val of above three, with time stamp
 
 const {
-  OBJ_STORAGE_AUTH_URL,
-  OBJ_STORAGE_IDP_NAME,
-  OBJ_STORAGE_IDP_PROTO,
-  OBJ_STORAGE_IDP_URL,
-  OBJ_STORAGE_USERNAME,
-  OBJ_STORAGE_PASSWORD,
-  OBJ_STORAGE_PROJECT_ID,
   OBJ_STORAGE_ROOT_URL,
+
+  HBP_V2_REFRESH_TOKEN,
+  HBP_V2_ACCESS_TOKEN,
+
+  REDIS_PROTO,
+  REDIS_ADDR,
+  REDIS_PORT,
+
+  REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PROTO,
+  REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_ADDR,
+  REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PORT,
+
+  REDIS_USERNAME,
+  REDIS_PASSWORD,
+
 } = process.env
 
+const redisProto = REDIS_PROTO || REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PROTO || 'redis'
+const redisAddr = REDIS_ADDR || REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_ADDR || null
+const redisPort = REDIS_PORT || REDIS_RATE_LIMITING_DB_EPHEMERAL_PORT_6379_TCP_PORT || 6379
+
+const userPass = (() => {
+  let returnString = ''
+  if (REDIS_USERNAME) returnString += REDIS_USERNAME
+  if (REDIS_PASSWORD) returnString += `:${REDIS_PASSWORD}`
+  if (returnString.length > 0) returnString += `@`
+  return returnString
+})()
+
+const redisURL = redisAddr && `${redisProto}://${userPass}${redisAddr}:${redisPort}`
+
 class NotFoundError extends Error{}
 
 class Store {
-  constructor({
-    authUrl,
-    idPName,
-    idPProto,
-    idPUrl,
-    username,
-    password,
-
-    objStorateRootUrl,
-  } = {}){
-
-    this.wrapper = new SamlOpenstackWrapper({
-      authUrl: authUrl || OBJ_STORAGE_AUTH_URL,
-      idPName: idPName || OBJ_STORAGE_IDP_NAME,
-      idPProto: idPProto || OBJ_STORAGE_IDP_PROTO,
-      idPUrl: idPUrl || OBJ_STORAGE_IDP_URL,
+  constructor(){
+
+    this.healthFlag = false
+    this.seafileHandle = null
+    this.seafileRepoId = null
+
+    /**
+     * setup redis(or mock) client
+     */
+    this.redisClient = redisURL
+      ? redis.createClient({
+        url: redisURL
+      })
+      : ({
+        onCbs: [],
+        keys: {},
+        get: async (key, cb) => {
+          await Promise.resolve()
+          cb(null, this.keys[key])
+        },
+        set: async (key, value, cb) => {
+          await Promise.resolve()
+          this.keys[key] = value
+          cb(null)
+        },
+        on(eventName, cb) {
+          if (eventName === 'message') {
+            this.onCbs.push(cb)
+          }
+        },
+        publish(channel, message){
+          for (const cb of this.onCbs){
+            cb(channel, message)
+          }
+        },
+        quit(){}
+      })
+
+    this.redisUtil = {
+      asyncGet: promisify(this.redisClient.get).bind(this.redisClient),
+      asyncSet: promisify(this.redisClient.set).bind(this.redisClient),
+    }
+
+    this.pending = {}
+    this.keys = {}
+
+    this.redisClient.on('message', async (chan, mess) => {
+      /**
+       * only listen to HBP_SEAFILE_UPDATE_KEY updates
+       */
+      if (chan === HBP_SEAFILE_UPDATE_KEY) {
+        try {
+          const { pending, update } = JSON.parse(mess)
+          this.pending = pending
+          for (const key in update) {
+            try {
+              this.keys[key] = await this.redisUtil.asyncGet(key)
+              console.log('on message get key', key, this.keys[key])
+            } catch (e) {
+              console.error(`[saneUrl][store.js] get key ${key} error`)
+            }
+          }
+        } catch (e) {
+          console.error(`[saneUrl][store.js] parse message HBP_SEAFILE_UPDATE_KEY error`)
+        }
+      }
     })
+
+    this.init()
+
+    /**
+     * check expiry
+     */
+    this.intervalRef = setInterval(() => {
+      this.checkExpiry()
+    }, 1000 * 60)
+  }
 
-    this.objStorateRootUrl = objStorateRootUrl || OBJ_STORAGE_ROOT_URL
+  async init() {
+    this.openIdClient = await getClient()
+    this.keys = {
+      [HBP_OIDC_V2_REFRESH_TOKEN_KEY]: (await this.redisUtil.asyncGet(HBP_OIDC_V2_REFRESH_TOKEN_KEY)) || HBP_V2_REFRESH_TOKEN,
+      [HBP_OIDC_V2_ACCESS_TOKEN_KEY]: (await this.redisUtil.asyncGet(HBP_OIDC_V2_ACCESS_TOKEN_KEY)) || HBP_V2_ACCESS_TOKEN,
+      [HBP_SEAFILE_TOKEN_KEY]: await this.redisUtil.asyncGet(HBP_SEAFILE_TOKEN_KEY),
+    }
+
+    await this.refreshSeafileHandle()
+    const repos = await this.seafileHandle.getRepos()
+    const repoToUse = repos.find(repo => repo.name === 'interactive-atlas-viewer')
+    this.seafileRepoId = repoToUse.id
+
+    this.healthFlag = true
+  }
+
+  async checkExpiry(){
+    const {
+      [HBP_OIDC_V2_REFRESH_TOKEN_KEY]: refreshToken,
+      [HBP_OIDC_V2_ACCESS_TOKEN_KEY]: accessToken
+    } = this.keys
+
+    /**
+     * if access token is absent
+     * try to refresh token, without needing to check exp
+     */
+    if (!accessToken) {
+      await this.doRefreshTokens()
+      return true
+    }
+    const { exp: refreshExp } = jwtDecode(refreshToken)
+    const { exp: accessExp } = jwtDecode(accessToken)
+
+    const now = Math.floor(Date.now() / 1000) // jwt exp is expressed in seconds since epoch
+
+    if (now < refreshExp && now > accessExp) {
+      console.log(`[saneUrl] access token expired. Refreshing access token...`)
+      await this.doRefreshTokens()
+      console.log(`[saneUrl] access token successfully refreshed...`)
+      return true
+    }
+
+    return true
+  }
+
+  async doRefreshTokens(){
-    this.wrapper.username = username || OBJ_STORAGE_USERNAME
-    this.wrapper.password = password || OBJ_STORAGE_PASSWORD
+    /**
+     * first, check if another process/pod is currently updating
+     * if they are, give them 1 minute to complete the process
+     * (usually takes less than 5 seconds)
+     */
+    const pendingStart = this.pending[HBP_OIDC_V2_REFRESH_TOKEN_KEY] &&
+      this.pending[HBP_OIDC_V2_REFRESH_TOKEN_KEY].start + 1000 * 60
+    const now = new Date()
+    if (pendingStart && pendingStart > now) {
+      return
+    }
+
+    /**
+     * When we start refreshing the tokens, set the pending attribute and a start timestamp
+     */
+    this.redisClient.publish(HBP_SEAFILE_UPDATE_KEY, JSON.stringify({
+      pending: {
+        ...this.pending,
+        [HBP_OIDC_V2_REFRESH_TOKEN_KEY]: {
+          start: Date.now()
+        }
+      }
+    }))
+    const client = await getClient()
+    const tokenset = await client.refresh(this.keys[HBP_OIDC_V2_REFRESH_TOKEN_KEY])
+    const { access_token: accessToken, refresh_token: refreshToken } = tokenset
 
-    this.getToken()
+    const { exp: accessTokenExp } = jwtDecode(accessToken)
+    const { exp: refreshTokenExp } = jwtDecode(refreshToken)
+
+    if (refreshTokenExp - accessTokenExp < 60 * 60) {
+      console.warn(`[saneUrl] refreshToken expires within 1 hour of access token! ${accessTokenExp} ${refreshTokenExp}`)
+    }
+
+    /**
+     * once the tokens have been refreshed, set them in the redis store first,
+     * then publish the update message
+     */
+    await this.redisUtil.asyncSet(HBP_OIDC_V2_REFRESH_TOKEN_KEY, refreshToken)
+    await this.redisUtil.asyncSet(HBP_OIDC_V2_ACCESS_TOKEN_KEY, accessToken)
+    this.redisClient.publish(HBP_SEAFILE_UPDATE_KEY, JSON.stringify({
+      update: {
+        [HBP_OIDC_V2_REFRESH_TOKEN_KEY]: refreshToken,
+        [HBP_OIDC_V2_ACCESS_TOKEN_KEY]: accessToken,
+      },
+      pending: {
+        ...this.pending,
+        [HBP_OIDC_V2_REFRESH_TOKEN_KEY]: null
+      }
+    }))
+    this.keys = {
+      [HBP_OIDC_V2_REFRESH_TOKEN_KEY]: refreshToken,
+      [HBP_OIDC_V2_ACCESS_TOKEN_KEY]: accessToken,
+    }
   }
 
-  async getToken() {
-    this.token = await this.wrapper.getScopedToken({ projectId: OBJ_STORAGE_PROJECT_ID })
-    return this.token
+  async refreshSeafileHandle(){
+    await this.checkExpiry()
+    const {
+      [HBP_OIDC_V2_ACCESS_TOKEN_KEY]: accessToken,
+      [HBP_SEAFILE_TOKEN_KEY]: token,
+    } = this.keys
+    this.seafileHandle = Seafile.from({
+      token,
+      accessToken
+    })
+    if (!token) {
+      await this.seafileHandle.init()
+    }
+    return this.seafileHandle
   }
 
-  get(id) {
+  tryGetFromSwiftObj(id) {
+    if (!OBJ_STORAGE_ROOT_URL){
+      return Promise.reject(
+        new NotFoundError()
+      )
+    }
     return new Promise((rs, rj) => {
-      request.get(`${this.objStorateRootUrl}/${id}`, {
-        headers: {
-          'X-Auth-Token': this.token
-        }
-      }, (err, resp, body) => {
+      request.get(`${OBJ_STORAGE_ROOT_URL}/${id}`, (err, resp, body) => {
         if (err) return rj(err)
         if (resp.statusCode === 404) return rj(new NotFoundError())
         if (resp.statusCode >= 400) return rj(resp)
@@ -61,34 +260,90 @@ class Store {
     })
   }
 
-  _set(id, value) {
-    return new Promise((rs, rj) => {
-      request.put(`${this.objStorateRootUrl}/${id}`, {
-        headers: {
-          'X-Auth-Token': this.token
-        },
-        body: value
-      }, (err, resp, body) => {
-        if (err) return rj(err)
-        if (resp.statusCode >= 400) return rj(resp)
-        return rs(body)
+  async tryGetFromSeafile(id) {
+    const getFiles = async () => {
+      return await this.seafileHandle.ls({
+        repoId: this.seafileRepoId,
+        dir: `/saneurl/`
       })
-    })
+    }
+    let files
+    try {
+      files = await getFiles()
+    } catch (e) {
+      await this.refreshSeafileHandle()
+      files = await getFiles()
+    }
+
+    if (!files.find(f => f.name === id)) {
+      throw new NotFoundError()
+    }
+
+    const getFile = async () => {
+      return await this.seafileHandle.readFile({
+        dir: `/saneurl/${id}`,
+        repoId: this.seafileRepoId
+      })
+    }
+
+    try {
+      return await getFile()
+    } catch (e) {
+      await this.refreshSeafileHandle()
+      return await getFile()
+    }
   }
 
-  async set(id, value) {
+  async get(id) {
     try {
-      const result = await this._set(id, value)
-      return result
+      return await this.tryGetFromSwiftObj(id)
     } catch (e) {
-      await this.getToken()
-      const result = await this._set(id, value)
-      return result
+      if (e instanceof NotFoundError) {
+        /**
+         * try to get the file from seafile
+         */
+
+        return await this.tryGetFromSeafile(id)
+      } else {
+        throw new Error(`Unknown Error: ${e}`)
+      }
     }
   }
 
-  async healthCheck(){
+  async _set(id, value) {
+    const rs = new Readable()
+    rs.path = id
+    rs.push(value)
+    rs.push(null)
+    const uploadToSeafile = async () => {
+      await this.seafileHandle.uploadFile({
+        pathToFile: `/saneurl/${id}`,
+        readStream: rs,
+      }, {
+        repoId: this.seafileRepoId,
+        dir: '/saneurl/'
+      })
+    }
+    try {
+      await uploadToSeafile()
+    } catch (e) {
+      await this.refreshSeafileHandle()
+      await uploadToSeafile()
+    }
+  }
+  async set(id, value) {
+    const result = await this._set(id, value)
+    return result
+  }
+
+  dispose(){
+    clearInterval(this.intervalRef)
+    this.redisClient && this.redisClient.quit()
+  }
+
+  async healthCheck(){
+    return this.healthFlag
   }
 }
diff --git a/deploy/user/index.js b/deploy/user/index.js
index 9d1809c963bfa07e6f518991f567f0b4e5af4137..6a6dd14dd2de6eb4c7a5e2cfb1161ed926b07686 100644
--- a/deploy/user/index.js
+++ b/deploy/user/index.js
@@ -1,6 +1,6 @@
-const router = require('express').Router()
+const express = require('express')
+const router = express.Router()
 const { readUserData, saveUserData } = require('./store')
-const bodyParser = require('body-parser')
 
 const loggedInOnlyMiddleware = (req, res, next) => {
   const { user } = req
@@ -23,15 +23,20 @@ router.get('/config', loggedInOnlyMiddleware, async (req, res) => {
   }
 })
 
-router.post('/config', loggedInOnlyMiddleware, bodyParser.json(), async (req, res) => {
-  const { user, body } = req
-  try {
-    await saveUserData(user, body)
-    res.status(200).end()
-  } catch (e) {
-    console.error(e)
-    res.status(500).send(e.toString())
+router.post(
+  '/config',
+  loggedInOnlyMiddleware,
+  express.json(),
+  async (req, res) => {
+    const { user, body } = req
+    try {
+      await saveUserData(user, body)
+      res.status(200).end()
+    } catch (e) {
+      console.error(e)
+      res.status(500).send(e.toString())
+    }
   }
-})
+)
 
 module.exports = router
\ No newline at end of file
diff --git a/docs/releases/v2.3.10.md b/docs/releases/v2.3.10.md
new file mode 100644
index 0000000000000000000000000000000000000000..0564438f214b62dd43db5112b98c6f8391e05669
--- /dev/null
+++ b/docs/releases/v2.3.10.md
@@ -0,0 +1,5 @@
+# v2.3.10
+
+## Hotfixes
+
+- Update the saneURL logic, as object storage is no longer available. (#923)
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
index 27dd11a403f9284d8af0b43c58e46daf5add7310..57e61b1c467ef7d2b7954459a711c3e17618b210 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -40,6 +40,7 @@ pages:
     - Fetching datasets: 'advanced/datasets.md'
     - Display non-atlas volumes: 'advanced/otherVolumes.md'
   - Release notes:
+    - v2.3.10: 'releases/v2.3.10.md'
    - v2.3.9: 'releases/v2.3.9.md'
     - v2.3.8: 'releases/v2.3.8.md'
     - v2.3.7: 'releases/v2.3.7.md'
diff --git a/package.json b/package.json
index ddaadebbe0ea2ab790f981de88e332c41e144b67..8bead6de940a2ba38a4887bc71f985427adc06b4 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "interactive-viewer",
-  "version": "2.3.9",
+  "version": "2.3.10",
   "description": "HBP interactive atlas viewer. Integrating KG query, dataset previews & more. Based on humanbrainproject/nehuba & google/neuroglancer. Built with angular.io",
   "scripts": {
     "dev-server-export": "webpack-dev-server --config webpack.export.js",