Update: Auth to use new user model

- Express requests include userNew to start migrating API controllers to the new user model
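
A minimal sketch of what a controller handler might look like during this transition, based on the middleware change below; the getMe handler name is hypothetical:

// Hypothetical controller method during the migration:
// req.user is still the OLD user object, req.userNew is the new Sequelize User instance
// (both are set by Server.authMiddleware in this commit).
async getMe(req, res) {
  if (!req.userNew) return res.sendStatus(401)
  // toOldJSONForBrowser() on the new model keeps the legacy response shape
  res.json(req.userNew.toOldJSONForBrowser())
}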
advplyr 2024-08-10 15:46:04 -05:00
parent 59370cae81
commit 202ceb02b5
14 changed files with 626 additions and 392 deletions

View File

@@ -157,10 +157,6 @@ export default {
    this.init()
  },
  beforeDestroy() {
-   if (this.$refs.accountModal) {
-     this.$refs.accountModal.close()
-   }
    if (this.$root.socket) {
      this.$root.socket.off('user_added', this.addUpdateUser)
      this.$root.socket.off('user_updated', this.addUpdateUser)

View File

@@ -39,6 +39,11 @@ export default {
      this.showAccountModal = true
    }
  },
- mounted() {}
+ mounted() {},
+ beforeDestroy() {
+   if (this.$refs.accountModal) {
+     this.$refs.accountModal.close()
+   }
+ }
}
</script>

View File

@@ -16,7 +16,7 @@ export const state = () => ({
    authorSortBy: 'name',
    authorSortDesc: false,
    jumpForwardAmount: 10,
-   jumpBackwardAmount: 10,
+   jumpBackwardAmount: 10
  }
})
@@ -26,13 +26,15 @@ export const getters = {
  getToken: (state) => {
    return state.user?.token || null
  },
- getUserMediaProgress: (state) => (libraryItemId, episodeId = null) => {
-   if (!state.user.mediaProgress) return null
-   return state.user.mediaProgress.find((li) => {
-     if (episodeId && li.episodeId !== episodeId) return false
-     return li.libraryItemId == libraryItemId
-   })
- },
+ getUserMediaProgress:
+   (state) =>
+   (libraryItemId, episodeId = null) => {
+     if (!state.user.mediaProgress) return null
+     return state.user.mediaProgress.find((li) => {
+       if (episodeId && li.episodeId !== episodeId) return false
+       return li.libraryItemId == libraryItemId
+     })
+   },
  getUserBookmarksForItem: (state) => (libraryItemId) => {
    if (!state.user.bookmarks) return []
    return state.user.bookmarks.filter((bm) => bm.libraryItemId === libraryItemId)
@@ -153,7 +155,7 @@ export const mutations = {
  },
  setUserToken(state, token) {
    state.user.token = token
-   localStorage.setItem('token', user.token)
+   localStorage.setItem('token', token)
  },
  updateMediaProgress(state, { id, data }) {
    if (!state.user) return

View File

@@ -213,8 +213,11 @@ class Auth {
      return null
    }

-   user.authOpenIDSub = userinfo.sub
-   await Database.userModel.updateFromOld(user)
+   // Update user with OpenID sub
+   if (!user.extraData) user.extraData = {}
+   user.extraData.authOpenIDSub = userinfo.sub
+   user.changed('extraData', true)
+   await user.save()

    Logger.debug(`[Auth] openid: User found by email/username`)
    return user
@@ -788,12 +791,14 @@ class Auth {
    await Database.updateServerSettings()

    // New token secret creation added in v2.1.0 so generate new API tokens for each user
-   const users = await Database.userModel.getOldUsers()
+   const users = await Database.userModel.findAll({
+     attributes: ['id', 'username', 'token']
+   })
    if (users.length) {
      for (const user of users) {
        user.token = await this.generateAccessToken(user)
+       await user.save({ hooks: false })
      }
-     await Database.updateBulkUsers(users)
    }
  }
@@ -879,13 +884,13 @@ class Auth {
  /**
   * Return the login info payload for a user
   *
-  * @param {Object} user
+  * @param {import('./models/User')} user
   * @returns {Promise<Object>} jsonPayload
   */
  async getUserLoginResponsePayload(user) {
    const libraryIds = await Database.libraryModel.getAllLibraryIds()
    return {
-     user: user.toJSONForBrowser(),
+     user: user.toOldJSONForBrowser(),
      userDefaultLibraryId: user.getDefaultLibraryId(libraryIds),
      serverSettings: Database.serverSettings.toJSONForBrowser(),
      ereaderDevices: Database.emailSettings.getEReaderDevices(user),
@@ -907,6 +912,7 @@ class Auth {
  /**
   * User changes their password from request
+  * TODO: Update responses to use error status codes
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
@@ -941,19 +947,27 @@ class Auth {
      }
    }

-   matchingUser.pash = pw
-
-   const success = await Database.updateUser(matchingUser)
-   if (success) {
-     Logger.info(`[Auth] User "${matchingUser.username}" changed password`)
-     res.json({
-       success: true
-     })
-   } else {
-     res.json({
-       error: 'Unknown error'
-     })
-   }
+   Database.userModel
+     .update(
+       {
+         pash: pw
+       },
+       {
+         where: { id: matchingUser.id }
+       }
+     )
+     .then(() => {
+       Logger.info(`[Auth] User "${matchingUser.username}" changed password`)
+       res.json({
+         success: true
+       })
+     })
+     .catch((error) => {
+       Logger.error(`[Auth] User "${matchingUser.username}" failed to change password`, error)
+       res.json({
+         error: 'Unknown error'
+       })
+     })
  }
}

View File

@@ -363,7 +363,7 @@ class Database {
   */
  async createRootUser(username, pash, auth) {
    if (!this.sequelize) return false
-   await this.models.user.createRootUser(username, pash, auth)
+   await this.userModel.createRootUser(username, pash, auth)
    this.hasRootUser = true
    return true
  }
@@ -390,11 +390,6 @@ class Database {
    return this.models.user.updateFromOld(oldUser)
  }

- updateBulkUsers(oldUsers) {
-   if (!this.sequelize) return false
-   return Promise.all(oldUsers.map((u) => this.updateUser(u)))
- }
-
  removeUser(userId) {
    if (!this.sequelize) return false
    return this.models.user.removeById(userId)

View File

@@ -89,9 +89,25 @@ class Server {
      this.io = null
    }

+   /**
+    * Middleware to check if the current request is authenticated
+    * req.user is set if authenticated to the OLD user object
+    * req.userNew is set if authenticated to the NEW user object
+    *
+    * @param {import('express').Request} req
+    * @param {import('express').Response} res
+    * @param {import('express').NextFunction} next
+    */
    authMiddleware(req, res, next) {
      // ask passportjs if the current request is authenticated
-     this.auth.isAuthenticated(req, res, next)
+     this.auth.isAuthenticated(req, res, () => {
+       if (req.user) {
+         // TODO: req.userNew to become req.user
+         req.userNew = req.user
+         req.user = Database.userModel.getOldUser(req.user)
+       }
+       next()
+     })
    }

    cancelLibraryScan(libraryId) {

View File

@@ -3,11 +3,20 @@ const Logger = require('./Logger')
const Database = require('./Database')
const Auth = require('./Auth')

+ /**
+  * @typedef SocketClient
+  * @property {string} id socket id
+  * @property {SocketIO.Socket} socket
+  * @property {number} connected_at
+  * @property {import('./models/User')} user
+  */
+
class SocketAuthority {
  constructor() {
    this.Server = null
    this.io = null

+   /** @type {Object.<string, SocketClient>} */
    this.clients = {}
  }
@@ -18,27 +27,29 @@ class SocketAuthority {
   */
  getUsersOnline() {
    const onlineUsersMap = {}
-   Object.values(this.clients).filter(c => c.user).forEach(client => {
-     if (onlineUsersMap[client.user.id]) {
-       onlineUsersMap[client.user.id].connections++
-     } else {
-       onlineUsersMap[client.user.id] = {
-         ...client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions),
-         connections: 1
-       }
-     }
-   })
+   Object.values(this.clients)
+     .filter((c) => c.user)
+     .forEach((client) => {
+       if (onlineUsersMap[client.user.id]) {
+         onlineUsersMap[client.user.id].connections++
+       } else {
+         onlineUsersMap[client.user.id] = {
+           ...client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions),
+           connections: 1
+         }
+       }
+     })
    return Object.values(onlineUsersMap)
  }

  getClientsForUser(userId) {
-   return Object.values(this.clients).filter(c => c.user && c.user.id === userId)
+   return Object.values(this.clients).filter((c) => c.user?.id === userId)
  }

  /**
   * Emits event to all authorized clients
   * @param {string} evt
   * @param {any} data
   * @param {Function} [filter] optional filter function to only send event to specific users
   */
  emitter(evt, data, filter = null) {
@@ -67,7 +78,7 @@ class SocketAuthority {
  // Emits event to all admin user clients
  adminEmitter(evt, data) {
    for (const socketId in this.clients) {
-     if (this.clients[socketId].user && this.clients[socketId].user.isAdminOrUp) {
+     if (this.clients[socketId].user?.isAdminOrUp) {
        this.clients[socketId].socket.emit(evt, data)
      }
    }
@@ -75,16 +86,14 @@ class SocketAuthority {
  /**
   * Closes the Socket.IO server and disconnect all clients
   *
   * @param {Function} callback
   */
  close(callback) {
    Logger.info('[SocketAuthority] Shutting down')
    // This will close all open socket connections, and also close the underlying http server
-   if (this.io)
-     this.io.close(callback)
-   else
-     callback()
+   if (this.io) this.io.close(callback)
+   else callback()
  }

  initialize(Server) {
@@ -93,7 +102,7 @@ class SocketAuthority {
    this.io = new SocketIO.Server(this.Server.server, {
      cors: {
        origin: '*',
-       methods: ["GET", "POST"]
+       methods: ['GET', 'POST']
      }
    })
@@ -144,7 +153,7 @@ class SocketAuthority {
      // admin user can send a message to all authenticated users
      // displays on the web app as a toast
      const client = this.clients[socket.id] || {}
-     if (client.user && client.user.isAdminOrUp) {
+     if (client.user?.isAdminOrUp) {
        this.emitter('admin_message', payload.message || '')
      } else {
        Logger.error(`[SocketAuthority] Non-admin user sent the message_all_users event`)
@@ -162,8 +171,8 @@ class SocketAuthority {
  /**
   * When setting up a socket connection the user needs to be associated with a socket id
   * for this the client will send a 'auth' event that includes the users API token
   *
   * @param {SocketIO.Socket} socket
   * @param {string} token JWT
   */
  async authenticateSocket(socket, token) {
@@ -176,6 +185,7 @@ class SocketAuthority {
      Logger.error('Cannot validate socket - invalid token')
      return socket.emit('invalid_token')
    }
+
    // get the user via the id from the decoded jwt.
    const user = await Database.userModel.getUserByIdOrOldId(token_data.userId)
    if (!user) {
@@ -196,18 +206,13 @@ class SocketAuthority {
    client.user = user

-   if (!client.user.toJSONForBrowser) {
-     Logger.error('Invalid user...', client.user)
-     return
-   }
-
    Logger.debug(`[SocketAuthority] User Online ${client.user.username}`)
    this.adminEmitter('user_online', client.user.toJSONForPublic(this.Server.playbackSessionManager.sessions))

    // Update user lastSeen without firing sequelize bulk update hooks
    user.lastSeen = Date.now()
-   await Database.userModel.updateFromOld(user, false)
+   await user.save({ hooks: false })

    const initialPayload = {
      userId: client.user.id,
@@ -224,4 +229,4 @@ class SocketAuthority {
    this.Server.cancelLibraryScan(id)
  }
}

module.exports = new SocketAuthority()
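
For reference, a minimal client-side sketch of the 'auth' handshake described in the JSDoc above, assuming socket.io-client; the server URL, the token source, and the 'init' event name are assumptions:

// Hypothetical client: authenticate the socket with the user's API token (JWT)
const { io } = require('socket.io-client')

const token = process.env.ABS_TOKEN // assumed: the user's API token from login
const socket = io('http://localhost:3333') // placeholder server URL
socket.on('connect', () => socket.emit('auth', token)) // 'auth' event per authenticateSocket
socket.on('init', (payload) => console.log('socket authenticated for user', payload.userId)) // event name assumed
socket.on('invalid_token', () => console.error('socket auth failed'))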

View File

@@ -223,7 +223,7 @@ class LibraryController {
    // Only emit to users with access to library
    const userFilter = (user) => {
-     return user.checkCanAccessLibrary && user.checkCanAccessLibrary(library.id)
+     return user.checkCanAccessLibrary?.(library.id)
    }
    SocketAuthority.emitter('library_updated', library.toJSON(), userFilter)

View File

@@ -17,13 +17,13 @@ const adminStats = require('../utils/queries/adminStats')
// This is a controller for routes that don't have a home yet :(
//
class MiscController {
- constructor() { }
+ constructor() {}

  /**
   * POST: /api/upload
   * Update library item
   * @param {*} req
   * @param {*} res
   */
  async handleUpload(req, res) {
    if (!req.user.canUpload) {
@@ -42,7 +42,7 @@ class MiscController {
    if (!library) {
      return res.status(404).send(`Library not found with id ${libraryId}`)
    }
-   const folder = library.folders.find(fold => fold.id === folderId)
+   const folder = library.folders.find((fold) => fold.id === folderId)
    if (!folder) {
      return res.status(404).send(`Folder not found with id ${folderId} in library ${library.name}`)
    }
@@ -56,7 +56,7 @@ class MiscController {
    // `.filter(Boolean)` to strip out all the potentially missing details (eg: `author`)
    // before sanitizing all the directory parts to remove illegal chars and finally prepending
    // the base folder path
-   const cleanedOutputDirectoryParts = outputDirectoryParts.filter(Boolean).map(part => sanitizeFilename(part))
+   const cleanedOutputDirectoryParts = outputDirectoryParts.filter(Boolean).map((part) => sanitizeFilename(part))
    const outputDirectory = Path.join(...[folder.fullPath, ...cleanedOutputDirectoryParts])

    await fs.ensureDir(outputDirectory)
@@ -66,7 +66,8 @@ class MiscController {
    for (const file of files) {
      const path = Path.join(outputDirectory, sanitizeFilename(file.name))
-     await file.mv(path)
+     await file
+       .mv(path)
        .then(() => {
          return true
        })
@@ -82,14 +83,14 @@ class MiscController {
  /**
   * GET: /api/tasks
   * Get tasks for task manager
   * @param {*} req
   * @param {*} res
   */
  getTasks(req, res) {
    const includeArray = (req.query.include || '').split(',')
    const data = {
-     tasks: TaskManager.tasks.map(t => t.toJSON())
+     tasks: TaskManager.tasks.map((t) => t.toJSON())
    }

    if (includeArray.includes('queue')) {
@@ -104,9 +105,9 @@ class MiscController {
  /**
   * PATCH: /api/settings
   * Update server settings
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async updateServerSettings(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -135,9 +136,9 @@ class MiscController {
  /**
   * PATCH: /api/sorting-prefixes
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async updateSortingPrefixes(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -148,7 +149,7 @@ class MiscController {
    if (!sortingPrefixes?.length || !Array.isArray(sortingPrefixes)) {
      return res.status(400).send('Invalid request body')
    }
-   sortingPrefixes = [...new Set(sortingPrefixes.map(p => p?.trim?.().toLowerCase()).filter(p => p))]
+   sortingPrefixes = [...new Set(sortingPrefixes.map((p) => p?.trim?.().toLowerCase()).filter((p) => p))]
    if (!sortingPrefixes.length) {
      return res.status(400).send('Invalid sortingPrefixes in request body')
    }
@@ -233,24 +234,26 @@ class MiscController {
  /**
   * POST: /api/authorize
   * Used to authorize an API token
   *
+  * @this import('../routers/ApiRouter')
+  *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async authorize(req, res) {
    if (!req.user) {
      Logger.error('Invalid user in authorize')
      return res.sendStatus(401)
    }
-   const userResponse = await this.auth.getUserLoginResponsePayload(req.user)
+   const userResponse = await this.auth.getUserLoginResponsePayload(req.userNew)
    res.json(userResponse)
  }

  /**
   * GET: /api/tags
   * Get all tags
   * @param {*} req
   * @param {*} res
   */
  async getAllTags(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -292,8 +295,8 @@ class MiscController {
  /**
   * POST: /api/tags/rename
   * Rename tag
   * Req.body { tag, newTag }
   * @param {*} req
   * @param {*} res
   */
  async renameTag(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -321,7 +324,7 @@ class MiscController {
      }

      if (libraryItem.media.tags.includes(tag)) {
-       libraryItem.media.tags = libraryItem.media.tags.filter(t => t !== tag) // Remove old tag
+       libraryItem.media.tags = libraryItem.media.tags.filter((t) => t !== tag) // Remove old tag
        if (!libraryItem.media.tags.includes(newTag)) {
          libraryItem.media.tags.push(newTag)
        }
@@ -346,8 +349,8 @@ class MiscController {
  /**
   * DELETE: /api/tags/:tag
   * Remove a tag
   * :tag param is base64 encoded
   * @param {*} req
   * @param {*} res
   */
  async deleteTag(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -367,7 +370,7 @@ class MiscController {
    // Remove tag from items
    for (const libraryItem of libraryItemsWithTag) {
      Logger.debug(`[MiscController] Remove tag "${tag}" from item "${libraryItem.media.title}"`)
-     libraryItem.media.tags = libraryItem.media.tags.filter(t => t !== tag)
+     libraryItem.media.tags = libraryItem.media.tags.filter((t) => t !== tag)
      await libraryItem.media.update({
        tags: libraryItem.media.tags
      })
@@ -385,8 +388,8 @@ class MiscController {
  /**
   * GET: /api/genres
   * Get all genres
   * @param {*} req
   * @param {*} res
   */
  async getAllGenres(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -427,8 +430,8 @@ class MiscController {
  /**
   * POST: /api/genres/rename
   * Rename genres
   * Req.body { genre, newGenre }
   * @param {*} req
   * @param {*} res
   */
  async renameGenre(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -456,7 +459,7 @@ class MiscController {
      }

      if (libraryItem.media.genres.includes(genre)) {
-       libraryItem.media.genres = libraryItem.media.genres.filter(t => t !== genre) // Remove old genre
+       libraryItem.media.genres = libraryItem.media.genres.filter((t) => t !== genre) // Remove old genre
        if (!libraryItem.media.genres.includes(newGenre)) {
          libraryItem.media.genres.push(newGenre)
        }
@@ -481,8 +484,8 @@ class MiscController {
  /**
   * DELETE: /api/genres/:genre
   * Remove a genre
   * :genre param is base64 encoded
   * @param {*} req
   * @param {*} res
   */
  async deleteGenre(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -502,7 +505,7 @@ class MiscController {
    // Remove genre from items
    for (const libraryItem of libraryItemsWithGenre) {
      Logger.debug(`[MiscController] Remove genre "${genre}" from item "${libraryItem.media.title}"`)
-     libraryItem.media.genres = libraryItem.media.genres.filter(g => g !== genre)
+     libraryItem.media.genres = libraryItem.media.genres.filter((g) => g !== genre)
      await libraryItem.media.update({
        genres: libraryItem.media.genres
      })
@@ -520,13 +523,13 @@ class MiscController {
  /**
   * POST: /api/watcher/update
   * Update a watch path
   * Req.body { libraryId, path, type, [oldPath] }
   * type = add, unlink, rename
   * oldPath = required only for rename
   * @this import('../routers/ApiRouter')
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  updateWatchedPath(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -582,9 +585,9 @@ class MiscController {
  /**
   * GET: api/auth-settings (admin only)
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  getAuthSettings(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -597,9 +600,9 @@ class MiscController {
  /**
   * PATCH: api/auth-settings
   * @this import('../routers/ApiRouter')
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async updateAuthSettings(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -642,15 +645,13 @@ class MiscController {
      }

      const uris = settingsUpdate[key]
-     if (!Array.isArray(uris) ||
-       (uris.includes('*') && uris.length > 1) ||
-       uris.some(uri => uri !== '*' && !isValidRedirectURI(uri))) {
+     if (!Array.isArray(uris) || (uris.includes('*') && uris.length > 1) || uris.some((uri) => uri !== '*' && !isValidRedirectURI(uri))) {
        Logger.warn(`[MiscController] Invalid value for authOpenIDMobileRedirectURIs`)
        continue
      }

      // Update the URIs
-     if (Database.serverSettings[key].some(uri => !uris.includes(uri)) || uris.some(uri => !Database.serverSettings[key].includes(uri))) {
+     if (Database.serverSettings[key].some((uri) => !uris.includes(uri)) || uris.some((uri) => !Database.serverSettings[key].includes(uri))) {
        Logger.debug(`[MiscController] Updating auth settings key "${key}" from "${Database.serverSettings[key]}" to "${uris}"`)
        Database.serverSettings[key] = uris
        hasUpdates = true
@@ -704,9 +705,9 @@ class MiscController {
  /**
   * GET: /api/stats/year/:year
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async getAdminStatsForYear(req, res) {
    if (!req.user.isAdminOrUp) {
@@ -725,9 +726,9 @@ class MiscController {
  /**
   * GET: /api/logger-data
   * admin or up
   *
   * @param {import('express').Request} req
   * @param {import('express').Response} res
   */
  async getLoggerData(req, res) {
    if (!req.user.isAdminOrUp) {

View File

@@ -31,8 +31,8 @@ class UserController {
    const includes = (req.query.include || '').split(',').map((i) => i.trim())

    // Minimal toJSONForBrowser does not include mediaProgress and bookmarks
-   const allUsers = await Database.userModel.getOldUsers()
-   const users = allUsers.map((u) => u.toJSONForBrowser(hideRootToken, true))
+   const allUsers = await Database.userModel.findAll()
+   const users = allUsers.map((u) => u.toOldJSONForBrowser(hideRootToken, true))

    if (includes.includes('latestSession')) {
      for (const user of users) {
@@ -106,7 +106,7 @@ class UserController {
    const account = req.body
    const username = account.username

-   const usernameExists = await Database.userModel.getUserByUsername(username)
+   const usernameExists = await Database.userModel.checkUserExistsWithUsername(username)
    if (usernameExists) {
      return res.status(500).send('Username already taken')
    }
@@ -149,7 +149,7 @@ class UserController {
    // When changing username create a new API token
    if (account.username !== undefined && account.username !== user.username) {
-     const usernameExists = await Database.userModel.getUserByUsername(account.username)
+     const usernameExists = await Database.userModel.checkUserExistsWithUsername(account.username)
      if (usernameExists) {
        return res.status(500).send('Username already taken')
      }
@@ -272,7 +272,8 @@ class UserController {
    }

    if (req.params.id) {
-     req.reqUser = await Database.userModel.getUserById(req.params.id)
+     // TODO: Update to use new user model
+     req.reqUser = await Database.userModel.getOldUserById(req.params.id)
      if (!req.reqUser) {
        return res.sendStatus(404)
      }

View File

@@ -34,29 +34,6 @@ class MediaProgress extends Model {
    this.createdAt
  }

- getOldMediaProgress() {
-   const isPodcastEpisode = this.mediaItemType === 'podcastEpisode'
-   return {
-     id: this.id,
-     userId: this.userId,
-     libraryItemId: this.extraData?.libraryItemId || null,
-     episodeId: isPodcastEpisode ? this.mediaItemId : null,
-     mediaItemId: this.mediaItemId,
-     mediaItemType: this.mediaItemType,
-     duration: this.duration,
-     progress: this.extraData?.progress || 0,
-     currentTime: this.currentTime,
-     isFinished: !!this.isFinished,
-     hideFromContinueListening: !!this.hideFromContinueListening,
-     ebookLocation: this.ebookLocation,
-     ebookProgress: this.ebookProgress,
-     lastUpdate: this.updatedAt.valueOf(),
-     startedAt: this.createdAt.valueOf(),
-     finishedAt: this.finishedAt?.valueOf() || null
-   }
- }
-
  static upsertFromOld(oldMediaProgress) {
    const mediaProgress = this.getFromOld(oldMediaProgress)
    return this.upsert(mediaProgress)
@@ -182,6 +159,29 @@ class MediaProgress extends Model {
    })
    MediaProgress.belongsTo(user)
  }
+
+ getOldMediaProgress() {
+   const isPodcastEpisode = this.mediaItemType === 'podcastEpisode'
+   return {
+     id: this.id,
+     userId: this.userId,
+     libraryItemId: this.extraData?.libraryItemId || null,
+     episodeId: isPodcastEpisode ? this.mediaItemId : null,
+     mediaItemId: this.mediaItemId,
+     mediaItemType: this.mediaItemType,
+     duration: this.duration,
+     progress: this.extraData?.progress || 0,
+     currentTime: this.currentTime,
+     isFinished: !!this.isFinished,
+     hideFromContinueListening: !!this.hideFromContinueListening,
+     ebookLocation: this.ebookLocation,
+     ebookProgress: this.ebookProgress,
+     lastUpdate: this.updatedAt.valueOf(),
+     startedAt: this.createdAt.valueOf(),
+     finishedAt: this.finishedAt?.valueOf() || null
+   }
+ }
}

module.exports = MediaProgress

View File

@@ -42,31 +42,41 @@ class User extends Model {
  }

  /**
-  * Get all oldUsers
-  * @returns {Promise<oldUser>}
+  *
+  * @param {string} type
+  * @returns
   */
- static async getOldUsers() {
-   const users = await this.findAll({
-     include: this.sequelize.models.mediaProgress
-   })
-   return users.map((u) => this.getOldUser(u))
+ static getDefaultPermissionsForUserType(type) {
+   return {
+     download: true,
+     update: type === 'root' || type === 'admin',
+     delete: type === 'root',
+     upload: type === 'root' || type === 'admin',
+     accessAllLibraries: true,
+     accessAllTags: true,
+     accessExplicitContent: true,
+     librariesAccessible: [],
+     itemTagsSelected: []
+   }
  }

  /**
   * Get old user model from new
   *
-  * @param {Object} userExpanded
+  * @param {User} userExpanded
   * @returns {oldUser}
   */
  static getOldUser(userExpanded) {
    const mediaProgress = userExpanded.mediaProgresses.map((mp) => mp.getOldMediaProgress())

-   const librariesAccessible = userExpanded.permissions?.librariesAccessible || []
-   const itemTagsSelected = userExpanded.permissions?.itemTagsSelected || []
-   const permissions = userExpanded.permissions || {}
+   const librariesAccessible = [...(userExpanded.permissions?.librariesAccessible || [])]
+   const itemTagsSelected = [...(userExpanded.permissions?.itemTagsSelected || [])]
+   const permissions = { ...(userExpanded.permissions || {}) }
    delete permissions.librariesAccessible
    delete permissions.itemTagsSelected

+   const seriesHideFromContinueListening = userExpanded.extraData?.seriesHideFromContinueListening || []
+
    return new oldUser({
      id: userExpanded.id,
      oldUserId: userExpanded.extraData?.oldUserId || null,
@@ -76,7 +86,7 @@ class User extends Model {
      type: userExpanded.type,
      token: userExpanded.token,
      mediaProgress,
-     seriesHideFromContinueListening: userExpanded.extraData?.seriesHideFromContinueListening || [],
+     seriesHideFromContinueListening: [...seriesHideFromContinueListening],
      bookmarks: userExpanded.bookmarks,
      isActive: userExpanded.isActive,
      isLocked: userExpanded.isLocked,
@@ -168,32 +178,35 @@ class User extends Model {
   * Create root user
   * @param {string} username
   * @param {string} pash
-  * @param {Auth} auth
-  * @returns {Promise<oldUser>}
+  * @param {import('../Auth')} auth
+  * @returns {Promise<User>}
   */
  static async createRootUser(username, pash, auth) {
    const userId = uuidv4()

    const token = await auth.generateAccessToken({ id: userId, username })

-   const newRoot = new oldUser({
+   const newUser = {
      id: userId,
      type: 'root',
      username,
      pash,
      token,
      isActive: true,
-     createdAt: Date.now()
-   })
-   await this.createFromOld(newRoot)
-   return newRoot
+     permissions: this.getDefaultPermissionsForUserType('root'),
+     bookmarks: [],
+     extraData: {
+       seriesHideFromContinueListening: []
+     }
+   }
+   return this.create(newUser)
  }

  /**
   * Create user from openid userinfo
   * @param {Object} userinfo
-  * @param {Auth} auth
-  * @returns {Promise<oldUser>}
+  * @param {import('../Auth')} auth
+  * @returns {Promise<User>}
   */
  static async createUserFromOpenIdUserInfo(userinfo, auth) {
    const userId = uuidv4()
@@ -203,7 +216,7 @@ class User extends Model {
    const token = await auth.generateAccessToken({ id: userId, username })

-   const newUser = new oldUser({
+   const newUser = {
      id: userId,
      type: 'user',
      username,
@@ -211,51 +224,30 @@ class User extends Model {
      pash: null,
      token,
      isActive: true,
-     authOpenIDSub: userinfo.sub,
-     createdAt: Date.now()
-   })
-   if (await this.createFromOld(newUser)) {
-     SocketAuthority.adminEmitter('user_added', newUser.toJSONForBrowser())
-     return newUser
+     permissions: this.getDefaultPermissionsForUserType('user'),
+     bookmarks: [],
+     extraData: {
+       authOpenIDSub: userinfo.sub,
+       seriesHideFromContinueListening: []
+     }
+   }
+
+   const user = await this.create(newUser)
+   if (user) {
+     SocketAuthority.adminEmitter('user_added', user.toOldJSONForBrowser())
+     return user
    }
    return null
  }

- /**
-  * Get a user by id or by the old database id
-  * @temp User ids were updated in v2.3.0 migration and old API tokens may still use that id
-  * @param {string} userId
-  * @returns {Promise<oldUser|null>} null if not found
-  */
- static async getUserByIdOrOldId(userId) {
-   if (!userId) return null
-   const user = await this.findOne({
-     where: {
-       [sequelize.Op.or]: [
-         {
-           id: userId
-         },
-         {
-           extraData: {
-             [sequelize.Op.substring]: userId
-           }
-         }
-       ]
-     },
-     include: this.sequelize.models.mediaProgress
-   })
-   if (!user) return null
-   return this.getOldUser(user)
- }
-
  /**
   * Get user by username case insensitive
   * @param {string} username
-  * @returns {Promise<oldUser|null>} returns null if not found
+  * @returns {Promise<User>}
   */
  static async getUserByUsername(username) {
    if (!username) return null
-   const user = await this.findOne({
+   return this.findOne({
      where: {
        username: {
          [sequelize.Op.like]: username
@@ -263,18 +255,16 @@ class User extends Model {
      },
      include: this.sequelize.models.mediaProgress
    })
-   if (!user) return null
-   return this.getOldUser(user)
  }

  /**
   * Get user by email case insensitive
-  * @param {string} username
-  * @returns {Promise<oldUser|null>} returns null if not found
+  * @param {string} email
+  * @returns {Promise<User>}
   */
  static async getUserByEmail(email) {
    if (!email) return null
-   const user = await this.findOne({
+   return this.findOne({
      where: {
        email: {
          [sequelize.Op.like]: email
@@ -282,20 +272,45 @@ class User extends Model {
      },
      include: this.sequelize.models.mediaProgress
    })
-   if (!user) return null
-   return this.getOldUser(user)
  }

  /**
   * Get user by id
   * @param {string} userId
-  * @returns {Promise<oldUser|null>} returns null if not found
+  * @returns {Promise<User>}
   */
  static async getUserById(userId) {
    if (!userId) return null
-   const user = await this.findByPk(userId, {
+   return this.findByPk(userId, {
      include: this.sequelize.models.mediaProgress
    })
+ }
+
+ /**
+  * Get user by id or old id
+  * JWT tokens generated before 2.3.0 used old user ids
+  *
+  * @param {string} userId
+  * @returns {Promise<User>}
+  */
+ static async getUserByIdOrOldId(userId) {
+   if (!userId) return null
+   return this.findOne({
+     where: {
+       [sequelize.Op.or]: [{ id: userId }, { 'extraData.oldUserId': userId }]
+     },
+     include: this.sequelize.models.mediaProgress
+   })
+ }
+
+ /**
+  * @deprecated
+  * Get old user by id
+  * @param {string} userId
+  * @returns {Promise<oldUser|null>} returns null if not found
+  */
+ static async getOldUserById(userId) {
+   const user = await this.getUserById(userId)
    if (!user) return null
    return this.getOldUser(user)
  }
@@ -303,16 +318,14 @@ class User extends Model {
  /**
   * Get user by openid sub
   * @param {string} sub
-  * @returns {Promise<oldUser|null>} returns null if not found
+  * @returns {Promise<User>}
   */
  static async getUserByOpenIDSub(sub) {
    if (!sub) return null
-   const user = await this.findOne({
+   return this.findOne({
      where: sequelize.where(sequelize.literal(`extraData->>"authOpenIDSub"`), sub),
      include: this.sequelize.models.mediaProgress
    })
-   if (!user) return null
-   return this.getOldUser(user)
  }

  /**
@@ -344,6 +357,20 @@ class User extends Model {
    return count > 0
  }

+ /**
+  * Check if user exists with username
+  * @param {string} username
+  * @returns {boolean}
+  */
+ static async checkUserExistsWithUsername(username) {
+   const count = await this.count({
+     where: {
+       username
+     }
+   })
+   return count > 0
+ }
+
  /**
   * Initialize model
   * @param {import('../Database').sequelize} sequelize
@@ -380,6 +407,99 @@ class User extends Model {
      }
    )
  }

+ get isAdminOrUp() {
+   return this.type === 'root' || this.type === 'admin'
+ }
+ get isUser() {
+   return this.type === 'user'
+ }
+ /** @type {string|null} */
+ get authOpenIDSub() {
+   return this.extraData?.authOpenIDSub || null
+ }
+
+ /**
+  * User data for clients
+  * Emitted on socket events user_online, user_offline and user_stream_update
+  *
+  * @param {import('../objects/PlaybackSession')[]} sessions
+  * @returns
+  */
+ toJSONForPublic(sessions) {
+   const session = sessions?.find((s) => s.userId === this.id)?.toJSONForClient() || null
+   return {
+     id: this.id,
+     username: this.username,
+     type: this.type,
+     session,
+     lastSeen: this.lastSeen?.valueOf() || null,
+     createdAt: this.createdAt.valueOf()
+   }
+ }
+
+ /**
+  * User data for browser using old model
+  *
+  * @param {boolean} [hideRootToken=false]
+  * @param {boolean} [minimal=false]
+  * @returns
+  */
+ toOldJSONForBrowser(hideRootToken = false, minimal = false) {
+   const seriesHideFromContinueListening = this.extraData?.seriesHideFromContinueListening || []
+   const librariesAccessible = this.permissions?.librariesAccessible || []
+   const itemTagsSelected = this.permissions?.itemTagsSelected || []
+   const permissions = { ...this.permissions }
+   delete permissions.librariesAccessible
+   delete permissions.itemTagsSelected
+
+   const json = {
+     id: this.id,
+     username: this.username,
+     email: this.email,
+     type: this.type,
+     token: this.type === 'root' && hideRootToken ? '' : this.token,
+     mediaProgress: this.mediaProgresses?.map((mp) => mp.getOldMediaProgress()) || [],
+     seriesHideFromContinueListening: [...seriesHideFromContinueListening],
+     bookmarks: this.bookmarks?.map((b) => ({ ...b })) || [],
+     isActive: this.isActive,
+     isLocked: this.isLocked,
+     lastSeen: this.lastSeen?.valueOf() || null,
+     createdAt: this.createdAt.valueOf(),
+     permissions: permissions,
+     librariesAccessible: [...librariesAccessible],
+     itemTagsSelected: [...itemTagsSelected],
+     hasOpenIDLink: !!this.authOpenIDSub
+   }
+   if (minimal) {
+     delete json.mediaProgress
+     delete json.bookmarks
+   }
+   return json
+ }
+
+ /**
+  * Check user has access to library
+  *
+  * @param {string} libraryId
+  * @returns {boolean}
+  */
+ checkCanAccessLibrary(libraryId) {
+   if (this.permissions?.accessAllLibraries) return true
+   if (!this.permissions?.librariesAccessible) return false
+   return this.permissions.librariesAccessible.includes(libraryId)
+ }
+
+ /**
+  * Get first available library id for user
+  *
+  * @param {string[]} libraryIds
+  * @returns {string|null}
+  */
+ getDefaultLibraryId(libraryIds) {
+   // Libraries should already be in ascending display order, find first accessible
+   return libraryIds.find((lid) => this.checkCanAccessLibrary(lid)) || null
+ }
}

module.exports = User
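
A small usage sketch of the reworked static helpers above; it assumes the models have been initialized through Database and the require path may differ:

// Hypothetical usage: the static getters now resolve to User model instances
const Database = require('./Database') // path assumed

async function getUserPayload(username) {
  const user = await Database.userModel.getUserByUsername(username)
  if (!user) return null
  // toOldJSONForBrowser() keeps the legacy browser payload shape during the migration
  return user.toOldJSONForBrowser()
}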

View File

@@ -140,7 +140,7 @@ class EmailSettings {
  /**
   *
   * @param {EreaderDeviceObject} device
-  * @param {import('../user/User')} user
+  * @param {import('../../models/User')} user
   * @returns {boolean}
   */
  checkUserCanAccessDevice(device, user) {
@@ -158,7 +158,7 @@ class EmailSettings {
  /**
   * Get ereader devices accessible to user
   *
-  * @param {import('../user/User')} user
+  * @param {import('../../models/User')} user
   * @returns {EreaderDeviceObject[]}
   */
  getEReaderDevices(user) {

View File

@@ -1,6 +1,6 @@
const { DataTypes, QueryInterface } = require('sequelize')
const Path = require('path')
- const uuidv4 = require("uuid").v4
+ const uuidv4 = require('uuid').v4
const Logger = require('../../Logger')
const fs = require('../../libs/fsExtra')
const oldDbFiles = require('./oldDbFiles')
@@ -36,25 +36,14 @@ function getDeviceInfoString(deviceInfo, UserId) {
  if (!deviceInfo) return null
  if (deviceInfo.deviceId) return deviceInfo.deviceId

- const keys = [
-   UserId,
-   deviceInfo.browserName || null,
-   deviceInfo.browserVersion || null,
-   deviceInfo.osName || null,
-   deviceInfo.osVersion || null,
-   deviceInfo.clientVersion || null,
-   deviceInfo.manufacturer || null,
-   deviceInfo.model || null,
-   deviceInfo.sdkVersion || null,
-   deviceInfo.ipAddress || null
- ].map(k => k || '')
+ const keys = [UserId, deviceInfo.browserName || null, deviceInfo.browserVersion || null, deviceInfo.osName || null, deviceInfo.osVersion || null, deviceInfo.clientVersion || null, deviceInfo.manufacturer || null, deviceInfo.model || null, deviceInfo.sdkVersion || null, deviceInfo.ipAddress || null].map((k) => k || '')
  return 'temp-' + Buffer.from(keys.join('-'), 'utf-8').toString('base64')
}

/**
 * Migrate oldLibraryItem.media to Book model
 * Migrate BookSeries and BookAuthor
 * @param {objects.LibraryItem} oldLibraryItem
 * @param {object} LibraryItem models.LibraryItem object
 * @returns {object} { book: object, bookSeries: [], bookAuthor: [] }
 */
@@ -67,7 +56,7 @@ function migrateBook(oldLibraryItem, LibraryItem) {
    bookAuthor: []
  }

- const tracks = (oldBook.audioFiles || []).filter(af => !af.exclude && !af.invalid)
+ const tracks = (oldBook.audioFiles || []).filter((af) => !af.exclude && !af.invalid)
  let duration = 0
  for (const track of tracks) {
    if (track.duration !== null && !isNaN(track.duration)) {
@@ -156,7 +145,7 @@ function migrateBook(oldLibraryItem, LibraryItem) {
/**
 * Migrate oldLibraryItem.media to Podcast model
 * Migrate PodcastEpisode
 * @param {objects.LibraryItem} oldLibraryItem
 * @param {object} LibraryItem models.LibraryItem object
 * @returns {object} { podcast: object, podcastEpisode: [] }
 */
@@ -239,7 +228,7 @@ function migratePodcast(oldLibraryItem, LibraryItem) {
/**
 * Migrate libraryItems to LibraryItem, Book, Podcast models
 * @param {Array<objects.LibraryItem>} oldLibraryItems
 * @returns {object} { libraryItem: [], book: [], podcast: [], podcastEpisode: [], bookSeries: [], bookAuthor: [] }
 */
function migrateLibraryItems(oldLibraryItems) {
@@ -298,7 +287,7 @@ function migrateLibraryItems(oldLibraryItems) {
      updatedAt: oldLibraryItem.updatedAt,
      libraryId,
      libraryFolderId,
-     libraryFiles: oldLibraryItem.libraryFiles.map(lf => {
+     libraryFiles: oldLibraryItem.libraryFiles.map((lf) => {
        if (lf.isSupplementary === undefined) lf.isSupplementary = null
        return lf
      })
@@ -306,7 +295,7 @@ function migrateLibraryItems(oldLibraryItems) {
    oldDbIdMap.libraryItems[oldLibraryItem.id] = LibraryItem.id
    _newRecords.libraryItem.push(LibraryItem)

    //
    // Migrate Book/Podcast
    //
    if (oldLibraryItem.mediaType === 'book') {
@@ -329,7 +318,7 @@ function migrateLibraryItems(oldLibraryItems) {
/**
 * Migrate Library and LibraryFolder
 * @param {Array<objects.Library>} oldLibraries
 * @returns {object} { library: [], libraryFolder: [] }
 */
function migrateLibraries(oldLibraries) {
@@ -343,7 +332,7 @@ function migrateLibraries(oldLibraries) {
      continue
    }

    //
    // Migrate Library
    //
    const Library = {
@@ -361,7 +350,7 @@ function migrateLibraries(oldLibraries) {
    oldDbIdMap.libraries[oldLibrary.id] = Library.id
    _newRecords.library.push(Library)

    //
    // Migrate LibraryFolders
    //
    for (const oldFolder of oldLibrary.folders) {
@@ -382,21 +371,27 @@
/**
 * Migrate Author
 * Previously Authors were shared between libraries, this will ensure every author has one library
 * @param {Array<objects.entities.Author>} oldAuthors
 * @param {Array<objects.LibraryItem>} oldLibraryItems
 * @returns {Array<object>} Array of Author model objs
 */
function migrateAuthors(oldAuthors, oldLibraryItems) {
  const _newRecords = []
  for (const oldAuthor of oldAuthors) {
    // Get an array of NEW library ids that have this author
-   const librariesWithThisAuthor = [...new Set(oldLibraryItems.map(li => {
-     if (!li.media.metadata.authors?.some(au => au.id === oldAuthor.id)) return null
-     if (!oldDbIdMap.libraries[li.libraryId]) {
-       Logger.warn(`[dbMigration] Authors library id ${li.libraryId} was not migrated`)
-     }
-     return oldDbIdMap.libraries[li.libraryId]
-   }).filter(lid => lid))]
+   const librariesWithThisAuthor = [
+     ...new Set(
+       oldLibraryItems
+         .map((li) => {
+           if (!li.media.metadata.authors?.some((au) => au.id === oldAuthor.id)) return null
+           if (!oldDbIdMap.libraries[li.libraryId]) {
+             Logger.warn(`[dbMigration] Authors library id ${li.libraryId} was not migrated`)
+           }
+           return oldDbIdMap.libraries[li.libraryId]
+         })
+         .filter((lid) => lid)
+     )
+   ]

    if (!librariesWithThisAuthor.length) {
      Logger.error(`[dbMigration] Author ${oldAuthor.name} was not found in any libraries`)
@@ -426,8 +421,8 @@ function migrateAuthors(oldAuthors, oldLibraryItems) {
/**
 * Migrate Series
 * Previously Series were shared between libraries, this will ensure every series has one library
 * @param {Array<objects.entities.Series>} oldSerieses
 * @param {Array<objects.LibraryItem>} oldLibraryItems
 * @returns {Array<object>} Array of Series model objs
 */
function migrateSeries(oldSerieses, oldLibraryItems) {
@@ -436,10 +431,16 @@ function migrateSeries(oldSerieses, oldLibraryItems) {
  // Series will be separate between libraries
  for (const oldSeries of oldSerieses) {
    // Get an array of NEW library ids that have this series
-   const librariesWithThisSeries = [...new Set(oldLibraryItems.map(li => {
-     if (!li.media.metadata.series?.some(se => se.id === oldSeries.id)) return null
-     return oldDbIdMap.libraries[li.libraryId]
-   }).filter(lid => lid))]
+   const librariesWithThisSeries = [
+     ...new Set(
+       oldLibraryItems
+         .map((li) => {
+           if (!li.media.metadata.series?.some((se) => se.id === oldSeries.id)) return null
+           return oldDbIdMap.libraries[li.libraryId]
+         })
+         .filter((lid) => lid)
+     )
+   ]

    if (!librariesWithThisSeries.length) {
      Logger.error(`[dbMigration] Series ${oldSeries.name} was not found in any libraries`)
@@ -465,7 +466,7 @@ function migrateSeries(oldSerieses, oldLibraryItems) {
/**
 * Migrate users to User and MediaProgress models
 * @param {Array<objects.User>} oldUsers
 * @returns {object} { user: [], mediaProgress: [] }
 */
function migrateUsers(oldUsers) {
@@ -474,29 +475,33 @@ function migrateUsers(oldUsers) {
    mediaProgress: []
  }
  for (const oldUser of oldUsers) {
    //
    // Migrate User
    //
    // Convert old library ids to new ids
-   const librariesAccessible = (oldUser.librariesAccessible || []).map((lid) => oldDbIdMap.libraries[lid]).filter(li => li)
+   const librariesAccessible = (oldUser.librariesAccessible || []).map((lid) => oldDbIdMap.libraries[lid]).filter((li) => li)

    // Convert old library item ids to new ids
-   const bookmarks = (oldUser.bookmarks || []).map(bm => {
-     bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
-     return bm
-   }).filter(bm => bm.libraryItemId)
+   const bookmarks = (oldUser.bookmarks || [])
+     .map((bm) => {
+       bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
+       return bm
+     })
+     .filter((bm) => bm.libraryItemId)

    // Convert old series ids to new
-   const seriesHideFromContinueListening = (oldUser.seriesHideFromContinueListening || []).map(oldSeriesId => {
-     // Series were split to be per library
-     // This will use the first series it finds
-     for (const libraryId in oldDbIdMap.series) {
-       if (oldDbIdMap.series[libraryId][oldSeriesId]) {
-         return oldDbIdMap.series[libraryId][oldSeriesId]
-       }
-     }
-     return null
-   }).filter(se => se)
+   const seriesHideFromContinueListening = (oldUser.seriesHideFromContinueListening || [])
+     .map((oldSeriesId) => {
+       // Series were split to be per library
+       // This will use the first series it finds
+       for (const libraryId in oldDbIdMap.series) {
+         if (oldDbIdMap.series[libraryId][oldSeriesId]) {
+           return oldDbIdMap.series[libraryId][oldSeriesId]
+         }
+       }
+       return null
+     })
+     .filter((se) => se)

    const User = {
      id: uuidv4(),
@@ -521,7 +526,7 @@ function migrateUsers(oldUsers) {
    oldDbIdMap.users[oldUser.id] = User.id
    _newRecords.user.push(User)

    //
    // Migrate MediaProgress
    //
    for (const oldMediaProgress of oldUser.mediaProgress) {
@ -566,7 +571,7 @@ function migrateUsers(oldUsers) {
/** /**
* Migrate playbackSessions to PlaybackSession and Device models * Migrate playbackSessions to PlaybackSession and Device models
* @param {Array<objects.PlaybackSession>} oldSessions * @param {Array<objects.PlaybackSession>} oldSessions
* @returns {object} { playbackSession: [], device: [] } * @returns {object} { playbackSession: [], device: [] }
*/ */
function migrateSessions(oldSessions) { function migrateSessions(oldSessions) {
@ -690,7 +695,7 @@ function migrateSessions(oldSessions) {
/** /**
* Migrate collections to Collection & CollectionBook * Migrate collections to Collection & CollectionBook
* @param {Array<objects.Collection>} oldCollections * @param {Array<objects.Collection>} oldCollections
* @returns {object} { collection: [], collectionBook: [] } * @returns {object} { collection: [], collectionBook: [] }
*/ */
function migrateCollections(oldCollections) { function migrateCollections(oldCollections) {
@ -705,7 +710,7 @@ function migrateCollections(oldCollections) {
continue continue
} }
const BookIds = oldCollection.books.map(lid => oldDbIdMap.books[lid]).filter(bid => bid) const BookIds = oldCollection.books.map((lid) => oldDbIdMap.books[lid]).filter((bid) => bid)
if (!BookIds.length) { if (!BookIds.length) {
Logger.warn(`[dbMigration] migrateCollections: Collection "${oldCollection.name}" has no books`) Logger.warn(`[dbMigration] migrateCollections: Collection "${oldCollection.name}" has no books`)
continue continue
@ -739,7 +744,7 @@ function migrateCollections(oldCollections) {
/** /**
* Migrate playlists to Playlist and PlaylistMediaItem * Migrate playlists to Playlist and PlaylistMediaItem
* @param {Array<objects.Playlist>} oldPlaylists * @param {Array<objects.Playlist>} oldPlaylists
* @returns {object} { playlist: [], playlistMediaItem: [] } * @returns {object} { playlist: [], playlistMediaItem: [] }
*/ */
function migratePlaylists(oldPlaylists) { function migratePlaylists(oldPlaylists) {
@ -806,7 +811,7 @@ function migratePlaylists(oldPlaylists) {
/** /**
* Migrate feeds to Feed and FeedEpisode models * Migrate feeds to Feed and FeedEpisode models
* @param {Array<objects.Feed>} oldFeeds * @param {Array<objects.Feed>} oldFeeds
* @returns {object} { feed: [], feedEpisode: [] } * @returns {object} { feed: [], feedEpisode: [] }
*/ */
function migrateFeeds(oldFeeds) { function migrateFeeds(oldFeeds) {
@ -907,14 +912,14 @@ function migrateFeeds(oldFeeds) {
/** /**
* Migrate ServerSettings, NotificationSettings and EmailSettings to Setting model * Migrate ServerSettings, NotificationSettings and EmailSettings to Setting model
* @param {Array<objects.settings.*>} oldSettings * @param {Array<objects.settings.*>} oldSettings
* @returns {Array<object>} Array of Setting model objs * @returns {Array<object>} Array of Setting model objs
*/ */
function migrateSettings(oldSettings) { function migrateSettings(oldSettings) {
const _newRecords = [] const _newRecords = []
const serverSettings = oldSettings.find(s => s.id === 'server-settings') const serverSettings = oldSettings.find((s) => s.id === 'server-settings')
const notificationSettings = oldSettings.find(s => s.id === 'notification-settings') const notificationSettings = oldSettings.find((s) => s.id === 'notification-settings')
const emailSettings = oldSettings.find(s => s.id === 'email-settings') const emailSettings = oldSettings.find((s) => s.id === 'email-settings')
if (serverSettings) { if (serverSettings) {
_newRecords.push({ _newRecords.push({
@ -946,7 +951,7 @@ function migrateSettings(oldSettings) {
/** /**
* Load old libraries and bulkCreate new Library and LibraryFolder rows * Load old libraries and bulkCreate new Library and LibraryFolder rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateLibraries(DatabaseModels) { async function handleMigrateLibraries(DatabaseModels) {
const oldLibraries = await oldDbFiles.loadOldData('libraries') const oldLibraries = await oldDbFiles.loadOldData('libraries')
@ -959,7 +964,7 @@ async function handleMigrateLibraries(DatabaseModels) {
/** /**
* Load old EmailSettings, NotificationSettings and ServerSettings and bulkCreate new Setting rows * Load old EmailSettings, NotificationSettings and ServerSettings and bulkCreate new Setting rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateSettings(DatabaseModels) { async function handleMigrateSettings(DatabaseModels) {
const oldSettings = await oldDbFiles.loadOldData('settings') const oldSettings = await oldDbFiles.loadOldData('settings')
@ -970,7 +975,7 @@ async function handleMigrateSettings(DatabaseModels) {
/** /**
* Load old authors and bulkCreate new Author rows * Load old authors and bulkCreate new Author rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems * @param {Array<objects.LibraryItem>} oldLibraryItems
*/ */
async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) { async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) {
@ -982,7 +987,7 @@ async function handleMigrateAuthors(DatabaseModels, oldLibraryItems) {
/** /**
* Load old series and bulkCreate new Series rows * Load old series and bulkCreate new Series rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems * @param {Array<objects.LibraryItem>} oldLibraryItems
*/ */
async function handleMigrateSeries(DatabaseModels, oldLibraryItems) { async function handleMigrateSeries(DatabaseModels, oldLibraryItems) {
@ -994,7 +999,7 @@ async function handleMigrateSeries(DatabaseModels, oldLibraryItems) {
/** /**
* bulkCreate new LibraryItem, Book and Podcast rows * bulkCreate new LibraryItem, Book and Podcast rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
* @param {Array<objects.LibraryItem>} oldLibraryItems * @param {Array<objects.LibraryItem>} oldLibraryItems
*/ */
async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) { async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) {
@ -1008,7 +1013,7 @@ async function handleMigrateLibraryItems(DatabaseModels, oldLibraryItems) {
/** /**
* Migrate authors, series then library items in chunks * Migrate authors, series then library items in chunks
* Authors and series require old library items loaded first * Authors and series require old library items loaded first
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) { async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems') const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
@ -1026,7 +1031,7 @@ async function handleMigrateAuthorsSeriesAndLibraryItems(DatabaseModels) {
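Editor's note: the docblock above describes chunked processing, but the hunk cuts off before it. A minimal sketch of what that could look like, with an assumed chunk size; the real implementation is not shown in this diff.

// Hypothetical continuation of the chunked flow above; chunk size and loop shape are assumptions
await handleMigrateAuthors(DatabaseModels, oldLibraryItems)
await handleMigrateSeries(DatabaseModels, oldLibraryItems)

const chunkSize = 1000
for (let i = 0; i < oldLibraryItems.length; i += chunkSize) {
  await handleMigrateLibraryItems(DatabaseModels, oldLibraryItems.slice(i, i + chunkSize))
}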
/** /**
* Load old users and bulkCreate new User rows * Load old users and bulkCreate new User rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateUsers(DatabaseModels) { async function handleMigrateUsers(DatabaseModels) {
const oldUsers = await oldDbFiles.loadOldData('users') const oldUsers = await oldDbFiles.loadOldData('users')
@ -1039,7 +1044,7 @@ async function handleMigrateUsers(DatabaseModels) {
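Editor's note: the rest of handleMigrateUsers is cut off by the hunk; presumably it feeds the migrateUsers() output into bulkCreate, mirroring the per-model loop used for sessions below. A hedged sketch:

// Assumed continuation (not shown in this hunk): insert the migrated records per model
const newUserRecords = migrateUsers(oldUsers)
for (const model in newUserRecords) {
  Logger.info(`[dbMigration] Inserting ${newUserRecords[model].length} ${model} rows`)
  await DatabaseModels[model].bulkCreate(newUserRecords[model])
}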
/** /**
* Load old sessions and bulkCreate new PlaybackSession & Device rows * Load old sessions and bulkCreate new PlaybackSession & Device rows
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateSessions(DatabaseModels) { async function handleMigrateSessions(DatabaseModels) {
const oldSessions = await oldDbFiles.loadOldData('sessions') const oldSessions = await oldDbFiles.loadOldData('sessions')
@ -1055,12 +1060,11 @@ async function handleMigrateSessions(DatabaseModels) {
await DatabaseModels[model].bulkCreate(newSessionRecords[model]) await DatabaseModels[model].bulkCreate(newSessionRecords[model])
} }
} }
} }
/** /**
* Load old collections and bulkCreate new Collection, CollectionBook models * Load old collections and bulkCreate new Collection, CollectionBook models
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateCollections(DatabaseModels) { async function handleMigrateCollections(DatabaseModels) {
const oldCollections = await oldDbFiles.loadOldData('collections') const oldCollections = await oldDbFiles.loadOldData('collections')
@ -1073,7 +1077,7 @@ async function handleMigrateCollections(DatabaseModels) {
/** /**
* Load old playlists and bulkCreate new Playlist, PlaylistMediaItem models * Load old playlists and bulkCreate new Playlist, PlaylistMediaItem models
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigratePlaylists(DatabaseModels) { async function handleMigratePlaylists(DatabaseModels) {
const oldPlaylists = await oldDbFiles.loadOldData('playlists') const oldPlaylists = await oldDbFiles.loadOldData('playlists')
@ -1086,7 +1090,7 @@ async function handleMigratePlaylists(DatabaseModels) {
/** /**
* Load old feeds and bulkCreate new Feed, FeedEpisode models * Load old feeds and bulkCreate new Feed, FeedEpisode models
* @param {Map<string,Model>} DatabaseModels * @param {Map<string,Model>} DatabaseModels
*/ */
async function handleMigrateFeeds(DatabaseModels) { async function handleMigrateFeeds(DatabaseModels) {
const oldFeeds = await oldDbFiles.loadOldData('feeds') const oldFeeds = await oldDbFiles.loadOldData('feeds')
@ -1152,21 +1156,36 @@ module.exports.checkShouldMigrate = async () => {
/** /**
* Migration from 2.3.0 to 2.3.1 - create extraData columns in LibraryItem and PodcastEpisode * Migration from 2.3.0 to 2.3.1 - create extraData columns in LibraryItem and PodcastEpisode
* @param {QueryInterface} queryInterface * @param {QueryInterface} queryInterface
*/ */
async function migrationPatchNewColumns(queryInterface) { async function migrationPatchNewColumns(queryInterface) {
try { try {
return queryInterface.sequelize.transaction(t => { return queryInterface.sequelize.transaction((t) => {
return Promise.all([ return Promise.all([
queryInterface.addColumn(
  'libraryItems',
  'extraData',
  {
    type: DataTypes.JSON
  },
  { transaction: t }
),
queryInterface.addColumn(
  'podcastEpisodes',
  'extraData',
  {
    type: DataTypes.JSON
  },
  { transaction: t }
),
queryInterface.addColumn(
  'libraries',
  'extraData',
  {
    type: DataTypes.JSON
  },
  { transaction: t }
)
]) ])
}) })
} catch (error) { } catch (error) {
@ -1177,7 +1196,7 @@ async function migrationPatchNewColumns(queryInterface) {
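Editor's note: the later migrationPatch2 guards each addColumn behind a describeTable check so the patch stays idempotent. A hedged sketch of the same guard applied to one of the columns above; describeTable and addColumn are standard Sequelize QueryInterface calls, but this guard is not part of migrationPatchNewColumns itself.

// Hypothetical idempotency guard, mirroring the describeTable checks used in migrationPatch2 below
const libraryItemsTableDescription = await queryInterface.describeTable('libraryItems')
if (!libraryItemsTableDescription?.extraData) {
  await queryInterface.addColumn('libraryItems', 'extraData', { type: DataTypes.JSON })
}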
/** /**
* Migration from 2.3.0 to 2.3.1 - old library item ids * Migration from 2.3.0 to 2.3.1 - old library item ids
* @param {/src/Database} ctx * @param {/src/Database} ctx
*/ */
async function handleOldLibraryItems(ctx) { async function handleOldLibraryItems(ctx) {
const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems') const oldLibraryItems = await oldDbFiles.loadOldData('libraryItems')
@ -1188,7 +1207,7 @@ async function handleOldLibraryItems(ctx) {
for (const libraryItem of libraryItems) { for (const libraryItem of libraryItems) {
// Find matching old library item by ino // Find matching old library item by ino
const matchingOldLibraryItem = oldLibraryItems.find(oli => oli.ino === libraryItem.ino) const matchingOldLibraryItem = oldLibraryItems.find((oli) => oli.ino === libraryItem.ino)
if (matchingOldLibraryItem) { if (matchingOldLibraryItem) {
oldDbIdMap.libraryItems[matchingOldLibraryItem.id] = libraryItem.id oldDbIdMap.libraryItems[matchingOldLibraryItem.id] = libraryItem.id
@ -1202,7 +1221,7 @@ async function handleOldLibraryItems(ctx) {
if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) { if (libraryItem.media.episodes?.length && matchingOldLibraryItem.media.episodes?.length) {
for (const podcastEpisode of libraryItem.media.episodes) { for (const podcastEpisode of libraryItem.media.episodes) {
// Find matching old episode by audio file ino // Find matching old episode by audio file ino
const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find(oep => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino) const matchingOldPodcastEpisode = matchingOldLibraryItem.media.episodes.find((oep) => oep.audioFile?.ino && oep.audioFile.ino === podcastEpisode.audioFile?.ino)
if (matchingOldPodcastEpisode) { if (matchingOldPodcastEpisode) {
oldDbIdMap.podcastEpisodes[matchingOldPodcastEpisode.id] = podcastEpisode.id oldDbIdMap.podcastEpisodes[matchingOldPodcastEpisode.id] = podcastEpisode.id
@ -1235,7 +1254,7 @@ async function handleOldLibraryItems(ctx) {
/** /**
* Migration from 2.3.0 to 2.3.1 - updating oldLibraryId * Migration from 2.3.0 to 2.3.1 - updating oldLibraryId
* @param {/src/Database} ctx * @param {/src/Database} ctx
*/ */
async function handleOldLibraries(ctx) { async function handleOldLibraries(ctx) {
const oldLibraries = await oldDbFiles.loadOldData('libraries') const oldLibraries = await oldDbFiles.loadOldData('libraries')
@ -1244,11 +1263,11 @@ async function handleOldLibraries(ctx) {
let librariesUpdated = 0 let librariesUpdated = 0
for (const library of libraries) { for (const library of libraries) {
// Find matching old library using exact match on folder paths, exact match on library name // Find matching old library using exact match on folder paths, exact match on library name
const matchingOldLibrary = oldLibraries.find(ol => { const matchingOldLibrary = oldLibraries.find((ol) => {
if (ol.name !== library.name) { if (ol.name !== library.name) {
return false return false
} }
const folderPaths = ol.folders?.map(f => f.fullPath) || [] const folderPaths = ol.folders?.map((f) => f.fullPath) || []
return folderPaths.join(',') === library.folderPaths.join(',') return folderPaths.join(',') === library.folderPaths.join(',')
}) })
@ -1264,42 +1283,51 @@ async function handleOldLibraries(ctx) {
/** /**
* Migration from 2.3.0 to 2.3.1 - fixing librariesAccessible and bookmarks * Migration from 2.3.0 to 2.3.1 - fixing librariesAccessible and bookmarks
* @param {/src/Database} ctx * @param {import('../../Database')} ctx
*/ */
async function handleOldUsers(ctx) { async function handleOldUsers(ctx) {
const users = await ctx.models.user.getOldUsers() const usersNew = await ctx.userModel.findAll({
include: ctx.models.mediaProgress
})
const users = usersNew.map((u) => ctx.userModel.getOldUser(u))
let usersUpdated = 0 let usersUpdated = 0
for (const user of users) { for (const user of users) {
let hasUpdates = false let hasUpdates = false
if (user.bookmarks?.length) { if (user.bookmarks?.length) {
user.bookmarks = user.bookmarks
  .map((bm) => {
    // Only update if this is not the old id format
    if (!bm.libraryItemId.startsWith('li_')) return bm

    bm.libraryItemId = oldDbIdMap.libraryItems[bm.libraryItemId]
    hasUpdates = true
    return bm
  })
  .filter((bm) => bm.libraryItemId)
} }
// Convert old library ids to new library ids // Convert old library ids to new library ids
if (user.librariesAccessible?.length) { if (user.librariesAccessible?.length) {
user.librariesAccessible = user.librariesAccessible
  .map((lid) => {
    if (!lid.startsWith('lib_') && lid !== 'main') return lid // Already not an old library id so dont change
    hasUpdates = true
    return oldDbIdMap.libraries[lid]
  })
  .filter((lid) => lid)
} }
if (user.seriesHideFromContinueListening?.length) { if (user.seriesHideFromContinueListening?.length) {
user.seriesHideFromContinueListening = user.seriesHideFromContinueListening
  .map((seriesId) => {
    if (seriesId.startsWith('se_')) {
      hasUpdates = true
      return null // Filter out old series ids
    }
    return seriesId
  })
  .filter((se) => se)
} }
if (hasUpdates) { if (hasUpdates) {
@ -1312,7 +1340,7 @@ async function handleOldUsers(ctx) {
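Editor's note: the checks above work because old ids carry string prefixes while new ids are UUIDs. The helpers below are hypothetical, added only to summarize those conventions in one place.

// Hypothetical helpers summarizing the old-id checks used in handleOldUsers (not part of the source)
const isOldLibraryItemId = (id) => id.startsWith('li_')
const isOldLibraryId = (id) => id.startsWith('lib_') || id === 'main'
const isOldSeriesId = (id) => id.startsWith('se_')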
/** /**
* Migration from 2.3.0 to 2.3.1 * Migration from 2.3.0 to 2.3.1
* @param {/src/Database} ctx * @param {/src/Database} ctx
*/ */
module.exports.migrationPatch = async (ctx) => { module.exports.migrationPatch = async (ctx) => {
const queryInterface = ctx.sequelize.getQueryInterface() const queryInterface = ctx.sequelize.getQueryInterface()
@ -1328,7 +1356,7 @@ module.exports.migrationPatch = async (ctx) => {
} }
const oldDbPath = Path.join(global.ConfigPath, 'oldDb.zip') const oldDbPath = Path.join(global.ConfigPath, 'oldDb.zip')
if (!await fs.pathExists(oldDbPath)) { if (!(await fs.pathExists(oldDbPath))) {
Logger.info(`[dbMigration] Migration patch 2.3.0+ unnecessary - no oldDb.zip found`) Logger.info(`[dbMigration] Migration patch 2.3.0+ unnecessary - no oldDb.zip found`)
return return
} }
@ -1337,7 +1365,7 @@ module.exports.migrationPatch = async (ctx) => {
Logger.info(`[dbMigration] Applying migration patch from 2.3.0+`) Logger.info(`[dbMigration] Applying migration patch from 2.3.0+`)
// Extract from oldDb.zip // Extract from oldDb.zip
if (!await oldDbFiles.checkExtractItemsUsersAndLibraries()) { if (!(await oldDbFiles.checkExtractItemsUsersAndLibraries())) {
return return
} }
@ -1354,8 +1382,8 @@ module.exports.migrationPatch = async (ctx) => {
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the size column on libraryItem * Populating the size column on libraryItem
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2LibraryItems(ctx, offset = 0) { async function migrationPatch2LibraryItems(ctx, offset = 0) {
const libraryItems = await ctx.models.libraryItem.findAll({ const libraryItems = await ctx.models.libraryItem.findAll({
@ -1368,7 +1396,7 @@ async function migrationPatch2LibraryItems(ctx, offset = 0) {
for (const libraryItem of libraryItems) { for (const libraryItem of libraryItems) {
if (libraryItem.libraryFiles?.length) { if (libraryItem.libraryFiles?.length) {
let size = 0 let size = 0
libraryItem.libraryFiles.forEach(lf => { libraryItem.libraryFiles.forEach((lf) => {
if (!isNaN(lf.metadata?.size)) { if (!isNaN(lf.metadata?.size)) {
size += Number(lf.metadata.size) size += Number(lf.metadata.size)
} }
@ -1396,8 +1424,8 @@ async function migrationPatch2LibraryItems(ctx, offset = 0) {
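Editor's note: migrationPatch2LibraryItems takes an offset, which implies the table is processed in batches, but the hunk cuts off before the recursion. A hedged sketch of that batching with an assumed limit:

// Assumed offset/limit batching; the limit value and recursion are illustrative, not from this diff
const limit = 500
const libraryItems = await ctx.models.libraryItem.findAll({ limit, offset })
if (!libraryItems.length) return

// ...populate size from libraryFiles as shown above, then save each row...

if (libraryItems.length === limit) {
  return migrationPatch2LibraryItems(ctx, offset + limit)
}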
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the duration & titleIgnorePrefix column on book * Populating the duration & titleIgnorePrefix column on book
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2Books(ctx, offset = 0) { async function migrationPatch2Books(ctx, offset = 0) {
const books = await ctx.models.book.findAll({ const books = await ctx.models.book.findAll({
@ -1411,7 +1439,7 @@ async function migrationPatch2Books(ctx, offset = 0) {
let duration = 0 let duration = 0
if (book.audioFiles?.length) { if (book.audioFiles?.length) {
const tracks = book.audioFiles.filter(af => !af.exclude && !af.invalid) const tracks = book.audioFiles.filter((af) => !af.exclude && !af.invalid)
for (const track of tracks) { for (const track of tracks) {
if (track.duration !== null && !isNaN(track.duration)) { if (track.duration !== null && !isNaN(track.duration)) {
duration += track.duration duration += track.duration
@ -1442,8 +1470,8 @@ async function migrationPatch2Books(ctx, offset = 0) {
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the titleIgnorePrefix column on podcast * Populating the titleIgnorePrefix column on podcast
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2Podcasts(ctx, offset = 0) { async function migrationPatch2Podcasts(ctx, offset = 0) {
const podcasts = await ctx.models.podcast.findAll({ const podcasts = await ctx.models.podcast.findAll({
@ -1476,8 +1504,8 @@ async function migrationPatch2Podcasts(ctx, offset = 0) {
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the nameIgnorePrefix column on series * Populating the nameIgnorePrefix column on series
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2Series(ctx, offset = 0) { async function migrationPatch2Series(ctx, offset = 0) {
const allSeries = await ctx.models.series.findAll({ const allSeries = await ctx.models.series.findAll({
@ -1510,8 +1538,8 @@ async function migrationPatch2Series(ctx, offset = 0) {
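Editor's note: the titleIgnorePrefix / nameIgnorePrefix columns being populated above hold the title or name with a leading article stripped so sorting can ignore it. A hypothetical illustration; the project's real helper and its prefix list may differ (e.g. it may read configurable sorting prefixes).

// Hypothetical illustration of an "ignore prefix" value
function getTitleIgnorePrefix(title) {
  if (!title) return title
  const match = title.match(/^(A|An|The)\s+(.+)$/i)
  return match ? match[2] : title
}
getTitleIgnorePrefix('The Hobbit') // => 'Hobbit'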
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the lastFirst column on author * Populating the lastFirst column on author
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2Authors(ctx, offset = 0) { async function migrationPatch2Authors(ctx, offset = 0) {
const authors = await ctx.models.author.findAll({ const authors = await ctx.models.author.findAll({
@ -1546,8 +1574,8 @@ async function migrationPatch2Authors(ctx, offset = 0) {
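Editor's note: similarly, lastFirst stores the author name reordered for last-name sorting. A hypothetical sketch of the conversion; the project's actual helper may handle more edge cases (suffixes, single-word names, etc.).

// Hypothetical last-first conversion (assumption, not from this diff)
function nameToLastFirst(name) {
  const parts = name.trim().split(' ')
  if (parts.length < 2) return name
  const lastName = parts.pop()
  return `${lastName}, ${parts.join(' ')}`
}
nameToLastFirst('J. R. R. Tolkien') // => 'Tolkien, J. R. R.'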
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the createdAt column on bookAuthor * Populating the createdAt column on bookAuthor
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2BookAuthors(ctx, offset = 0) { async function migrationPatch2BookAuthors(ctx, offset = 0) {
const bookAuthors = await ctx.models.bookAuthor.findAll({ const bookAuthors = await ctx.models.bookAuthor.findAll({
@ -1581,8 +1609,8 @@ async function migrationPatch2BookAuthors(ctx, offset = 0) {
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Populating the createdAt column on bookSeries * Populating the createdAt column on bookSeries
* @param {/src/Database} ctx * @param {/src/Database} ctx
* @param {number} offset * @param {number} offset
*/ */
async function migrationPatch2BookSeries(ctx, offset = 0) { async function migrationPatch2BookSeries(ctx, offset = 0) {
const allBookSeries = await ctx.models.bookSeries.findAll({ const allBookSeries = await ctx.models.bookSeries.findAll({
@ -1616,7 +1644,7 @@ async function migrationPatch2BookSeries(ctx, offset = 0) {
/** /**
* Migration from 2.3.3 to 2.3.4 * Migration from 2.3.3 to 2.3.4
* Adding coverPath column to Feed model * Adding coverPath column to Feed model
* @param {/src/Database} ctx * @param {/src/Database} ctx
*/ */
module.exports.migrationPatch2 = async (ctx) => { module.exports.migrationPatch2 = async (ctx) => {
const queryInterface = ctx.sequelize.getQueryInterface() const queryInterface = ctx.sequelize.getQueryInterface()
@ -1631,44 +1659,95 @@ module.exports.migrationPatch2 = async (ctx) => {
Logger.info(`[dbMigration] Applying migration patch from 2.3.3+`) Logger.info(`[dbMigration] Applying migration patch from 2.3.3+`)
try { try {
await queryInterface.sequelize.transaction(t => { await queryInterface.sequelize.transaction((t) => {
const queries = [] const queries = []
if (!bookAuthorsTableDescription?.createdAt) { if (!bookAuthorsTableDescription?.createdAt) {
queries.push(
  ...[
    queryInterface.addColumn(
      'bookAuthors',
      'createdAt',
      {
        type: DataTypes.DATE
      },
      { transaction: t }
    ),
    queryInterface.addColumn(
      'bookSeries',
      'createdAt',
      {
        type: DataTypes.DATE
      },
      { transaction: t }
    )
  ]
)
} }
if (!authorsTableDescription?.lastFirst) { if (!authorsTableDescription?.lastFirst) {
queries.push(
  ...[
    queryInterface.addColumn(
      'authors',
      'lastFirst',
      {
        type: DataTypes.STRING
      },
      { transaction: t }
    ),
    queryInterface.addColumn(
      'libraryItems',
      'size',
      {
        type: DataTypes.BIGINT
      },
      { transaction: t }
    ),
    queryInterface.addColumn(
      'books',
      'duration',
      {
        type: DataTypes.FLOAT
      },
      { transaction: t }
    ),
    queryInterface.addColumn(
      'books',
      'titleIgnorePrefix',
      {
        type: DataTypes.STRING
      },
      { transaction: t }
    ),
    queryInterface.addColumn(
      'podcasts',
      'titleIgnorePrefix',
      {
        type: DataTypes.STRING
      },
      { transaction: t }
    ),
    queryInterface.addColumn(
      'series',
      'nameIgnorePrefix',
      {
        type: DataTypes.STRING
      },
      { transaction: t }
    )
  ]
)
} }
if (!feedTableDescription?.coverPath) { if (!feedTableDescription?.coverPath) {
queries.push(
  queryInterface.addColumn(
    'feeds',
    'coverPath',
    {
      type: DataTypes.STRING
    },
    { transaction: t }
  )
)
} }
return Promise.all(queries) return Promise.all(queries)
}) })
@ -1708,4 +1787,4 @@ module.exports.migrationPatch2 = async (ctx) => {
Logger.error(`[dbMigration] Migration from 2.3.3+ column creation failed`, error) Logger.error(`[dbMigration] Migration from 2.3.3+ column creation failed`, error)
throw new Error('Migration 2.3.3+ failed ' + error) throw new Error('Migration 2.3.3+ failed ' + error)
} }
} }
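Editor's note: illustrative only — a plausible order in which these exports might be applied during server startup. The actual bootstrapping lives in Database.js and is not part of this diff; the require path and surrounding logic are assumptions.

// Hypothetical startup sequence (assumption, not from this commit)
const dbMigration = require('./utils/migrations/dbMigration')

if (await dbMigration.checkShouldMigrate()) {
  // run the full legacy migration first (not shown in this hunk)
}
await dbMigration.migrationPatch(Database)
await dbMigration.migrationPatch2(Database)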