Browse Source

Removed custom ESLint curly rule

Sigh, why did you do this, past me..?

Also fixed "Delete uploads by names".
master
Bobby Wibowo 8 months ago
parent
commit
47dd512910
No known key found for this signature in database. GPG Key ID: 51C3A1E1E22D26CF
  1. 7
      .eslintrc.js
  2. 134
      controllers/albumsController.js
  3. 128
      controllers/authController.js
  4. 3
      controllers/multerStorageController.js
  5. 12
      controllers/pathsController.js
  6. 3
      controllers/permissionController.js
  7. 17
      controllers/tokenController.js
  8. 429
      controllers/uploadController.js
  9. 113
      controllers/utilsController.js
  10. 3
      gulpfile.js
  11. 3
      logger.js
  12. 63
      lolisafe.js
  13. 5
      src/js/.eslintrc.js
  14. 3
      src/js/album.js
  15. 30
      src/js/auth.js
  16. 421
      src/js/dashboard.js
  17. 185
      src/js/home.js
  18. 20
      src/js/misc/newsfeed.js
  19. 37
      src/js/misc/render.js
  20. 25
      src/js/misc/utils.js

7
.eslintrc.js

@ -10,16 +10,11 @@ module.exports = {
'standard'
],
rules: {
curly: [
'error',
'multi',
'consistent'
],
'no-throw-literal': 0,
'object-shorthand': [
'error',
'always'
],
'standard/no-callback-literal': 0
'node/no-callback-literal': 0
}
}

134
controllers/albumsController.js

@ -29,14 +29,10 @@ const zipOptions = config.uploads.jsZipOptions
zipOptions.type = 'nodebuffer'
// Apply fallbacks for missing config values
if (zipOptions.streamFiles === undefined)
zipOptions.streamFiles = true
if (zipOptions.compression === undefined)
zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined)
zipOptions.compressionOptions = {}
if (zipOptions.compressionOptions.level === undefined)
zipOptions.compressionOptions.level = 1
if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true
if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined) zipOptions.compressionOptions = {}
if (zipOptions.compressionOptions.level === undefined) zipOptions.compressionOptions.level = 1
self.zipEmitters = new Map()
@ -51,8 +47,7 @@ class ZipEmitter extends EventEmitter {
self.getUniqueRandomName = async () => {
for (let i = 0; i < utils.idMaxTries; i++) {
const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
if (self.onHold.has(identifier))
continue
if (self.onHold.has(identifier)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(identifier)
@ -80,15 +75,15 @@ self.list = async (req, res, next) => {
const all = req.headers.all === '1'
const sidebar = req.headers.sidebar
const ismoderator = perms.is(user, 'moderator')
if (all && !ismoderator)
return res.status(403).end()
if (all && !ismoderator) return res.status(403).end()
const filter = function () {
if (!all)
if (!all) {
this.where({
enabled: 1,
userid: user.id
})
}
}
try {
@ -97,8 +92,7 @@ self.list = async (req, res, next) => {
.where(filter)
.count('id as count')
.then(rows => rows[0].count)
if (!count)
return res.json({ success: true, albums: [], count })
if (!count) return res.json({ success: true, albums: [], count })
const fields = ['id', 'name']
@ -116,8 +110,7 @@ self.list = async (req, res, next) => {
else if (offset < 0) offset = Math.max(0, Math.ceil(count / 25) + offset)
fields.push('identifier', 'enabled', 'timestamp', 'editedAt', 'download', 'public', 'description')
if (all)
fields.push('userid')
if (all) fields.push('userid')
albums = await db.table('albums')
.where(filter)
@ -140,13 +133,14 @@ self.list = async (req, res, next) => {
.whereIn('albumid', Object.keys(albumids))
.select('albumid')
for (const upload of uploads)
if (albumids[upload.albumid])
for (const upload of uploads) {
if (albumids[upload.albumid]) {
albumids[upload.albumid].uploads++
}
}
// If we are not listing all albums, send response
if (!all)
return res.json({ success: true, albums, count, homeDomain })
if (!all) return res.json({ success: true, albums, count, homeDomain })
// Otherwise proceed to querying usernames
const userids = albums
@ -156,8 +150,7 @@ self.list = async (req, res, next) => {
})
// If there are no albums attached to a registered user, send response
if (userids.length === 0)
return res.json({ success: true, albums, count, homeDomain })
if (!userids.length) return res.json({ success: true, albums, count, homeDomain })
// Query usernames of user IDs from currently selected files
const usersTable = await db.table('users')
@ -165,8 +158,9 @@ self.list = async (req, res, next) => {
.select('id', 'username')
const users = {}
for (const user of usersTable)
for (const user of usersTable) {
users[user.id] = user.username
}
return res.json({ success: true, albums, count, users, homeDomain })
} catch (error) {
@ -183,8 +177,7 @@ self.create = async (req, res, next) => {
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
if (!name)
return res.json({ success: false, description: 'No album name specified.' })
if (!name) return res.json({ success: false, description: 'No album name specified.' })
try {
const album = await db.table('albums')
@ -195,8 +188,7 @@ self.create = async (req, res, next) => {
})
.first()
if (album)
return res.json({ success: false, description: 'There is already an album with that name.' })
if (album) return res.json({ success: false, description: 'There is already an album with that name.' })
const identifier = await self.getUniqueRandomName()
@ -235,8 +227,7 @@ self.disable = async (req, res, next) => {
const id = req.body.id
const purge = req.body.purge
if (!Number.isFinite(id))
return res.json({ success: false, description: 'No album specified.' })
if (!Number.isFinite(id)) return res.json({ success: false, description: 'No album specified.' })
try {
if (purge) {
@ -249,8 +240,7 @@ self.disable = async (req, res, next) => {
if (files.length) {
const ids = files.map(file => file.id)
const failed = await utils.bulkDeleteFromDb('id', ids, user)
if (failed.length)
return res.json({ success: false, failed })
if (failed.length) return res.json({ success: false, failed })
}
utils.invalidateStatsCache('uploads')
}
@ -291,24 +281,23 @@ self.edit = async (req, res, next) => {
const ismoderator = perms.is(user, 'moderator')
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No album specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No album specified.' })
const name = typeof req.body.name === 'string'
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
if (!name)
return res.json({ success: false, description: 'No name specified.' })
if (!name) return res.json({ success: false, description: 'No name specified.' })
const filter = function () {
this.where('id', id)
if (!ismoderator)
if (!ismoderator) {
this.andWhere({
enabled: 1,
userid: user.id
})
}
}
try {
@ -316,13 +305,14 @@ self.edit = async (req, res, next) => {
.where(filter)
.first()
if (!album)
if (!album) {
return res.json({ success: false, description: 'Could not get album with the specified ID.' })
else if (album.id !== id)
} else if (album.id !== id) {
return res.json({ success: false, description: 'Name already in use.' })
else if (req._old && (album.id === id))
} else if (req._old && (album.id === id)) {
// Old rename API
return res.json({ success: false, description: 'You did not specify a new name.' })
}
const update = {
name,
@ -333,11 +323,13 @@ self.edit = async (req, res, next) => {
: ''
}
if (ismoderator)
if (ismoderator) {
update.enabled = Boolean(req.body.enabled)
}
if (req.body.requestLink)
if (req.body.requestLink) {
update.identifier = await self.getUniqueRandomName()
}
await db.table('albums')
.where(filter)
@ -353,10 +345,9 @@ self.edit = async (req, res, next) => {
const oldZip = path.join(paths.zips, `${album.identifier}.zip`)
const newZip = path.join(paths.zips, `${update.identifier}.zip`)
await paths.rename(oldZip, newZip)
} catch (err) {
} catch (error) {
// Re-throw error
if (err.code !== 'ENOENT')
throw err
if (error.code !== 'ENOENT') throw error
}
return res.json({
@ -380,8 +371,9 @@ self.rename = async (req, res, next) => {
self.get = async (req, res, next) => {
const identifier = req.params.identifier
if (identifier === undefined)
if (identifier === undefined) {
return res.status(401).json({ success: false, description: 'No identifier provided.' })
}
try {
const album = await db.table('albums')
@ -391,16 +383,17 @@ self.get = async (req, res, next) => {
})
.first()
if (!album)
if (!album) {
return res.json({
success: false,
description: 'Album not found.'
})
else if (album.public === 0)
} else if (album.public === 0) {
return res.status(403).json({
success: false,
description: 'This album is not available for public.'
})
}
const title = album.name
const files = await db.table('files')
@ -412,8 +405,9 @@ self.get = async (req, res, next) => {
file.file = `${config.domain}/${file.name}`
const extname = utils.extname(file.name)
if (utils.mayGenerateThumb(extname))
if (utils.mayGenerateThumb(extname)) {
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
}
}
return res.json({
@ -432,17 +426,19 @@ self.generateZip = async (req, res, next) => {
const versionString = parseInt(req.query.v)
const identifier = req.params.identifier
if (identifier === undefined)
if (identifier === undefined) {
return res.status(401).json({
success: false,
description: 'No identifier provided.'
})
}
if (!config.uploads.generateZips)
if (!config.uploads.generateZips) {
return res.status(401).json({
success: false,
description: 'Zip generation disabled.'
})
}
try {
const album = await db.table('albums')
@ -452,32 +448,35 @@ self.generateZip = async (req, res, next) => {
})
.first()
if (!album)
if (!album) {
return res.json({ success: false, description: 'Album not found.' })
else if (album.download === 0)
} else if (album.download === 0) {
return res.json({ success: false, description: 'Download for this album is disabled.' })
}
if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
if ((isNaN(versionString) || versionString <= 0) && album.editedAt) {
return res.redirect(`${album.identifier}?v=${album.editedAt}`)
}
if (album.zipGeneratedAt > album.editedAt)
if (album.zipGeneratedAt > album.editedAt) {
try {
const filePath = path.join(paths.zips, `${identifier}.zip`)
await paths.access(filePath)
return res.download(filePath, `${album.name}.zip`)
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}
if (self.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)
return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
if (filePath && fileName)
if (filePath && fileName) {
res.download(filePath, fileName)
else if (json)
} else if (json) {
res.json(json)
}
})
}
@ -559,8 +558,9 @@ self.addFiles = async (req, res, next) => {
if (!user) return
const ids = req.body.ids
if (!Array.isArray(ids) || !ids.length)
if (!Array.isArray(ids) || !ids.length) {
return res.json({ success: false, description: 'No files specified.' })
}
let albumid = parseInt(req.body.albumid)
if (isNaN(albumid) || albumid < 0) albumid = null
@ -572,16 +572,18 @@ self.addFiles = async (req, res, next) => {
const album = await db.table('albums')
.where('id', albumid)
.where(function () {
if (user.username !== 'root')
if (user.username !== 'root') {
this.where('userid', user.id)
}
})
.first()
if (!album)
if (!album) {
return res.json({
success: false,
description: 'Album does not exist or it does not belong to the user.'
})
}
albumids.push(albumid)
}
@ -597,8 +599,9 @@ self.addFiles = async (req, res, next) => {
.update('albumid', albumid)
files.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
}
})
await db.table('albums')
@ -609,13 +612,14 @@ self.addFiles = async (req, res, next) => {
return res.json({ success: true, failed })
} catch (error) {
logger.error(error)
if (failed.length === ids.length)
if (failed.length === ids.length) {
return res.json({
success: false,
description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
})
else
} else {
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
}

128
controllers/authController.js

@ -34,31 +34,30 @@ self.verify = async (req, res, next) => {
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (!username)
return res.json({ success: false, description: 'No username provided.' })
if (!username) return res.json({ success: false, description: 'No username provided.' })
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (!password)
return res.json({ success: false, description: 'No password provided.' })
if (!password) return res.json({ success: false, description: 'No password provided.' })
try {
const user = await db.table('users')
.where('username', username)
.first()
if (!user)
return res.json({ success: false, description: 'Username does not exist.' })
if (!user) return res.json({ success: false, description: 'Username does not exist.' })
if (user.enabled === false || user.enabled === 0)
if (user.enabled === false || user.enabled === 0) {
return res.json({ success: false, description: 'This account has been disabled.' })
}
const result = await bcrypt.compare(password, user.password)
if (result === false)
if (result === false) {
return res.json({ success: false, description: 'Wrong password.' })
else
} else {
return res.json({ success: true, token: user.token })
}
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
@ -66,34 +65,46 @@ self.verify = async (req, res, next) => {
}
self.register = async (req, res, next) => {
if (config.enableUserAccounts === false)
if (config.enableUserAccounts === false) {
return res.json({ success: false, description: 'Registration is currently disabled.' })
}
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < self.user.min || username.length > self.user.max)
return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
if (username.length < self.user.min || username.length > self.user.max) {
return res.json({
success: false,
description: `Username must have ${self.user.min}-${self.user.max} characters.`
})
}
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
try {
const user = await db.table('users')
.where('username', username)
.first()
if (user)
return res.json({ success: false, description: 'Username already exists.' })
if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
if (!token)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!token) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
await db.table('users')
.insert({
@ -121,8 +132,12 @@ self.changePassword = async (req, res, next) => {
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
try {
const hash = await bcrypt.hash(password, saltRounds)
@ -139,12 +154,13 @@ self.changePassword = async (req, res, next) => {
}
self.assertPermission = (user, target) => {
if (!target)
if (!target) {
throw new Error('Could not get user with the specified ID.')
else if (!perms.higher(user, target))
} else if (!perms.higher(user, target)) {
throw new Error('The user is in the same or higher group as you.')
else if (target.username === 'root')
} else if (target.username === 'root') {
throw new Error('Root user may not be tampered with.')
}
}
self.createUser = async (req, res, next) => {
@ -152,21 +168,28 @@ self.createUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < self.user.min || username.length > self.user.max)
return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
if (username.length < self.user.min || username.length > self.user.max) {
return res.json({
success: false,
description: `Username must have ${self.user.min}-${self.user.max} characters.`
})
}
let password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length) {
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
} else {
password = randomstring.generate(self.pass.rand)
}
@ -186,14 +209,17 @@ self.createUser = async (req, res, next) => {
.where('username', username)
.first()
if (user)
return res.json({ success: false, description: 'Username already exists.' })
if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
if (!token)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!token) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
await db.table('users')
.insert({
@ -219,12 +245,10 @@ self.editUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@ -236,17 +260,20 @@ self.editUser = async (req, res, next) => {
if (req.body.username !== undefined) {
update.username = String(req.body.username).trim()
if (update.username.length < self.user.min || update.username.length > self.user.max)
if (update.username.length < self.user.min || update.username.length > self.user.max) {
throw new Error(`Username must have ${self.user.min}-${self.user.max} characters.`)
}
}
if (req.body.enabled !== undefined)
if (req.body.enabled !== undefined) {
update.enabled = Boolean(req.body.enabled)
}
if (req.body.group !== undefined) {
update.permission = perms.permissions[req.body.group]
if (typeof update.permission !== 'number' || update.permission < 0)
if (typeof update.permission !== 'number' || update.permission < 0) {
update.permission = target.permission
}
}
let password
@ -282,13 +309,11 @@ self.deleteUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
const purge = req.body.purge
if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@ -304,8 +329,7 @@ self.deleteUser = async (req, res, next) => {
const fileids = files.map(file => file.id)
if (purge) {
const failed = await utils.bulkDeleteFromDb('id', fileids, user)
if (failed.length)
return res.json({ success: false, failed })
if (failed.length) return res.json({ success: false, failed })
utils.invalidateStatsCache('uploads')
} else {
// Clear out userid attribute from the files
@ -315,7 +339,8 @@ self.deleteUser = async (req, res, next) => {
}
}
// TODO: Figure out obstacles of just deleting the albums
// TODO: Figure out why we can't just delete the albums from DB
// DISCLAIMER: Upstream always had it coded this way for some reason
const albums = await db.table('albums')
.where('userid', id)
.where('enabled', 1)
@ -333,8 +358,7 @@ self.deleteUser = async (req, res, next) => {
try {
await paths.unlink(path.join(paths.zips, `${album.identifier}.zip`))
} catch (error) {
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}))
}
@ -362,15 +386,13 @@ self.listUsers = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
try {
const count = await db.table('users')
.count('id as count')
.then(rows => rows[0].count)
if (!count)
return res.json({ success: true, users: [], count })
if (!count) return res.json({ success: true, users: [], count })
let offset = Number(req.params.page)
if (isNaN(offset)) offset = 0

3
controllers/multerStorageController.js

@ -36,8 +36,9 @@ DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) {
file._chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
file._chunksData.stream.on('error', onerror)
}
if (!file._chunksData.hasher)
if (!file._chunksData.hasher) {
file._chunksData.hasher = blake3.createHash()
}
outStream = file._chunksData.stream
hash = file._chunksData.hasher

12
controllers/pathsController.js

@ -20,8 +20,9 @@ const fsFuncs = [
'writeFile'
]
for (const fsFunc of fsFuncs)
for (const fsFunc of fsFuncs) {
self[fsFunc] = promisify(fs[fsFunc])
}
self.uploads = path.resolve(config.uploads.folder)
self.chunks = path.join(self.uploads, 'chunks')
@ -51,7 +52,7 @@ const verify = [
self.init = async () => {
// Check & create directories
for (const p of verify)
for (const p of verify) {
try {
await self.access(p)
} catch (err) {
@ -59,10 +60,10 @@ self.init = async () => {
throw err
} else {
const mkdir = await self.mkdir(p)
if (mkdir)
logger.log(`Created directory: ${p}`)
if (mkdir) logger.log(`Created directory: ${p}`)
}
}
}
// Purge any leftover in chunks directory
const uuidDirs = await self.readdir(self.chunks)
@ -74,8 +75,7 @@ self.init = async () => {
))
await self.rmdir(root)
}))
if (uuidDirs.length)
logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
if (uuidDirs.length) logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
}
module.exports = self

3
controllers/permissionController.js

@ -11,8 +11,7 @@ self.permissions = {
// returns true if user is in the group OR higher
self.is = (user, group) => {
// root bypass
if (user.username === 'root')
return true
if (user.username === 'root') return true
const permission = user.permission || 0
return permission >= self.permissions[group]

17
controllers/tokenController.js

@ -14,8 +14,7 @@ const self = {
self.generateUniqueToken = async () => {
for (let i = 0; i < self.tokenMaxTries; i++) {
const token = randomstring.generate(self.tokenLength)
if (self.onHold.has(token))
continue
if (self.onHold.has(token)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(token)
@ -40,8 +39,7 @@ self.verify = async (req, res, next) => {
? req.body.token.trim()
: ''
if (!token)
return res.json({ success: false, description: 'No token provided.' })
if (!token) return res.json({ success: false, description: 'No token provided.' })
try {
const user = await db.table('users')
@ -49,8 +47,7 @@ self.verify = async (req, res, next) => {
.select('username', 'permission')
.first()
if (!user)
return res.json({ success: false, description: 'Invalid token.' })
if (!user) return res.json({ success: false, description: 'Invalid token.' })
const obj = {
success: true,
@ -76,8 +73,12 @@ self.change = async (req, res, next) => {
if (!user) return
const newToken = await self.generateUniqueToken()
if (!newToken)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!newToken) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
try {
await db.table('users')

429
controllers/uploadController.js
File diff suppressed because it is too large
View File

113
controllers/utilsController.js

@ -18,7 +18,9 @@ const self = {
chunkSize: config.uploads.scan.chunkSize || 64 * 1024,
groupBypass: config.uploads.scan.groupBypass || null,
whitelistExtensions: (Array.isArray(config.uploads.scan.whitelistExtensions) &&
config.uploads.scan.whitelistExtensions.length) ? config.uploads.scan.whitelistExtensions : null,
config.uploads.scan.whitelistExtensions.length)
? config.uploads.scan.whitelistExtensions
: null,
maxSize: (parseInt(config.uploads.scan.maxSize) * 1e6) || null
},
gitHash: null,
@ -92,14 +94,16 @@ self.extname = filename => {
}
// check against extensions that must be preserved
for (const extPreserve of extPreserves)
for (const extPreserve of extPreserves) {
if (lower.endsWith(extPreserve)) {
extname = extPreserve
break
}
}
if (!extname)
if (!extname) {
extname = lower.slice(lower.lastIndexOf('.') - lower.length) // path.extname(lower)
}
return extname + multi
}
@ -110,14 +114,12 @@ self.escape = string => {
// Copyright(c) 2015 Andreas Lubbe
// Copyright(c) 2015 Tiancheng "Timothy" Gu
if (!string)
return string
if (!string) return string
const str = String(string)
const match = /["'&<>]/.exec(str)
if (!match)
return str
if (!match) return str
let escape
let html = ''
@ -145,8 +147,9 @@ self.escape = string => {
continue
}
if (lastIndex !== index)
if (lastIndex !== index) {
html += str.substring(lastIndex, index)
}
lastIndex = index + 1
html += escape
@ -203,16 +206,16 @@ self.generateThumbs = async (name, extname, force) => {
// Check if thumbnail already exists
try {
const lstat = await paths.lstat(thumbname)
if (lstat.isSymbolicLink())
if (lstat.isSymbolicLink()) {
// Unlink if symlink (should be symlink to the placeholder)
await paths.unlink(thumbname)
else if (!force)
} else if (!force) {
// Continue only if it does not exist, unless forced to
return true
}
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
// Full path to input file
@ -257,12 +260,14 @@ self.generateThumbs = async (name, extname, force) => {
const metadata = await self.ffprobe(input)
const duration = parseInt(metadata.format.duration)
if (isNaN(duration))
if (isNaN(duration)) {
throw 'Warning: File does not have valid duration metadata'
}
const videoStream = metadata.streams && metadata.streams.find(s => s.codec_type === 'video')
if (!videoStream || !videoStream.width || !videoStream.height)
if (!videoStream || !videoStream.width || !videoStream.height) {
throw 'Warning: File does not have valid video stream metadata'
}
await new Promise((resolve, reject) => {
ffmpeg(input)
@ -287,10 +292,11 @@ self.generateThumbs = async (name, extname, force) => {
await paths.lstat(thumbname)
return true
} catch (err) {
if (err.code === 'ENOENT')
if (err.code === 'ENOENT') {
throw error || 'Warning: FFMPEG exited with empty output file'
else
} else {
throw error || err
}
}
})
} else {
@ -361,8 +367,7 @@ self.unlinkFile = async (filename, predb) => {
await paths.unlink(path.join(paths.uploads, filename))
} catch (error) {
// Return true if file does not exist
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
const identifier = filename.split('.')[0]
@ -375,26 +380,26 @@ self.unlinkFile = async (filename, predb) => {
}
const extname = self.extname(filename)
if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
if (self.imageExts.includes(extname) || self.videoExts.includes(extname)) {
try {
await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
} catch (error) {
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}
}
self.bulkDeleteFromDb = async (field, values, user) => {
// Always return an empty array on failure
if (!user || !['id', 'name'].includes(field) || !values.length)
return []
if (!user || !['id', 'name'].includes(field) || !values.length) return []
// SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
// Read more: https://www.sqlite.org/limits.html
const MAX_VARIABLES_CHUNK_SIZE = 999
const chunks = []
while (values.length)
while (values.length) {
chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
}
const failed = []
const ismoderator = perms.is(user, 'moderator')
@ -407,8 +412,9 @@ self.bulkDeleteFromDb = async (field, values, user) => {
const files = await db.table('files')
.whereIn(field, chunk)
.where(function () {
if (!ismoderator)
if (!ismoderator) {
this.where('userid', user.id)
}
})
// Push files that could not be found in db
@ -435,17 +441,19 @@ self.bulkDeleteFromDb = async (field, values, user) => {
.del()
self.invalidateStatsCache('uploads')
if (self.idSet)
if (self.idSet) {
unlinked.forEach(file => {
const identifier = file.name.split('.')[0]
self.idSet.delete(identifier)
// logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
})
}
// Push album ids
unlinked.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
}
})
// Push unlinked files
@ -463,13 +471,16 @@ self.bulkDeleteFromDb = async (field, values, user) => {
}
// Purge Cloudflare's cache if necessary, but do not wait
if (config.cloudflare.purgeCache)
if (config.cloudflare.purgeCache) {
self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
.then(results => {
for (const result of results)
if (result.errors.length)
for (const result of results) {
if (result.errors.length) {
result.errors.forEach(error => logger.error(`[CF]: ${error}`))
}
}
})
}
}
} catch (error) {
logger.error(error)
@ -480,12 +491,15 @@ self.bulkDeleteFromDb = async (field, values, user) => {
self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const errors = []
if (!cloudflareAuth)
if (!cloudflareAuth) {
errors.push('Cloudflare auth is incomplete or missing')
if (!Array.isArray(names) || !names.length)
}
if (!Array.isArray(names) || !names.length) {
errors.push('Names array is invalid or empty')
if (errors.length)
}
if (errors.length) {
return [{ success: false, files: [], errors }]
}
let domain = config.domain
if (!uploads) domain = config.homeDomain
@ -495,8 +509,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
if (uploads) {
const url = `${domain}/${name}`
const extname = self.extname(name)
if (thumbs && self.mayGenerateThumb(extname))
if (thumbs && self.mayGenerateThumb(extname)) {
thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
}
return url
} else {
return name === 'home' ? domain : `${domain}/${name}`
@ -509,8 +524,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
// TODO: Handle API rate limits
const MAX_LENGTH = 30
const chunks = []
while (names.length)
while (names.length) {
chunks.push(names.splice(0, MAX_LENGTH))
}
const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
const results = []
@ -543,8 +559,9 @@ self.purgeCloudflareCache = async (names, uploads, thumbs) => {
const response = await purge.json()
result.success = response.success
if (Array.isArray(response.errors) && response.errors.length)
if (Array.isArray(response.errors) && response.errors.length) {
result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
}
} catch (error) {
result.errors = [error.toString()]
}
@ -642,7 +659,7 @@ self.stats = async (req, res, next) => {
}
// Disk usage, only for Linux platform
if (os.platform === 'linux')
if (os.platform === 'linux') {
if (!statsCache.disk.cache && statsCache.disk.generating) {
stats.disk = false
} else if (((Date.now() - statsCache.disk.generatedAt) <= 60000) || statsCache.disk.generating) {
@ -727,8 +744,9 @@ self.stats = async (req, res, next) => {
stats.disk[basename] = parseInt(formatted[0])
// Add to types if necessary
if (!stats.disk._types.byte.includes(basename))
if (!stats.disk._types.byte.includes(basename)) {
stats.disk._types.byte.push(basename)
}
})
const stderr = []
@ -786,6 +804,7 @@ self.stats = async (req, res, next) => {
statsCache.disk.cache = stats.disk
statsCache.disk.generating = false
}
}
// Uploads
if (!statsCache.uploads.cache && statsCache.uploads.generating) {
@ -812,8 +831,9 @@ self.stats = async (req, res, next) => {
stats.uploads.total = uploads.length
stats.uploads.sizeInDb = uploads.reduce((acc, upload) => acc + parseInt(upload.size), 0)
// Add type information for the new column
if (!Array.isArray(stats.uploads._types.byte))
if (!Array.isArray(stats.uploads._types.byte)) {
stats.uploads._types.byte = []
}
stats.uploads._types.byte.push('sizeInDb')
} else {
stats.uploads.total = await db.table('files')
@ -823,16 +843,18 @@ self.stats = async (req, res, next) => {
stats.uploads.images = await db.table('files')
.where(function () {
for (const ext of self.imageExts)
for (const ext of self.imageExts) {
this.orWhere('name', 'like', `%${ext}`)
}
})
.count('id as count')
.then(rows => rows[0].count)
stats.uploads.videos = await db.table('files')
.where(function () {
for (const ext of self.videoExts)
for (const ext of self.videoExts) {
this.orWhere('name', 'like', `%${ext}`)
}
})
.count('id as count')
.then(rows => rows[0].count)
@ -870,16 +892,18 @@ self.stats = async (req, res, next) => {
const users = await db.table('users')
stats.users.total = users.length
for (const user of users) {
if (user.enabled === false || user.enabled === 0)
if (user.enabled === false || user.enabled === 0) {
stats.users.disabled++
}
// This may be inaccurate on installations with customized permissions
user.permission = user.permission || 0
for (const p of permissionKeys)
for (const p of permissionKeys) {
if (user.permission === perms.permissions[p]) {
stats.users[p]++
break
}
}
}
// Update cache
@ -926,8 +950,7 @@ self.stats = async (req, res, next) => {
stats.albums.zipGenerated++
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}))

3
gulpfile.js

@ -29,8 +29,9 @@ const postcssPlugins = [
sass.compiler = sassCompiler
// Minify on production
if (process.env.NODE_ENV !== 'development')
if (process.env.NODE_ENV !== 'development') {
postcssPlugins.push(cssnano())
}
/** TASKS: LINT */

3
logger.js

@ -54,8 +54,9 @@ self.debug = (...args) => {
Object.assign(options, args[args.length - 1])
args.splice(args.length - 1, 1)
}
for (const arg of args)
for (const arg of args) {
console.log(inspect(arg, options))
}
}
module.exports = self

63
lolisafe.js

@ -36,11 +36,13 @@ safe.use(helmet({
hsts: false
}))
if (config.hsts instanceof Object && Object.keys(config.hsts).length)
if (config.hsts instanceof Object && Object.keys(config.hsts).length) {
safe.use(helmet.hsts(config.hsts))
}
if (config.trustProxy)
if (config.trustProxy) {
safe.set('trust proxy', 1)
}
// https://mozilla.github.io/nunjucks/api.html#configure
nunjucks.configure('views', {
@ -52,12 +54,14 @@ safe.set('view engine', 'njk')
safe.enable('view cache')
// Configure rate limits
if (Array.isArray(config.rateLimits) && config.rateLimits.length)
if (Array.isArray(config.rateLimits) && config.rateLimits.length) {
for (const rateLimit of config.rateLimits) {
const limiter = new RateLimit(rateLimit.config)
for (const route of rateLimit.routes)
for (const route of rateLimit.routes) {
safe.use(route, limiter)
}
}
}
safe.use(bodyParser.urlencoded({ extended: true }))
safe.use(bodyParser.json())
@ -117,24 +121,27 @@ if (config.cacheControl) {
// If using CDN, cache public pages in CDN
if (config.cacheControl !== 2) {
cdnPages.push('api/check')
for (const page of cdnPages)
for (const page of cdnPages) {
safe.use(`/${page === 'home' ? '' : page}`, (req, res, next) => {
res.set('Cache-Control', cacheControls.cdn)
next()
})
}
}
// If serving uploads with node
if (config.serveFilesWithNode)
if (config.serveFilesWithNode) {
initServeStaticUploads({
setHeaders: res => {
res.set('Access-Control-Allow-Origin', '*')
// If using CDN, cache uploads in CDN as well
// Use with cloudflare.purgeCache enabled in config file
if (config.cacheControl !== 2)
if (config.cacheControl !== 2) {
res.set('Cache-Control', cacheControls.cdn)
}
}
})
}
// Function for static assets.
// This requires the assets to use version in their query string,
@ -148,10 +155,11 @@ if (config.cacheControl) {
safe.use(['/api/album/zip'], (req, res, next) => {
res.set('Access-Control-Allow-Origin', '*')
const versionString = parseInt(req.query.v)
if (versionString > 0)
if (versionString > 0) {
res.set('Cache-Control', cacheControls.static)
else
} else {
res.set('Cache-Control', cacheControls.disable)
}
next()
})
} else if (config.serveFilesWithNode) {
@ -182,32 +190,36 @@ safe.use('/api', api)
// Re-map version strings if cache control is enabled (safe.fiery.me)
utils.versionStrings = {}
if (config.cacheControl) {
for (const type in versions)
for (const type in versions) {
utils.versionStrings[type] = `?_=${versions[type]}`
if (versions['1'])
}
if (versions['1']) {
utils.clientVersion = versions['1']
}
}
// Cookie Policy
if (config.cookiePolicy)
if (config.cookiePolicy) {
config.pages.push('cookiepolicy')
}
// Check for custom pages, otherwise fallback to Nunjucks templates
for (const page of config.pages) {
const customPage = path.join(paths.customPages, `${page}.html`)
if (!await paths.access(customPage).catch(() => true))
if (!await paths.access(customPage).catch(() => true)) {
safe.get(`/${page === 'home' ? '' : page}`, (req, res, next) => res.sendFile(customPage))
else if (page === 'home')
} else if (page === 'home') {
safe.get('/', (req, res, next) => res.render(page, {
config,
versions: utils.versionStrings,
gitHash: utils.gitHash
}))
else
} else {
safe.get(`/${page}`, (req, res, next) => res.render(page, {
config,
versions: utils.versionStrings
}))