Browse Source

Merge remote-tracking branch 'upstream/safe.fiery.me'

master
Kody 8 months ago
parent
commit
d9df2226b7
  1. 43
      .eslintrc.js
  2. 2
      .github/workflows/build.yml
  3. 19
      README.md
  4. 53
      config.sample.js
  5. 195
      controllers/albumsController.js
  6. 128
      controllers/authController.js
  7. 3
      controllers/multerStorageController.js
  8. 17
      controllers/pathsController.js
  9. 3
      controllers/permissionController.js
  10. 17
      controllers/tokenController.js
  11. 468
      controllers/uploadController.js
  12. 748
      controllers/utilsController.js
  13. 9
      database/db.js
  14. 3
      database/migration.js
  15. 2
      dist/css/album.css.map
  16. 2
      dist/css/dashboard.css
  17. 2
      dist/css/dashboard.css.map
  18. 2
      dist/css/home.css.map
  19. 2
      dist/css/player.css
  20. 1
      dist/css/player.css.map
  21. 4
      dist/css/style.css
  22. 2
      dist/css/style.css.map
  23. 2
      dist/css/sweetalert.css.map
  24. 2
      dist/css/thumbs.css.map
  25. 2
      dist/js/album.js.map
  26. 2
      dist/js/auth.js
  27. 2
      dist/js/auth.js.map
  28. 2
      dist/js/dashboard.js
  29. 2
      dist/js/dashboard.js.map
  30. 2
      dist/js/home.js
  31. 2
      dist/js/home.js.map
  32. 2
      dist/js/misc/newsfeed.js.map
  33. 2
      dist/js/misc/render.js.map
  34. 2
      dist/js/misc/utils.js.map
  35. 2
      dist/js/player.js
  36. 1
      dist/js/player.js.map
  37. 2
      dist/libs/fontello/fontello.css
  38. 2
      dist/libs/fontello/fontello.css.map
  39. 25
      gulpfile.js
  40. 3
      logger.js
  41. 111
      lolisafe.js
  42. 36
      package.json
  43. 21
      public/libs/bulma-collapsible/LICENSE
  44. 1
      public/libs/bulma-collapsible/bulma-collapsible.min.css
  45. 1
      public/libs/bulma-collapsible/bulma-collapsible.min.js
  46. 6
      public/libs/fontello/config.json
  47. BIN
      public/libs/fontello/fontello.eot
  48. 2
      public/libs/fontello/fontello.svg
  49. BIN
      public/libs/fontello/fontello.ttf
  50. BIN
      public/libs/fontello/fontello.woff
  51. BIN
      public/libs/fontello/fontello.woff2
  52. 13
      public/libs/video-js/LICENSE
  53. 114
      public/libs/video-js/font/VideoJS.svg
  54. BIN
      public/libs/video-js/font/VideoJS.ttf
  55. BIN
      public/libs/video-js/font/VideoJS.woff
  56. 89
      public/libs/video-js/lang/en.js
  57. 89
      public/libs/video-js/lang/en.json
  58. 1
      public/libs/video-js/video-js.min.css
  59. 25
      public/libs/video-js/video.min.js
  60. 13
      public/libs/videojs-seek-buttons/LICENSE
  61. 2
      public/libs/videojs-seek-buttons/videojs-seek-buttons.min.css
  62. 2
      public/libs/videojs-seek-buttons/videojs-seek-buttons.min.js
  63. 20
      public/libs/videojs-wavesurfer/LICENSE
  64. 3
      public/libs/videojs-wavesurfer/videojs.wavesurfer.min.css
  65. 8
      public/libs/videojs-wavesurfer/videojs.wavesurfer.min.js
  66. 29
      public/libs/wavesurfer-js/LICENSE
  67. 7
      public/libs/wavesurfer-js/wavesurfer.min.js
  68. 14
      routes/album.js
  69. 4
      routes/api.js
  70. 16
      routes/player.js
  71. 19
      scripts/README.md
  72. 19
      scripts/bump-versions.js
  73. 13
      scripts/cf-purge.js
  74. 43
      scripts/clean-up.js
  75. 24
      scripts/delete-expired.js
  76. 84
      scripts/rebuild-hashes.js
  77. 135
      scripts/thumbs.js
  78. 3
      src/README.md
  79. 11
      src/css/dashboard.scss
  80. 14
      src/css/player.scss
  81. 12
      src/css/style.scss
  82. 42
      src/js/.eslintrc.js
  83. 3
      src/js/album.js
  84. 32
      src/js/auth.js
  85. 683
      src/js/dashboard.js
  86. 229
      src/js/home.js
  87. 21
      src/js/misc/newsfeed.js
  88. 37
      src/js/misc/render.js
  89. 25
      src/js/misc/utils.js
  90. 186
      src/js/player.js
  91. 1
      src/libs/fontello/fontello.css
  92. 4
      src/versions.json
  93. 31
      views/album.njk
  94. 2
      views/auth.njk
  95. 9
      views/dashboard.njk
  96. 2
      views/home.njk
  97. 99
      views/player.njk
  98. 2142
      yarn.lock

43
.eslintrc.js

@ -1,37 +1,20 @@
module.exports = {
"root": true,
"parserOptions": {
"ecmaVersion": 9 // 2018
root: true,
parserOptions: {
ecmaVersion: 9 // 2018
},
"env": {
"node": true
env: {
node: true
},
"extends": [
"standard"
extends: [
'standard'
],
"rules": {
"curly": [
"error",
"multi",
"consistent"
rules: {
'no-throw-literal': 0,
'object-shorthand': [
'error',
'always'
],
"no-throw-literal": 0,
"no-var": "error",
"prefer-const": [
"error",
{
"destructuring": "any",
"ignoreReadBeforeAssign": false
}
],
"object-shorthand": [
"error",
"always"
],
"quotes": [
"error",
"single"
],
"standard/no-callback-literal": 0
'node/no-callback-literal': 0
}
}

2
.github/workflows/build.yml

@ -18,7 +18,7 @@ jobs:
- uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v1
uses: actions/setup-node@v2
with:
node-version: '12.x'

19
README.md

@ -94,16 +94,15 @@ To generate thumbnails for those files, you can use `yarn thumbs`.
```none
$ yarn thumbs
$ node ./scripts/thumbs.js
Generate thumbnails.
Usage :
Usage:
node scripts/thumbs.js <mode=1|2|3> [force=0|1] [verbose=0|1] [cfcache=0|1]
mode : 1 = images only, 2 = videos only, 3 = both images and videos
force : 0 = no force (default), 1 = overwrite existing thumbnails
verbose: 0 = only print missing thumbs (default), 1 = print all
cfcache: 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache
mode : 1 = images only, 2 = videos only, 3 = both images and videos
force : 0 = no force (default), 1 = overwrite existing thumbnails
verbose : 0 = only print missing thumbs (default), 1 = print all, 2 = print nothing
cfcache : 0 = do not clear cloudflare cache (default), 1 = clear cloudflare cache
```
For example, if you only want to generate thumbnails for image files without overwriting existing ones, you can run `yarn thumbs 1`, or if you want to generate thumbnails for both image and video files, while also overwriting existing ones, you can run `yarn thumbs 3 1`.
@ -112,12 +111,12 @@ You will also need to use this script to overwrite existing thumbnails if you wa
## ClamAV support
This fork has an optional virus scanning support using [ClamAV](https://www.clamav.net/), through [clamdjs](https://github.com/NingLin-P/clamdjs) library.
This fork has an optional virus scanning support using [ClamAV](https://www.clamav.net/), utilizing [clamscan](https://github.com/kylefarris/clamscan) library (Linux and OS X only).
It will scan new files right after they are uploaded. It will then alert the uploaders of the virus names in ClamAV's database if their files are dirty.
Unfortunately, this will slow down uploads processing as it has to wait for scan results before responding the uploaders, however it's still highly recommended for public usage (or at least if you find Google Safe Search too annoying).
Unfortunately, this will slow down uploads processing as it has to wait for the scans before responding the uploaders. However, it's still highly recommended for public usage, or if you're like me who find the constant buzzing from Google Safe Search too annoying.
To enable this, make sure you have ClamAV daemon running, then fill in the daemon's IP and port into your config file.
To enable this, make sure you have [ClamAV installed](https://github.com/kylefarris/clamscan#to-use-local-binary-method-of-scanning), or additionally have [ClamAV daemon running](https://github.com/kylefarris/clamscan#to-use-clamav-using-tcp-sockets). Afterwards configure `uploads.scan` options, and more importantly its sub-option `clamOptions`. Read more about it in the `config.sample.js` file.
From the config file you can also choose to exclude certain extensions from being scanned to lessen the burden on your server.
Additionally, you can also configure usergroups bypass, extensions whitelist, and max file size, to lessen the burden on your server.

53
config.sample.js

@ -38,6 +38,14 @@ module.exports = {
*/
setContentDisposition: false,
/*
If you serve files with node, you can optionally choose to
override Content-Type header for certain extension names.
*/
overrideContentTypes: {
// 'text/plain': ['html', 'htm', 'shtml', 'xhtml']
},
/*
If you are serving your files with a different domain than your lolisafe homepage,
then fill this option with your lolisafe homepage, otherwise any falsy value.
@ -247,6 +255,12 @@ module.exports = {
timeout: 30 * 60 * 1000 // 30 minutes
},
/*
Folder where in-progress chunks should be kept temporarily.
NOTE: When set to falsy value, defaults to "chunks" subfolder within uploads folder.
*/
chunksFolder: null,
/*
Max file size allowed for upload by URLs. Needs to be in MB.
NOTE: Set to falsy value to disable upload by URLs.
@ -340,8 +354,7 @@ module.exports = {
temporaryUploadsInterval: 1 * 60000, // 1 minute
/*
Scan files using ClamAV through clamd.
https://github.com/NingLin-P/clamdjs#scannerscanfilepath-timeout-chunksize
Scan uploads for threats with ClamAV.
groupBypass: Name of the lowest ranked group whose files will not be scanned.
Lowest ranked meaning that group AND any groups higher than it are included.
@ -349,12 +362,6 @@ module.exports = {
*/
scan: {
enabled: false,
ip: '127.0.0.1',
port: 3310,
timeout: 180 * 1000,
chunkSize: 64 * 1024,
groupBypass: 'admin', // Other group names in controllers/permissionController.js
whitelistExtensions: null, /* [
'.webp',
@ -372,7 +379,27 @@ module.exports = {
'.mov',
'.mkv'
], */
maxSize: null // '25MB' // Needs to be in MB
// Make sure maxSize is no bigger than the max size you configured for your ClamAV
maxSize: null, // Needs to be in MB
// https://github.com/kylefarris/clamscan/tree/v1.3.3#getting-started
// Breaking options (do not use): remove_infected, quarantine_infected
// Untested options (may work): scan_log, debug_mode, file_list, scan_recursively
// Supported options: clamscan, clamdscan, preference
clamOptions: {
// clamscan: {},
clamdscan: {
// When both socket and host+port are specified, it will only use socket
socket: '/var/run/clamav/clamd.ctl',
host: '127.0.0.1',
port: 3310,
timeout: 1 * 60 * 1000, // 1 minute
multiscan: true,
reload_db: false,
active: true
},
preference: 'clamdscan'
}
},
/*
@ -554,14 +581,6 @@ module.exports = {
*/
cacheControl: false,
/*
Enable Linux-only extended disk stats in Dashboard's Statistics.
This will use "du" binary to query disk usage of each directories within uploads directory.
Disabled by default as I personally found it to be very slow with +100k uploads
with my ancient potato server.
*/
linuxDiskStats: false,
/*
Folder where to store logs.
NOTE: This is currently unused.

195
controllers/albumsController.js

@ -5,6 +5,7 @@ const randomstring = require('randomstring')
const Zip = require('jszip')
const paths = require('./pathsController')
const perms = require('./permissionController')
const uploadController = require('./uploadController')
const utils = require('./utilsController')
const config = require('./../config')
const logger = require('./../logger')
@ -29,14 +30,10 @@ const zipOptions = config.uploads.jsZipOptions
zipOptions.type = 'nodebuffer'
// Apply fallbacks for missing config values
if (zipOptions.streamFiles === undefined)
zipOptions.streamFiles = true
if (zipOptions.compression === undefined)
zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined)
zipOptions.compressionOptions = {}
if (zipOptions.compressionOptions.level === undefined)
zipOptions.compressionOptions.level = 1
if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true
if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE'
if (zipOptions.compressionOptions === undefined) zipOptions.compressionOptions = {}
if (zipOptions.compressionOptions.level === undefined) zipOptions.compressionOptions.level = 1
self.zipEmitters = new Map()
@ -51,8 +48,7 @@ class ZipEmitter extends EventEmitter {
self.getUniqueRandomName = async () => {
for (let i = 0; i < utils.idMaxTries; i++) {
const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
if (self.onHold.has(identifier))
continue
if (self.onHold.has(identifier)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(identifier)
@ -78,17 +74,17 @@ self.list = async (req, res, next) => {
if (!user) return
const all = req.headers.all === '1'
const sidebar = req.headers.sidebar
const simple = req.headers.simple
const ismoderator = perms.is(user, 'moderator')
if (all && !ismoderator)
return res.status(403).end()
if (all && !ismoderator) return res.status(403).end()
const filter = function () {
if (!all)
if (!all) {
this.where({
enabled: 1,
userid: user.id
})
}
}
try {
@ -97,16 +93,14 @@ self.list = async (req, res, next) => {
.where(filter)
.count('id as count')
.then(rows => rows[0].count)
if (!count)
return res.json({ success: true, albums: [], count })
if (!count) return res.json({ success: true, albums: [], count })
const fields = ['id', 'name']
let albums
if (sidebar) {
if (simple) {
albums = await db.table('albums')
.where(filter)
.limit(9)
.select(fields)
return res.json({ success: true, albums, count })
@ -116,8 +110,7 @@ self.list = async (req, res, next) => {
else if (offset < 0) offset = Math.max(0, Math.ceil(count / 25) + offset)
fields.push('identifier', 'enabled', 'timestamp', 'editedAt', 'download', 'public', 'description')
if (all)
fields.push('userid')
if (all) fields.push('userid')
albums = await db.table('albums')
.where(filter)
@ -140,13 +133,14 @@ self.list = async (req, res, next) => {
.whereIn('albumid', Object.keys(albumids))
.select('albumid')
for (const upload of uploads)
if (albumids[upload.albumid])
for (const upload of uploads) {
if (albumids[upload.albumid]) {
albumids[upload.albumid].uploads++
}
}
// If we are not listing all albums, send response
if (!all)
return res.json({ success: true, albums, count, homeDomain })
if (!all) return res.json({ success: true, albums, count, homeDomain })
// Otherwise proceed to querying usernames
const userids = albums
@ -156,8 +150,7 @@ self.list = async (req, res, next) => {
})
// If there are no albums attached to a registered user, send response
if (userids.length === 0)
return res.json({ success: true, albums, count, homeDomain })
if (!userids.length) return res.json({ success: true, albums, count, homeDomain })
// Query usernames of user IDs from currently selected files
const usersTable = await db.table('users')
@ -165,8 +158,9 @@ self.list = async (req, res, next) => {
.select('id', 'username')
const users = {}
for (const user of usersTable)
for (const user of usersTable) {
users[user.id] = user.username
}
return res.json({ success: true, albums, count, users, homeDomain })
} catch (error) {
@ -183,8 +177,7 @@ self.create = async (req, res, next) => {
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
if (!name)
return res.json({ success: false, description: 'No album name specified.' })
if (!name) return res.json({ success: false, description: 'No album name specified.' })
try {
const album = await db.table('albums')
@ -195,8 +188,7 @@ self.create = async (req, res, next) => {
})
.first()
if (album)
return res.json({ success: false, description: 'There is already an album with that name.' })
if (album) return res.json({ success: false, description: 'There is already an album with that name.' })
const identifier = await self.getUniqueRandomName()
@ -235,8 +227,7 @@ self.disable = async (req, res, next) => {
const id = req.body.id
const purge = req.body.purge
if (!Number.isFinite(id))
return res.json({ success: false, description: 'No album specified.' })
if (!Number.isFinite(id)) return res.json({ success: false, description: 'No album specified.' })
try {
if (purge) {
@ -249,8 +240,7 @@ self.disable = async (req, res, next) => {
if (files.length) {
const ids = files.map(file => file.id)
const failed = await utils.bulkDeleteFromDb('id', ids, user)
if (failed.length)
return res.json({ success: false, failed })
if (failed.length) return res.json({ success: false, failed })
}
utils.invalidateStatsCache('uploads')
}
@ -291,24 +281,23 @@ self.edit = async (req, res, next) => {
const ismoderator = perms.is(user, 'moderator')
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No album specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No album specified.' })
const name = typeof req.body.name === 'string'
? utils.escape(req.body.name.trim().substring(0, self.titleMaxLength))
: ''
if (!name)
return res.json({ success: false, description: 'No name specified.' })
if (!name) return res.json({ success: false, description: 'No name specified.' })
const filter = function () {
this.where('id', id)
if (!ismoderator)
if (!ismoderator) {
this.andWhere({
enabled: 1,
userid: user.id
})
}
}
try {
@ -316,13 +305,14 @@ self.edit = async (req, res, next) => {
.where(filter)
.first()
if (!album)
if (!album) {
return res.json({ success: false, description: 'Could not get album with the specified ID.' })
else if (album.id !== id)
} else if (album.id !== id) {
return res.json({ success: false, description: 'Name already in use.' })
else if (req._old && (album.id === id))
} else if (req._old && (album.id === id)) {
// Old rename API
return res.json({ success: false, description: 'You did not specify a new name.' })
}
const update = {
name,
@ -333,11 +323,13 @@ self.edit = async (req, res, next) => {
: ''
}
if (ismoderator)
if (ismoderator && typeof req.body.enabled !== 'undefined') {
update.enabled = Boolean(req.body.enabled)
}
if (req.body.requestLink)
if (req.body.requestLink) {
update.identifier = await self.getUniqueRandomName()
}
await db.table('albums')
.where(filter)
@ -353,10 +345,9 @@ self.edit = async (req, res, next) => {
const oldZip = path.join(paths.zips, `${album.identifier}.zip`)
const newZip = path.join(paths.zips, `${update.identifier}.zip`)
await paths.rename(oldZip, newZip)
} catch (err) {
} catch (error) {
// Re-throw error
if (err.code !== 'ENOENT')
throw err
if (error.code !== 'ENOENT') throw error
}
return res.json({
@ -380,8 +371,9 @@ self.rename = async (req, res, next) => {
self.get = async (req, res, next) => {
const identifier = req.params.identifier
if (identifier === undefined)
if (identifier === undefined) {
return res.status(401).json({ success: false, description: 'No identifier provided.' })
}
try {
const album = await db.table('albums')
@ -391,16 +383,9 @@ self.get = async (req, res, next) => {
})
.first()
if (!album)
return res.json({
success: false,
description: 'Album not found.'
})
else if (album.public === 0)
return res.status(403).json({
success: false,
description: 'This album is not available for public.'
})
if (!album || album.public === 0) {
return res.status(404).json({ success: false, description: 'The album could not be found.' })
}
const title = album.name
const files = await db.table('files')
@ -409,16 +394,24 @@ self.get = async (req, res, next) => {
.orderBy('id', 'desc')
for (const file of files) {
file.file = `${config.domain}/${file.name}`
if (req._upstreamCompat) {
file.url = `${config.domain}/${file.name}`
} else {
file.file = `${config.domain}/${file.name}`
}
const extname = utils.extname(file.name)
if (utils.mayGenerateThumb(extname))
if (utils.mayGenerateThumb(extname)) {
file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
if (req._upstreamCompat) file.thumbSquare = file.thumb
}
}
return res.json({
success: true,
description: 'Successfully retrieved files.',
title,
download: Boolean(album.download),
count: files.length,
files
})
@ -432,17 +425,19 @@ self.generateZip = async (req, res, next) => {
const versionString = parseInt(req.query.v)
const identifier = req.params.identifier
if (identifier === undefined)
if (identifier === undefined) {
return res.status(401).json({
success: false,
description: 'No identifier provided.'
})
}
if (!config.uploads.generateZips)
if (!config.uploads.generateZips) {
return res.status(401).json({
success: false,
description: 'Zip generation disabled.'
description: 'ZIP generation disabled.'
})
}
try {
const album = await db.table('albums')
@ -452,32 +447,35 @@ self.generateZip = async (req, res, next) => {
})
.first()
if (!album)
if (!album) {
return res.json({ success: false, description: 'Album not found.' })
else if (album.download === 0)
} else if (album.download === 0) {
return res.json({ success: false, description: 'Download for this album is disabled.' })
}
if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
if ((isNaN(versionString) || versionString <= 0) && album.editedAt) {
return res.redirect(`${album.identifier}?v=${album.editedAt}`)
}
if (album.zipGeneratedAt > album.editedAt)
if (album.zipGeneratedAt > album.editedAt) {
try {
const filePath = path.join(paths.zips, `${identifier}.zip`)
await paths.access(filePath)
return res.download(filePath, `${album.name}.zip`)
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}
if (self.zipEmitters.has(identifier)) {
logger.log(`Waiting previous zip task for album: ${identifier}.`)
return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
if (filePath && fileName)
if (filePath && fileName) {
res.download(filePath, fileName)
else if (json)
} else if (json) {
res.json(json)
}
})
}
@ -554,13 +552,50 @@ self.generateZip = async (req, res, next) => {
}
}
self.listFiles = async (req, res, next) => {
if (req.params.page === undefined) {
// Map to /api/album/get, but with lolisafe upstream compatibility, when accessed with this API route
req.params.identifier = req.params.id
delete req.params.id
req._upstreamCompat = true
res._json = res.json
res.json = (body = {}) => {
// Rebuild JSON payload to match lolisafe upstream
const rebuild = {}
const maps = {
success: null,
description: 'message',
title: 'name',
download: 'downloadEnabled',
count: null
}
Object.keys(body).forEach(key => {
if (maps[key] !== undefined) {
if (maps[key]) rebuild[maps[key]] = body[key]
} else {
rebuild[key] = body[key]
}
})
if (rebuild.message) rebuild.message = rebuild.message.replace(/\.$/, '')
return res._json(rebuild)
}
return self.get(req, res, next)
} else {
return uploadController.list(req, res, next)
}
}
self.addFiles = async (req, res, next) => {
const user = await utils.authorize(req, res)
if (!user) return
const ids = req.body.ids
if (!Array.isArray(ids) || !ids.length)
if (!Array.isArray(ids) || !ids.length) {
return res.json({ success: false, description: 'No files specified.' })
}
let albumid = parseInt(req.body.albumid)
if (isNaN(albumid) || albumid < 0) albumid = null
@ -572,16 +607,18 @@ self.addFiles = async (req, res, next) => {
const album = await db.table('albums')
.where('id', albumid)
.where(function () {
if (user.username !== 'root')
if (user.username !== 'root') {
this.where('userid', user.id)
}
})
.first()
if (!album)
if (!album) {
return res.json({
success: false,
description: 'Album does not exist or it does not belong to the user.'
})
}
albumids.push(albumid)
}
@ -597,8 +634,9 @@ self.addFiles = async (req, res, next) => {
.update('albumid', albumid)
files.forEach(file => {
if (file.albumid && !albumids.includes(file.albumid))
if (file.albumid && !albumids.includes(file.albumid)) {
albumids.push(file.albumid)
}
})
await db.table('albums')
@ -609,13 +647,14 @@ self.addFiles = async (req, res, next) => {
return res.json({ success: true, failed })
} catch (error) {
logger.error(error)
if (failed.length === ids.length)
if (failed.length === ids.length) {
return res.json({
success: false,
description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
})
else
} else {
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
}
}
}

128
controllers/authController.js

@ -34,31 +34,30 @@ self.verify = async (req, res, next) => {
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (!username)
return res.json({ success: false, description: 'No username provided.' })
if (!username) return res.json({ success: false, description: 'No username provided.' })
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (!password)
return res.json({ success: false, description: 'No password provided.' })
if (!password) return res.json({ success: false, description: 'No password provided.' })
try {
const user = await db.table('users')
.where('username', username)
.first()
if (!user)
return res.json({ success: false, description: 'Username does not exist.' })
if (!user) return res.json({ success: false, description: 'Username does not exist.' })
if (user.enabled === false || user.enabled === 0)
if (user.enabled === false || user.enabled === 0) {
return res.json({ success: false, description: 'This account has been disabled.' })
}
const result = await bcrypt.compare(password, user.password)
if (result === false)
if (result === false) {
return res.json({ success: false, description: 'Wrong password.' })
else
} else {
return res.json({ success: true, token: user.token })
}
} catch (error) {
logger.error(error)
return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
@ -66,34 +65,46 @@ self.verify = async (req, res, next) => {
}
self.register = async (req, res, next) => {
if (config.enableUserAccounts === false)
if (config.enableUserAccounts === false) {
return res.json({ success: false, description: 'Registration is currently disabled.' })
}
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < self.user.min || username.length > self.user.max)
return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
if (username.length < self.user.min || username.length > self.user.max) {
return res.json({
success: false,
description: `Username must have ${self.user.min}-${self.user.max} characters.`
})
}
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
try {
const user = await db.table('users')
.where('username', username)
.first()
if (user)
return res.json({ success: false, description: 'Username already exists.' })
if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
if (!token)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!token) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
await db.table('users')
.insert({
@ -121,8 +132,12 @@ self.changePassword = async (req, res, next) => {
const password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
try {
const hash = await bcrypt.hash(password, saltRounds)
@ -139,12 +154,13 @@ self.changePassword = async (req, res, next) => {
}
self.assertPermission = (user, target) => {
if (!target)
if (!target) {
throw new Error('Could not get user with the specified ID.')
else if (!perms.higher(user, target))
} else if (!perms.higher(user, target)) {
throw new Error('The user is in the same or higher group as you.')
else if (target.username === 'root')
} else if (target.username === 'root') {
throw new Error('Root user may not be tampered with.')
}
}
self.createUser = async (req, res, next) => {
@ -152,21 +168,28 @@ self.createUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const username = typeof req.body.username === 'string'
? req.body.username.trim()
: ''
if (username.length < self.user.min || username.length > self.user.max)
return res.json({ success: false, description: `Username must have ${self.user.min}-${self.user.max} characters.` })
if (username.length < self.user.min || username.length > self.user.max) {
return res.json({
success: false,
description: `Username must have ${self.user.min}-${self.user.max} characters.`
})
}
let password = typeof req.body.password === 'string'
? req.body.password.trim()
: ''
if (password.length) {
if (password.length < self.pass.min || password.length > self.pass.max)
return res.json({ success: false, description: `Password must have ${self.pass.min}-${self.pass.max} characters.` })
if (password.length < self.pass.min || password.length > self.pass.max) {
return res.json({
success: false,
description: `Password must have ${self.pass.min}-${self.pass.max} characters.`
})
}
} else {
password = randomstring.generate(self.pass.rand)
}
@ -186,14 +209,17 @@ self.createUser = async (req, res, next) => {
.where('username', username)
.first()
if (user)
return res.json({ success: false, description: 'Username already exists.' })
if (user) return res.json({ success: false, description: 'Username already exists.' })
const hash = await bcrypt.hash(password, saltRounds)
const token = await tokens.generateUniqueToken()
if (!token)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!token) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
await db.table('users')
.insert({
@ -219,12 +245,10 @@ self.editUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@ -236,17 +260,20 @@ self.editUser = async (req, res, next) => {
if (req.body.username !== undefined) {
update.username = String(req.body.username).trim()
if (update.username.length < self.user.min || update.username.length > self.user.max)
if (update.username.length < self.user.min || update.username.length > self.user.max) {
throw new Error(`Username must have ${self.user.min}-${self.user.max} characters.`)
}
}
if (req.body.enabled !== undefined)
if (req.body.enabled !== undefined) {
update.enabled = Boolean(req.body.enabled)
}
if (req.body.group !== undefined) {
update.permission = perms.permissions[req.body.group]
if (typeof update.permission !== 'number' || update.permission < 0)
if (typeof update.permission !== 'number' || update.permission < 0) {
update.permission = target.permission
}
}
let password
@ -282,13 +309,11 @@ self.deleteUser = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
const id = parseInt(req.body.id)
const purge = req.body.purge
if (isNaN(id))
return res.json({ success: false, description: 'No user specified.' })
if (isNaN(id)) return res.json({ success: false, description: 'No user specified.' })
try {
const target = await db.table('users')
@ -304,8 +329,7 @@ self.deleteUser = async (req, res, next) => {
const fileids = files.map(file => file.id)
if (purge) {
const failed = await utils.bulkDeleteFromDb('id', fileids, user)
if (failed.length)
return res.json({ success: false, failed })
if (failed.length) return res.json({ success: false, failed })
utils.invalidateStatsCache('uploads')
} else {
// Clear out userid attribute from the files
@ -315,7 +339,8 @@ self.deleteUser = async (req, res, next) => {
}
}
// TODO: Figure out obstacles of just deleting the albums
// TODO: Figure out why we can't just delete the albums from DB
// DISCLAIMER: Upstream always had it coded this way for some reason
const albums = await db.table('albums')
.where('userid', id)
.where('enabled', 1)
@ -333,8 +358,7 @@ self.deleteUser = async (req, res, next) => {
try {
await paths.unlink(path.join(paths.zips, `${album.identifier}.zip`))
} catch (error) {
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}))
}
@ -362,15 +386,13 @@ self.listUsers = async (req, res, next) => {
if (!user) return
const isadmin = perms.is(user, 'admin')
if (!isadmin)
return res.status(403).end()
if (!isadmin) return res.status(403).end()
try {
const count = await db.table('users')
.count('id as count')
.then(rows => rows[0].count)
if (!count)
return res.json({ success: true, users: [], count })
if (!count) return res.json({ success: true, users: [], count })
let offset = Number(req.params.page)
if (isNaN(offset)) offset = 0

3
controllers/multerStorageController.js

@ -36,8 +36,9 @@ DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) {
file._chunksData.stream = fs.createWriteStream(finalPath, { flags: 'a' })
file._chunksData.stream.on('error', onerror)
}
if (!file._chunksData.hasher)
if (!file._chunksData.hasher) {
file._chunksData.hasher = blake3.createHash()
}
outStream = file._chunksData.stream
hash = file._chunksData.hasher

17
controllers/pathsController.js

@ -9,6 +9,7 @@ const self = {}
// Promisify these fs functions
const fsFuncs = [
'access',
'copyFile',
'lstat',
'mkdir',
'readdir',
@ -20,11 +21,14 @@ const fsFuncs = [
'writeFile'
]
for (const fsFunc of fsFuncs)
for (const fsFunc of fsFuncs) {
self[fsFunc] = promisify(fs[fsFunc])
}
self.uploads = path.resolve(config.uploads.folder)
self.chunks = path.join(self.uploads, 'chunks')
self.chunks = config.uploads.chunksFolder
? path.resolve(config.uploads.chunksFolder)
: path.join(self.uploads, 'chunks')
self.thumbs = path.join(self.uploads, 'thumbs')
self.zips = path.join(self.uploads, 'zips')
@ -51,7 +55,7 @@ const verify = [
self.init = async () => {
// Check & create directories
for (const p of verify)
for (const p of verify) {
try {
await self.access(p)
} catch (err) {
@ -59,10 +63,10 @@ self.init = async () => {
throw err
} else {
const mkdir = await self.mkdir(p)
if (mkdir)
logger.log(`Created directory: ${p}`)
if (mkdir) logger.log(`Created directory: ${p}`)
}
}
}
// Purge any leftover in chunks directory
const uuidDirs = await self.readdir(self.chunks)
@ -74,8 +78,7 @@ self.init = async () => {
))
await self.rmdir(root)
}))
if (uuidDirs.length)
logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
if (uuidDirs.length) logger.log(`Purged ${uuidDirs.length} unfinished chunks`)
}
module.exports = self

3
controllers/permissionController.js

@ -11,8 +11,7 @@ self.permissions = {
// returns true if user is in the group OR higher
self.is = (user, group) => {
// root bypass
if (user.username === 'root')
return true
if (user.username === 'root') return true
const permission = user.permission || 0
return permission >= self.permissions[group]

17
controllers/tokenController.js

@ -14,8 +14,7 @@ const self = {
self.generateUniqueToken = async () => {
for (let i = 0; i < self.tokenMaxTries; i++) {
const token = randomstring.generate(self.tokenLength)
if (self.onHold.has(token))
continue
if (self.onHold.has(token)) continue
// Put token on-hold (wait for it to be inserted to DB)
self.onHold.add(token)
@ -40,8 +39,7 @@ self.verify = async (req, res, next) => {
? req.body.token.trim()
: ''
if (!token)
return res.json({ success: false, description: 'No token provided.' })
if (!token) return res.json({ success: false, description: 'No token provided.' })
try {
const user = await db.table('users')
@ -49,8 +47,7 @@ self.verify = async (req, res, next) => {
.select('username', 'permission')
.first()
if (!user)
return res.json({ success: false, description: 'Invalid token.' })
if (!user) return res.json({ success: false, description: 'Invalid token.' })
const obj = {
success: true,
@ -76,8 +73,12 @@ self.change = async (req, res, next) => {
if (!user) return
const newToken = await self.generateUniqueToken()
if (!newToken)
return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
if (!newToken) {
return res.json({
success: false,
description: 'Sorry, we could not allocate a unique token. Try again?'
})
}
try {
await db.table('users')

468
controllers/uploadController.js

File diff suppressed because it is too large

748
controllers/utilsController.js

@ -1,5 +1,4 @@
const { promisify } = require('util')
const { spawn } = require('child_process')
const fetch = require('node-fetch')
const ffmpeg = require('fluent-ffmpeg')
const path = require('path')
@ -12,13 +11,14 @@ const logger = require('./../logger')
const db = require('knex')(config.database)
const self = {
clamd: {
scanner: null,
timeout: config.uploads.scan.timeout || 5000,
chunkSize: config.uploads.scan.chunkSize || 64 * 1024,
clamscan: {
instance: null,
version: null,
groupBypass: config.uploads.scan.groupBypass || null,
whitelistExtensions: (Array.isArray(config.uploads.scan.whitelistExtensions) &&
config.uploads.scan.whitelistExtensions.length) ? config.uploads.scan.whitelistExtensions : null,
config.uploads.scan.whitelistExtensions.length)
? config.uploads.scan.whitelistExtensions
: null,
maxSize: (parseInt(config.uploads.scan.maxSize) * 1e6) || null
},
gitHash: null,
@ -28,6 +28,7 @@ const self = {
imageExts: ['.gif', '.jpeg', '.jpg', '.png', '.svg', '.tif', '.tiff', '.webp'],
videoExts: ['.3g2', '.3gp', '.asf', '.avchd', '.avi', '.divx', '.evo', '.flv', '.h264', '.h265', '.hevc', '.m2p', '.m2ts', '.m4v', '.mk3d', '.mkv', '.mov', '.mp4', '.mpeg', '.mpg', '.mxf', '.ogg', '.ogv', '.ps', '.qt', '.rmvb', '.ts', '.vob', '.webm', '.wmv'],
audioExts: ['.flac', '.mp3', '.wav', '.wma'],
thumbsSize: config.uploads.generateThumbs.size || 200,
ffprobe: promisify(ffmpeg.ffprobe),
@ -36,28 +37,33 @@ const self = {
timezoneOffset: new Date().getTimezoneOffset()
}
const statsCache = {
const statsData = {
system: {
title: 'System',
cache: null,
generating: false,
generatedAt: 0
},
disk: {
fileSystems: {
title: 'File Systems',
cache: null,
generating: false,
generatedAt: 0
},
albums: {
uploads: {
title: 'Uploads',
cache: null,
generating: false,
generatedAt: 0
},
users: {
title: 'Users',
cache: null,
generating: false,
generatedAt: 0
},
uploads: {
albums: {
title: 'Albums',
cache: null,
generating: false,
generatedAt: 0
@ -92,14 +98,16 @@ self.extname = filename => {
}
// check against extensions that must be preserved
for (const extPreserve of extPreserves)
for (const extPreserve of extPreserves) {
if (lower.endsWith(extPreserve)) {
extname = extPreserve
break
}
}
if (!extname)
if (!extname) {
extname = lower.slice(lower.lastIndexOf('.') - lower.length) // path.extname(lower)
}
return extname + multi
}
@ -110,14 +118,12 @@ self.escape = string => {
// Copyright(c) 2015 Andreas Lubbe
// Copyright(c) 2015 Tiancheng "Timothy" Gu
if (!string)
return string
if (!string) return string
const str = String(string)
const match = /["'&<>]/.exec(str)
if (!match)
return str
if (!match) return str
let escape
let html = ''
@ -145,8 +151,9 @@ self.escape = string => {
continue
}
if (lastIndex !== index)
if (lastIndex !== index) {
html += str.substring(lastIndex, index)
}
lastIndex = index + 1
html += escape
@ -203,16 +210,16 @@ self.generateThumbs = async (name, extname, force) => {
// Check if thumbnail already exists
try {
const lstat = await paths.lstat(thumbname)
if (lstat.isSymbolicLink())
if (lstat.isSymbolicLink()) {
// Unlink if symlink (should be symlink to the placeholder)
await paths.unlink(thumbname)
else if (!force)
} else if (!force) {
// Continue only if it does not exist, unless forced to
return true
}
} catch (error) {
// Re-throw error
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
// Full path to input file
@ -257,12 +264,14 @@ self.generateThumbs = async (name, extname, force) => {
const metadata = await self.ffprobe(input)
const duration = parseInt(metadata.format.duration)
if (isNaN(duration))
if (isNaN(duration)) {
throw 'Warning: File does not have valid duration metadata'
}
const videoStream = metadata.streams && metadata.streams.find(s => s.codec_type === 'video')
if (!videoStream || !videoStream.width || !videoStream.height)
if (!videoStream || !videoStream.width || !videoStream.height) {
throw 'Warning: File does not have valid video stream metadata'
}
await new Promise((resolve, reject) => {
ffmpeg(input)
@ -287,10 +296,11 @@ self.generateThumbs = async (name, extname, force) => {
await paths.lstat(thumbname)
return true
} catch (err) {
if (err.code === 'ENOENT')
if (err.code === 'ENOENT') {
throw error || 'Warning: FFMPEG exited with empty output file'
else
} else {
throw error || err
}
}
})
} else {
@ -361,8 +371,7 @@ self.unlinkFile = async (filename, predb) => {
await paths.unlink(path.join(paths.uploads, filename))
} catch (error) {
// Return true if file does not exist
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
const identifier = filename.split('.')[0]
@ -375,26 +384,26 @@ self.unlinkFile = async (filename, predb) => {
}
const extname = self.extname(filename)
if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
if (self.imageExts.includes(extname) || self.videoExts.includes(extname)) {
try {
await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
} catch (error) {
if (error.code !== 'ENOENT')
throw error
if (error.code !== 'ENOENT') throw error
}
}
}
self.bulkDeleteFromDb = async (field, values, user) => {
// Always return an empty array on failure
if (!user || !['id', 'name'].includes(field) || !values.length)
return []
if (!user || !['id', 'name'].includes(field) || !values.length) return []
// SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
// Read more: https://www.sqlite.org/limits.html
const MAX_VARIABLES_CHUNK_SIZE = 999
const chunks = []