Diffstat (limited to 'lib')
-rw-r--r--  lib/config/default.js | 8
-rw-r--r--  lib/config/dockerSecret.js | 11
-rw-r--r--  lib/config/environment.js | 9
-rw-r--r--  lib/config/hackmdEnvironment.js | 5
-rw-r--r--  lib/config/index.js | 26
-rw-r--r--  lib/config/oldEnvironment.js | 2
-rw-r--r--  lib/history.js | 7
-rw-r--r--  lib/letter-avatars.js | 3
-rw-r--r--  lib/logger.js | 2
-rw-r--r--  lib/migrations/20150702001020-update-to-0_3_1.js | 1
-rw-r--r--  lib/migrations/20160112220142-note-add-lastchange.js | 7
-rw-r--r--  lib/migrations/20160420180355-note-add-alias.js | 1
-rw-r--r--  lib/migrations/20160515114000-user-add-tokens.js | 1
-rw-r--r--  lib/migrations/20160607060246-support-revision.js | 1
-rw-r--r--  lib/migrations/20160703062241-support-authorship.js | 1
-rw-r--r--  lib/migrations/20161009040430-support-delete-note.js | 1
-rw-r--r--  lib/migrations/20161201050312-support-email-signin.js | 2
-rw-r--r--  lib/migrations/20171009121200-longtext-for-mysql.js | 20
-rw-r--r--  lib/migrations/20180209120907-longtext-of-authorship.js | 12
-rw-r--r--  lib/migrations/20180306150303-fix-enum.js | 4
-rw-r--r--  lib/models/author.js | 38
-rw-r--r--  lib/models/index.js | 20
-rw-r--r--  lib/models/note.js | 874
-rw-r--r--  lib/models/revision.js | 379
-rw-r--r--  lib/models/user.js | 235
-rw-r--r--  lib/realtime.js | 30
-rw-r--r--  lib/response.js | 35
-rw-r--r--  lib/utils.js | 2
-rw-r--r--  lib/web/auth/dropbox/index.js | 2
-rw-r--r--  lib/web/auth/email/index.js | 14
-rw-r--r--  lib/web/auth/facebook/index.js | 2
-rw-r--r--  lib/web/auth/github/index.js | 2
-rw-r--r--  lib/web/auth/gitlab/index.js | 2
-rw-r--r--  lib/web/auth/google/index.js | 7
-rw-r--r--  lib/web/auth/ldap/index.js | 8
-rw-r--r--  lib/web/auth/mattermost/index.js | 14
-rw-r--r--  lib/web/auth/oauth2/index.js | 2
-rw-r--r--  lib/web/auth/openid/index.js | 8
-rw-r--r--  lib/web/auth/saml/index.js | 9
-rw-r--r--  lib/web/auth/twitter/index.js | 2
-rw-r--r--  lib/web/auth/utils.js | 5
-rw-r--r--  lib/web/historyRouter.js | 2
-rw-r--r--  lib/web/imageRouter/azure.js | 8
-rw-r--r--  lib/web/imageRouter/filesystem.js | 8
-rw-r--r--  lib/web/imageRouter/imgur.js | 22
-rw-r--r--  lib/web/imageRouter/index.js | 7
-rw-r--r--  lib/web/imageRouter/lutim.js | 30
-rw-r--r--  lib/web/imageRouter/minio.js | 6
-rw-r--r--  lib/web/imageRouter/s3.js | 3
-rw-r--r--  lib/web/middleware/tooBusy.js | 3
-rw-r--r--  lib/web/noteRouter.js | 2
-rw-r--r--  lib/web/statusRouter.js | 6
-rw-r--r--  lib/web/userRouter.js | 2
-rw-r--r--  lib/workers/dmpWorker.js | 11
54 files changed, 1006 insertions(+), 918 deletions(-)
diff --git a/lib/config/default.js b/lib/config/default.js
index 9e401f38..12254d47 100644
--- a/lib/config/default.js
+++ b/lib/config/default.js
@@ -56,10 +56,15 @@ module.exports = {
// socket.io
heartbeatInterval: 5000,
heartbeatTimeout: 10000,
+ // too busy timeout
+ tooBusyLag: 70,
// document
documentMaxLength: 100000,
- // image upload setting, available options are imgur/s3/filesystem/azure
+ // image upload setting, available options are imgur/s3/filesystem/azure/lutim
imageUploadType: 'filesystem',
+ lutim: {
+ url: 'https://framapic.org/'
+ },
imgur: {
clientID: undefined
},
@@ -138,6 +143,7 @@ module.exports = {
idpCert: undefined,
issuer: undefined,
identifierFormat: 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress',
+ disableRequestedAuthnContext: false,
groupAttribute: undefined,
externalGroups: [],
requiredGroups: [],
diff --git a/lib/config/dockerSecret.js b/lib/config/dockerSecret.js
index fd66ddfe..50bf7fe2 100644
--- a/lib/config/dockerSecret.js
+++ b/lib/config/dockerSecret.js
@@ -13,11 +13,12 @@ function getSecret (secret) {
if (fs.existsSync(basePath)) {
module.exports = {
- sessionsecret: getSecret('sessionsecret'),
- sslkeypath: getSecret('sslkeypath'),
- sslcertpath: getSecret('sslcertpath'),
- sslcapath: getSecret('sslcapath'),
- dhparampath: getSecret('dhparampath'),
+ dbURL: getSecret('dbURL'),
+ sessionSecret: getSecret('sessionsecret'),
+ sslKeyPath: getSecret('sslkeypath'),
+ sslCertPath: getSecret('sslcertpath'),
+ sslCAPath: getSecret('sslcapath'),
+ dhParamPath: getSecret('dhparampath'),
s3: {
accessKeyId: getSecret('s3_acccessKeyId'),
secretAccessKey: getSecret('s3_secretAccessKey')
diff --git a/lib/config/environment.js b/lib/config/environment.js
index cdf87871..716f8b75 100644
--- a/lib/config/environment.js
+++ b/lib/config/environment.js
@@ -1,6 +1,6 @@
'use strict'
-const {toBooleanConfig, toArrayConfig, toIntegerConfig} = require('./utils')
+const { toBooleanConfig, toArrayConfig, toIntegerConfig } = require('./utils')
module.exports = {
sourceURL: process.env.CMD_SOURCE_URL,
@@ -14,7 +14,7 @@ module.exports = {
useSSL: toBooleanConfig(process.env.CMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.CMD_HSTS_ENABLE),
- maxAgeSeconds: process.env.CMD_HSTS_MAX_AGE,
+ maxAgeSeconds: toIntegerConfig(process.env.CMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.CMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.CMD_HSTS_PRELOAD)
},
@@ -33,6 +33,7 @@ module.exports = {
dbURL: process.env.CMD_DB_URL,
sessionSecret: process.env.CMD_SESSION_SECRET,
sessionLife: toIntegerConfig(process.env.CMD_SESSION_LIFE),
+ tooBusyLag: toIntegerConfig(process.env.CMD_TOOBUSY_LAG),
imageUploadType: process.env.CMD_IMAGE_UPLOAD_TYPE,
imgur: {
clientID: process.env.CMD_IMGUR_CLIENTID
@@ -50,6 +51,9 @@ module.exports = {
secure: toBooleanConfig(process.env.CMD_MINIO_SECURE),
port: toIntegerConfig(process.env.CMD_MINIO_PORT)
},
+ lutim: {
+ url: process.env.CMD_LUTIM_URL
+ },
s3bucket: process.env.CMD_S3_BUCKET,
azure: {
connectionString: process.env.CMD_AZURE_CONNECTION_STRING,
@@ -116,6 +120,7 @@ module.exports = {
idpCert: process.env.CMD_SAML_IDPCERT,
issuer: process.env.CMD_SAML_ISSUER,
identifierFormat: process.env.CMD_SAML_IDENTIFIERFORMAT,
+ disableRequestedAuthnContext: toBooleanConfig(process.env.CMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.CMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.CMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.CMD_SAML_REQUIREDGROUPS, '|', []),
diff --git a/lib/config/hackmdEnvironment.js b/lib/config/hackmdEnvironment.js
index e1c11569..dcfda0bc 100644
--- a/lib/config/hackmdEnvironment.js
+++ b/lib/config/hackmdEnvironment.js
@@ -1,6 +1,6 @@
'use strict'
-const {toBooleanConfig, toArrayConfig, toIntegerConfig} = require('./utils')
+const { toBooleanConfig, toArrayConfig, toIntegerConfig } = require('./utils')
module.exports = {
domain: process.env.HMD_DOMAIN,
@@ -10,7 +10,7 @@ module.exports = {
useSSL: toBooleanConfig(process.env.HMD_USESSL),
hsts: {
enable: toBooleanConfig(process.env.HMD_HSTS_ENABLE),
- maxAgeSeconds: process.env.HMD_HSTS_MAX_AGE,
+ maxAgeSeconds: toIntegerConfig(process.env.HMD_HSTS_MAX_AGE),
includeSubdomains: toBooleanConfig(process.env.HMD_HSTS_INCLUDE_SUBDOMAINS),
preload: toBooleanConfig(process.env.HMD_HSTS_PRELOAD)
},
@@ -109,6 +109,7 @@ module.exports = {
idpCert: process.env.HMD_SAML_IDPCERT,
issuer: process.env.HMD_SAML_ISSUER,
identifierFormat: process.env.HMD_SAML_IDENTIFIERFORMAT,
+ disableRequestedAuthnContext: toBooleanConfig(process.env.HMD_SAML_DISABLEREQUESTEDAUTHNCONTEXT),
groupAttribute: process.env.HMD_SAML_GROUPATTRIBUTE,
externalGroups: toArrayConfig(process.env.HMD_SAML_EXTERNALGROUPS, '|', []),
requiredGroups: toArrayConfig(process.env.HMD_SAML_REQUIREDGROUPS, '|', []),
diff --git a/lib/config/index.js b/lib/config/index.js
index cbe6c39c..ee4817b3 100644
--- a/lib/config/index.js
+++ b/lib/config/index.js
@@ -4,11 +4,11 @@
const crypto = require('crypto')
const fs = require('fs')
const path = require('path')
-const {merge} = require('lodash')
+const { merge } = require('lodash')
const deepFreeze = require('deep-freeze')
-const {Environment, Permission} = require('./enum')
+const { Environment, Permission } = require('./enum')
const logger = require('../logger')
-const {getGitCommit, getGitHubURL} = require('./utils')
+const { getGitCommit, getGitHubURL } = require('./utils')
const appRootPath = path.resolve(__dirname, '../../')
const env = process.env.NODE_ENV || Environment.development
@@ -17,7 +17,7 @@ const debugConfig = {
}
// Get version string from package.json
-const {version, repository} = require(path.join(appRootPath, 'package.json'))
+const { version, repository } = require(path.join(appRootPath, 'package.json'))
const commitID = getGitCommit(appRootPath)
const sourceURL = getGitHubURL(repository.url, commitID || version)
@@ -128,7 +128,7 @@ if (config.gitlab && config.gitlab.version !== 'v4' && config.gitlab.version !==
config.gitlab.version = 'v4'
}
// If gitlab scope is api, enable snippets Export/import
-config.isGitlabSnippetsEnable = (!config.gitlab.scope || config.gitlab.scope === 'api')
+config.isGitlabSnippetsEnable = (!config.gitlab.scope || config.gitlab.scope === 'api') && config.isGitLabEnable
// Only update i18n files in development setups
config.updateI18nFiles = (env === Environment.development)
@@ -152,20 +152,20 @@ for (let i = keys.length; i--;) {
// Notify users about the prefix change and inform them they use legacy prefix for environment variables
if (Object.keys(process.env).toString().indexOf('HMD_') !== -1) {
- logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/hackmdio/codimd#environment-variables-will-overwrite-other-server-configs')
+ logger.warn('Using legacy HMD prefix for environment variables. Please change your variables in future. For details see: https://github.com/codimd/server#environment-variables-will-overwrite-other-server-configs')
}
// Generate session secret if it stays on default values
if (config.sessionSecret === 'secret') {
logger.warn('Session secret not set. Using random generated one. Please set `sessionSecret` in your config.js file. All users will be logged out.')
config.sessionSecret = crypto.randomBytes(Math.ceil(config.sessionSecretLen / 2)) // generate crypto graphic random number
- .toString('hex') // convert to hexadecimal format
- .slice(0, config.sessionSecretLen) // return required number of characters
+ .toString('hex') // convert to hexadecimal format
+ .slice(0, config.sessionSecretLen) // return required number of characters
}
// Validate upload upload providers
-if (['filesystem', 's3', 'minio', 'imgur', 'azure'].indexOf(config.imageUploadType) === -1) {
- logger.error('"imageuploadtype" is not correctly set. Please use "filesystem", "s3", "minio", "azure" or "imgur". Defaulting to "filesystem"')
+if (['filesystem', 's3', 'minio', 'imgur', 'azure', 'lutim'].indexOf(config.imageUploadType) === -1) {
+ logger.error('"imageuploadtype" is not correctly set. Please use "filesystem", "s3", "minio", "azure", "lutim" or "imgur". Defaulting to "filesystem"')
config.imageUploadType = 'filesystem'
}
@@ -189,6 +189,12 @@ switch (config.imageUploadType) {
]
}
+// Disable PDF export due to security issue
+if (config.allowPDFExport) {
+ config.allowPDFExport = false
+ logger.warn('PDF export was disabled for this release to mitigate a critical security issue. This feature will hopefully become available again in future releases.')
+}
+
// generate correct path
config.sslCAPath.forEach(function (capath, i, array) {
array[i] = path.resolve(appRootPath, capath)
diff --git a/lib/config/oldEnvironment.js b/lib/config/oldEnvironment.js
index a3b13cb9..06047553 100644
--- a/lib/config/oldEnvironment.js
+++ b/lib/config/oldEnvironment.js
@@ -1,6 +1,6 @@
'use strict'
-const {toBooleanConfig} = require('./utils')
+const { toBooleanConfig } = require('./utils')
module.exports = {
debug: toBooleanConfig(process.env.DEBUG),
diff --git a/lib/history.js b/lib/history.js
index 9c389bfa..88a7ee05 100644
--- a/lib/history.js
+++ b/lib/history.js
@@ -4,7 +4,6 @@
var LZString = require('lz-string')
// core
-var config = require('./config')
var logger = require('./logger')
var response = require('./response')
var models = require('./models')
@@ -56,9 +55,7 @@ function getHistory (userid, callback) {
}
history = parseHistoryToObject(history)
}
- if (config.debug) {
- logger.info('read history success: ' + user.id)
- }
+ logger.debug(`read history success: ${user.id}`)
return callback(null, history)
}).catch(function (err) {
logger.error('read history failed: ' + err)
@@ -140,7 +137,7 @@ function historyPost (req, res) {
var noteId = req.params.noteId
if (!noteId) {
if (typeof req.body['history'] === 'undefined') return response.errorBadRequest(res)
- if (config.debug) { logger.info('SERVER received history from [' + req.user.id + ']: ' + req.body.history) }
+ logger.debug(`SERVER received history from [${req.user.id}]: ${req.body.history}`)
try {
var history = JSON.parse(req.body.history)
} catch (err) {
diff --git a/lib/letter-avatars.js b/lib/letter-avatars.js
index 7d463950..6fb1888a 100644
--- a/lib/letter-avatars.js
+++ b/lib/letter-avatars.js
@@ -30,13 +30,14 @@ exports.generateAvatarURL = function (name, email = '', big = true) {
if (typeof email !== 'string') {
email = '' + name + '@example.com'
}
+ name = encodeURIComponent(name)
let hash = crypto.createHash('md5')
hash.update(email.toLowerCase())
let hexDigest = hash.digest('hex')
if (email !== '' && config.allowGravatar) {
- photo = 'https://www.gravatar.com/avatar/' + hexDigest;
+ photo = 'https://cdn.libravatar.org/avatar/' + hexDigest
if (big) {
photo += '?s=400'
} else {
diff --git a/lib/logger.js b/lib/logger.js
index 5ef1860a..6d4bf69a 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -1,5 +1,5 @@
'use strict'
-const {createLogger, format, transports} = require('winston')
+const { createLogger, format, transports } = require('winston')
const logger = createLogger({
level: 'debug',
diff --git a/lib/migrations/20150702001020-update-to-0_3_1.js b/lib/migrations/20150702001020-update-to-0_3_1.js
index e1a88661..b941048e 100644
--- a/lib/migrations/20150702001020-update-to-0_3_1.js
+++ b/lib/migrations/20150702001020-update-to-0_3_1.js
@@ -22,6 +22,7 @@ module.exports = {
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: shortid' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'shortid'" || error.message === 'column "shortid" of relation "Notes" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20160112220142-note-add-lastchange.js b/lib/migrations/20160112220142-note-add-lastchange.js
index 87e3ff19..69781cef 100644
--- a/lib/migrations/20160112220142-note-add-lastchange.js
+++ b/lib/migrations/20160112220142-note-add-lastchange.js
@@ -9,6 +9,7 @@ module.exports = {
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: lastchangeuserId' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'lastchangeuserId'" || error.message === 'column "lastchangeuserId" of relation "Notes" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
@@ -18,8 +19,8 @@ module.exports = {
down: function (queryInterface, Sequelize) {
return queryInterface.removeColumn('Notes', 'lastchangeAt')
- .then(function () {
- return queryInterface.removeColumn('Notes', 'lastchangeuserId')
- })
+ .then(function () {
+ return queryInterface.removeColumn('Notes', 'lastchangeuserId')
+ })
}
}
diff --git a/lib/migrations/20160420180355-note-add-alias.js b/lib/migrations/20160420180355-note-add-alias.js
index 45d53e69..82941a91 100644
--- a/lib/migrations/20160420180355-note-add-alias.js
+++ b/lib/migrations/20160420180355-note-add-alias.js
@@ -9,6 +9,7 @@ module.exports = {
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: alias' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'alias'" || error.message === 'column "alias" of relation "Notes" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20160515114000-user-add-tokens.js b/lib/migrations/20160515114000-user-add-tokens.js
index 435ae9cb..e47ef5a4 100644
--- a/lib/migrations/20160515114000-user-add-tokens.js
+++ b/lib/migrations/20160515114000-user-add-tokens.js
@@ -5,6 +5,7 @@ module.exports = {
return queryInterface.addColumn('Users', 'refreshToken', Sequelize.STRING)
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: accessToken' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'accessToken'" || error.message === 'column "accessToken" of relation "Users" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20160607060246-support-revision.js b/lib/migrations/20160607060246-support-revision.js
index 547f89b8..b318ea44 100644
--- a/lib/migrations/20160607060246-support-revision.js
+++ b/lib/migrations/20160607060246-support-revision.js
@@ -17,6 +17,7 @@ module.exports = {
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: savedAt' | error.message === "ER_DUP_FIELDNAME: Duplicate column name 'savedAt'" || error.message === 'column "savedAt" of relation "Notes" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20160703062241-support-authorship.js b/lib/migrations/20160703062241-support-authorship.js
index f452b1a7..86054f1c 100644
--- a/lib/migrations/20160703062241-support-authorship.js
+++ b/lib/migrations/20160703062241-support-authorship.js
@@ -18,6 +18,7 @@ module.exports = {
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: authorship' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'authorship'" || error.message === 'column "authorship" of relation "Notes" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20161009040430-support-delete-note.js b/lib/migrations/20161009040430-support-delete-note.js
index 56a336ac..b7ee72c3 100644
--- a/lib/migrations/20161009040430-support-delete-note.js
+++ b/lib/migrations/20161009040430-support-delete-note.js
@@ -3,6 +3,7 @@ module.exports = {
up: function (queryInterface, Sequelize) {
return queryInterface.addColumn('Notes', 'deletedAt', Sequelize.DATE).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: deletedAt' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'deletedAt'" || error.message === 'column "deletedAt" of relation "Notes" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20161201050312-support-email-signin.js b/lib/migrations/20161201050312-support-email-signin.js
index 26bc09ea..5c9fbf85 100644
--- a/lib/migrations/20161201050312-support-email-signin.js
+++ b/lib/migrations/20161201050312-support-email-signin.js
@@ -4,6 +4,7 @@ module.exports = {
return queryInterface.addColumn('Users', 'email', Sequelize.TEXT).then(function () {
return queryInterface.addColumn('Users', 'password', Sequelize.TEXT).catch(function (error) {
if (error.message === "ER_DUP_FIELDNAME: Duplicate column name 'password'" || error.message === 'column "password" of relation "Users" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
@@ -11,6 +12,7 @@ module.exports = {
})
}).catch(function (error) {
if (error.message === 'SQLITE_ERROR: duplicate column name: email' || error.message === "ER_DUP_FIELDNAME: Duplicate column name 'email'" || error.message === 'column "email" of relation "Users" already exists') {
+ // eslint-disable-next-line no-console
console.log('Migration has already run… ignoring.')
} else {
throw error
diff --git a/lib/migrations/20171009121200-longtext-for-mysql.js b/lib/migrations/20171009121200-longtext-for-mysql.js
index 2a7d0d3a..96bf7e87 100644
--- a/lib/migrations/20171009121200-longtext-for-mysql.js
+++ b/lib/migrations/20171009121200-longtext-for-mysql.js
@@ -1,16 +1,16 @@
'use strict'
module.exports = {
- up: function (queryInterface, Sequelize) {
- queryInterface.changeColumn('Notes', 'content', {type: Sequelize.TEXT('long')})
- queryInterface.changeColumn('Revisions', 'patch', {type: Sequelize.TEXT('long')})
- queryInterface.changeColumn('Revisions', 'content', {type: Sequelize.TEXT('long')})
- queryInterface.changeColumn('Revisions', 'lastContent', {type: Sequelize.TEXT('long')})
+ up: async function (queryInterface, Sequelize) {
+ await queryInterface.changeColumn('Notes', 'content', { type: Sequelize.TEXT('long') })
+ await queryInterface.changeColumn('Revisions', 'patch', { type: Sequelize.TEXT('long') })
+ await queryInterface.changeColumn('Revisions', 'content', { type: Sequelize.TEXT('long') })
+ await queryInterface.changeColumn('Revisions', 'lastContent', { type: Sequelize.TEXT('long') })
},
- down: function (queryInterface, Sequelize) {
- queryInterface.changeColumn('Notes', 'content', {type: Sequelize.TEXT})
- queryInterface.changeColumn('Revisions', 'patch', {type: Sequelize.TEXT})
- queryInterface.changeColumn('Revisions', 'content', {type: Sequelize.TEXT})
- queryInterface.changeColumn('Revisions', 'lastContent', {type: Sequelize.TEXT})
+ down: async function (queryInterface, Sequelize) {
+ await queryInterface.changeColumn('Notes', 'content', { type: Sequelize.TEXT })
+ await queryInterface.changeColumn('Revisions', 'patch', { type: Sequelize.TEXT })
+ await queryInterface.changeColumn('Revisions', 'content', { type: Sequelize.TEXT })
+ await queryInterface.changeColumn('Revisions', 'lastContent', { type: Sequelize.TEXT })
}
}
diff --git a/lib/migrations/20180209120907-longtext-of-authorship.js b/lib/migrations/20180209120907-longtext-of-authorship.js
index 4cca5444..04810009 100644
--- a/lib/migrations/20180209120907-longtext-of-authorship.js
+++ b/lib/migrations/20180209120907-longtext-of-authorship.js
@@ -1,13 +1,13 @@
'use strict'
module.exports = {
- up: function (queryInterface, Sequelize) {
- queryInterface.changeColumn('Notes', 'authorship', {type: Sequelize.TEXT('long')})
- queryInterface.changeColumn('Revisions', 'authorship', {type: Sequelize.TEXT('long')})
+ up: async function (queryInterface, Sequelize) {
+ await queryInterface.changeColumn('Notes', 'authorship', { type: Sequelize.TEXT('long') })
+ await queryInterface.changeColumn('Revisions', 'authorship', { type: Sequelize.TEXT('long') })
},
- down: function (queryInterface, Sequelize) {
- queryInterface.changeColumn('Notes', 'authorship', {type: Sequelize.TEXT})
- queryInterface.changeColumn('Revisions', 'authorship', {type: Sequelize.TEXT})
+ down: async function (queryInterface, Sequelize) {
+ await queryInterface.changeColumn('Notes', 'authorship', { type: Sequelize.TEXT })
+ await queryInterface.changeColumn('Revisions', 'authorship', { type: Sequelize.TEXT })
}
}
diff --git a/lib/migrations/20180306150303-fix-enum.js b/lib/migrations/20180306150303-fix-enum.js
index 0ee58a94..620a4229 100644
--- a/lib/migrations/20180306150303-fix-enum.js
+++ b/lib/migrations/20180306150303-fix-enum.js
@@ -2,10 +2,10 @@
module.exports = {
up: function (queryInterface, Sequelize) {
- queryInterface.changeColumn('Notes', 'permission', {type: Sequelize.ENUM('freely', 'editable', 'limited', 'locked', 'protected', 'private')})
+ return queryInterface.changeColumn('Notes', 'permission', { type: Sequelize.ENUM('freely', 'editable', 'limited', 'locked', 'protected', 'private') })
},
down: function (queryInterface, Sequelize) {
- queryInterface.changeColumn('Notes', 'permission', {type: Sequelize.ENUM('freely', 'editable', 'locked', 'private')})
+ return queryInterface.changeColumn('Notes', 'permission', { type: Sequelize.ENUM('freely', 'editable', 'locked', 'private') })
}
}
diff --git a/lib/models/author.js b/lib/models/author.js
index 03f832a4..e65791cb 100644
--- a/lib/models/author.js
+++ b/lib/models/author.js
@@ -18,25 +18,25 @@ module.exports = function (sequelize, DataTypes) {
unique: true,
fields: ['noteId', 'userId']
}
- ],
- classMethods: {
- associate: function (models) {
- Author.belongsTo(models.Note, {
- foreignKey: 'noteId',
- as: 'note',
- constraints: false,
- onDelete: 'CASCADE',
- hooks: true
- })
- Author.belongsTo(models.User, {
- foreignKey: 'userId',
- as: 'user',
- constraints: false,
- onDelete: 'CASCADE',
- hooks: true
- })
- }
- }
+ ]
})
+
+ Author.associate = function (models) {
+ Author.belongsTo(models.Note, {
+ foreignKey: 'noteId',
+ as: 'note',
+ constraints: false,
+ onDelete: 'CASCADE',
+ hooks: true
+ })
+ Author.belongsTo(models.User, {
+ foreignKey: 'userId',
+ as: 'user',
+ constraints: false,
+ onDelete: 'CASCADE',
+ hooks: true
+ })
+ }
+
return Author
}
diff --git a/lib/models/index.js b/lib/models/index.js
index ef70475e..88c1b168 100644
--- a/lib/models/index.js
+++ b/lib/models/index.js
@@ -3,14 +3,16 @@
var fs = require('fs')
var path = require('path')
var Sequelize = require('sequelize')
-const {cloneDeep} = require('lodash')
+const { cloneDeep } = require('lodash')
// core
var config = require('../config')
var logger = require('../logger')
var dbconfig = cloneDeep(config.db)
-dbconfig.logging = config.debug ? logger.info : false
+dbconfig.logging = config.debug ? (data) => {
+ logger.info(data)
+} : false
var sequelize = null
@@ -39,13 +41,13 @@ sequelize.processData = processData
var db = {}
fs.readdirSync(__dirname)
- .filter(function (file) {
- return (file.indexOf('.') !== 0) && (file !== 'index.js')
- })
- .forEach(function (file) {
- var model = sequelize.import(path.join(__dirname, file))
- db[model.name] = model
- })
+ .filter(function (file) {
+ return (file.indexOf('.') !== 0) && (file !== 'index.js')
+ })
+ .forEach(function (file) {
+ var model = sequelize.import(path.join(__dirname, file))
+ db[model.name] = model
+ })
Object.keys(db).forEach(function (modelName) {
if ('associate' in db[modelName]) {
diff --git a/lib/models/note.js b/lib/models/note.js
index 0e8dd4dd..3a8ccb67 100644
--- a/lib/models/note.js
+++ b/lib/models/note.js
@@ -86,486 +86,492 @@ module.exports = function (sequelize, DataTypes) {
}
}, {
paranoid: false,
- classMethods: {
- associate: function (models) {
- Note.belongsTo(models.User, {
- foreignKey: 'ownerId',
- as: 'owner',
- constraints: false,
- onDelete: 'CASCADE',
- hooks: true
- })
- Note.belongsTo(models.User, {
- foreignKey: 'lastchangeuserId',
- as: 'lastchangeuser',
- constraints: false
- })
- Note.hasMany(models.Revision, {
- foreignKey: 'noteId',
- constraints: false
- })
- Note.hasMany(models.Author, {
- foreignKey: 'noteId',
- as: 'authors',
- constraints: false
+ hooks: {
+ beforeCreate: function (note, options) {
+ return new Promise(function (resolve, reject) {
+ // if no content specified then use default note
+ if (!note.content) {
+ var body = null
+ let filePath = null
+ if (!note.alias) {
+ filePath = config.defaultNotePath
+ } else {
+ filePath = path.join(config.docsPath, note.alias + '.md')
+ }
+ if (Note.checkFileExist(filePath)) {
+ var fsCreatedTime = moment(fs.statSync(filePath).ctime)
+ body = fs.readFileSync(filePath, 'utf8')
+ note.title = Note.parseNoteTitle(body)
+ note.content = body
+ if (filePath !== config.defaultNotePath) {
+ note.createdAt = fsCreatedTime
+ }
+ }
+ }
+ // if no permission specified and have owner then give default permission in config, else default permission is freely
+ if (!note.permission) {
+ if (note.ownerId) {
+ note.permission = config.defaultPermission
+ } else {
+ note.permission = 'freely'
+ }
+ }
+ return resolve(note)
})
},
- checkFileExist: function (filePath) {
- try {
- return fs.statSync(filePath).isFile()
- } catch (err) {
- return false
- }
- },
- encodeNoteId: function (id) {
- // remove dashes in UUID and encode in url-safe base64
- let str = id.replace(/-/g, '')
- let hexStr = Buffer.from(str, 'hex')
- return base64url.encode(hexStr)
- },
- decodeNoteId: function (encodedId) {
- // decode from url-safe base64
- let id = base64url.toBuffer(encodedId).toString('hex')
- // add dashes between the UUID string parts
- let idParts = []
- idParts.push(id.substr(0, 8))
- idParts.push(id.substr(8, 4))
- idParts.push(id.substr(12, 4))
- idParts.push(id.substr(16, 4))
- idParts.push(id.substr(20, 12))
- return idParts.join('-')
- },
- checkNoteIdValid: function (id) {
- var uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
- var result = id.match(uuidRegex)
- if (result && result.length === 1) { return true } else { return false }
- },
- parseNoteId: function (noteId, callback) {
- async.series({
- parseNoteIdByAlias: function (_callback) {
- // try to parse note id by alias (e.g. doc)
- Note.findOne({
- where: {
- alias: noteId
- }
- }).then(function (note) {
- if (note) {
- let filePath = path.join(config.docsPath, noteId + '.md')
- if (Note.checkFileExist(filePath)) {
- // if doc in filesystem have newer modified time than last change time
- // then will update the doc in db
- var fsModifiedTime = moment(fs.statSync(filePath).mtime)
- var dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
- var body = fs.readFileSync(filePath, 'utf8')
- var contentLength = body.length
- var title = Note.parseNoteTitle(body)
- if (fsModifiedTime.isAfter(dbModifiedTime) && note.content !== body) {
+ afterCreate: function (note, options, callback) {
+ return new Promise(function (resolve, reject) {
+ sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
+ if (err) {
+ return reject(err)
+ }
+ return resolve(note)
+ })
+ })
+ }
+ }
+ })
+
+ Note.associate = function (models) {
+ Note.belongsTo(models.User, {
+ foreignKey: 'ownerId',
+ as: 'owner',
+ constraints: false,
+ onDelete: 'CASCADE',
+ hooks: true
+ })
+ Note.belongsTo(models.User, {
+ foreignKey: 'lastchangeuserId',
+ as: 'lastchangeuser',
+ constraints: false
+ })
+ Note.hasMany(models.Revision, {
+ foreignKey: 'noteId',
+ constraints: false
+ })
+ Note.hasMany(models.Author, {
+ foreignKey: 'noteId',
+ as: 'authors',
+ constraints: false
+ })
+ }
+ Note.checkFileExist = function (filePath) {
+ try {
+ return fs.statSync(filePath).isFile()
+ } catch (err) {
+ return false
+ }
+ }
+ Note.encodeNoteId = function (id) {
+ // remove dashes in UUID and encode in url-safe base64
+ let str = id.replace(/-/g, '')
+ let hexStr = Buffer.from(str, 'hex')
+ return base64url.encode(hexStr)
+ }
+ Note.decodeNoteId = function (encodedId) {
+ // decode from url-safe base64
+ let id = base64url.toBuffer(encodedId).toString('hex')
+ // add dashes between the UUID string parts
+ let idParts = []
+ idParts.push(id.substr(0, 8))
+ idParts.push(id.substr(8, 4))
+ idParts.push(id.substr(12, 4))
+ idParts.push(id.substr(16, 4))
+ idParts.push(id.substr(20, 12))
+ return idParts.join('-')
+ }
+ Note.checkNoteIdValid = function (id) {
+ var uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i
+ var result = id.match(uuidRegex)
+ if (result && result.length === 1) { return true } else { return false }
+ }
+ Note.parseNoteId = function (noteId, callback) {
+ async.series({
+ parseNoteIdByAlias: function (_callback) {
+ // try to parse note id by alias (e.g. doc)
+ Note.findOne({
+ where: {
+ alias: noteId
+ }
+ }).then(function (note) {
+ if (note) {
+ let filePath = path.join(config.docsPath, noteId + '.md')
+ if (Note.checkFileExist(filePath)) {
+ // if doc in filesystem have newer modified time than last change time
+ // then will update the doc in db
+ var fsModifiedTime = moment(fs.statSync(filePath).mtime)
+ var dbModifiedTime = moment(note.lastchangeAt || note.createdAt)
+ var body = fs.readFileSync(filePath, 'utf8')
+ var contentLength = body.length
+ var title = Note.parseNoteTitle(body)
+ if (fsModifiedTime.isAfter(dbModifiedTime) && note.content !== body) {
+ note.update({
+ title: title,
+ content: body,
+ lastchangeAt: fsModifiedTime
+ }).then(function (note) {
+ sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
+ if (err) return _callback(err, null)
+ // update authorship on after making revision of docs
+ var patch = dmp.patch_fromText(revision.patch)
+ var operations = Note.transformPatchToOperations(patch, contentLength)
+ var authorship = note.authorship
+ for (let i = 0; i < operations.length; i++) {
+ authorship = Note.updateAuthorshipByOperation(operations[i], null, authorship)
+ }
note.update({
- title: title,
- content: body,
- lastchangeAt: fsModifiedTime
+ authorship: authorship
}).then(function (note) {
- sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
- if (err) return _callback(err, null)
- // update authorship on after making revision of docs
- var patch = dmp.patch_fromText(revision.patch)
- var operations = Note.transformPatchToOperations(patch, contentLength)
- var authorship = note.authorship
- for (let i = 0; i < operations.length; i++) {
- authorship = Note.updateAuthorshipByOperation(operations[i], null, authorship)
- }
- note.update({
- authorship: authorship
- }).then(function (note) {
- return callback(null, note.id)
- }).catch(function (err) {
- return _callback(err, null)
- })
- })
+ return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
- } else {
- return callback(null, note.id)
- }
- } else {
- return callback(null, note.id)
- }
- } else {
- var filePath = path.join(config.docsPath, noteId + '.md')
- if (Note.checkFileExist(filePath)) {
- Note.create({
- alias: noteId,
- owner: null,
- permission: 'locked'
- }).then(function (note) {
- return callback(null, note.id)
- }).catch(function (err) {
- return _callback(err, null)
})
- } else {
- return _callback(null, null)
- }
- }
- }).catch(function (err) {
- return _callback(err, null)
- })
- },
- // parse note id by LZString is deprecated, here for compability
- parseNoteIdByLZString: function (_callback) {
- // Calculate minimal string length for an UUID that is encoded
- // base64 encoded and optimize comparsion by using -1
- // this should make a lot of LZ-String parsing errors obsolete
- // as we can assume that a nodeId that is 48 chars or longer is a
- // noteID.
- const base64UuidLength = ((4 * 36) / 3) - 1
- if (!(noteId.length > base64UuidLength)) {
- return _callback(null, null)
- }
- // try to parse note id by LZString Base64
- try {
- var id = LZString.decompressFromBase64(noteId)
- if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
- } catch (err) {
- if (err.message === 'Cannot read property \'charAt\' of undefined') {
- logger.warning('Looks like we can not decode "' + noteId + '" with LZString. Can be ignored.')
- } else {
- logger.error(err)
- }
- return _callback(null, null)
- }
- },
- parseNoteIdByBase64Url: function (_callback) {
- // try to parse note id by base64url
- try {
- var id = Note.decodeNoteId(noteId)
- if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
- } catch (err) {
- logger.error(err)
- return _callback(null, null)
- }
- },
- parseNoteIdByShortId: function (_callback) {
- // try to parse note id by shortId
- try {
- if (shortId.isValid(noteId)) {
- Note.findOne({
- where: {
- shortid: noteId
- }
- }).then(function (note) {
- if (!note) return _callback(null, null)
- return callback(null, note.id)
}).catch(function (err) {
return _callback(err, null)
})
} else {
- return _callback(null, null)
+ return callback(null, note.id)
}
- } catch (err) {
- return _callback(err, null)
+ } else {
+ return callback(null, note.id)
+ }
+ } else {
+ var filePath = path.join(config.docsPath, noteId + '.md')
+ if (Note.checkFileExist(filePath)) {
+ Note.create({
+ alias: noteId,
+ owner: null,
+ permission: 'locked'
+ }).then(function (note) {
+ return callback(null, note.id)
+ }).catch(function (err) {
+ return _callback(err, null)
+ })
+ } else {
+ return _callback(null, null)
}
}
- }, function (err, result) {
- if (err) {
- logger.error(err)
- return callback(err, null)
- }
- return callback(null, null)
+ }).catch(function (err) {
+ return _callback(err, null)
})
},
- parseNoteInfo: function (body) {
- var parsed = Note.extractMeta(body)
- var $ = cheerio.load(md.render(parsed.markdown))
- return {
- title: Note.extractNoteTitle(parsed.meta, $),
- tags: Note.extractNoteTags(parsed.meta, $)
+ // parse note id by LZString is deprecated, here for compability
+ parseNoteIdByLZString: function (_callback) {
+ // Calculate minimal string length for an UUID that is encoded
+ // base64 encoded and optimize comparsion by using -1
+ // this should make a lot of LZ-String parsing errors obsolete
+ // as we can assume that a nodeId that is 48 chars or longer is a
+ // noteID.
+ const base64UuidLength = ((4 * 36) / 3) - 1
+ if (!(noteId.length > base64UuidLength)) {
+ return _callback(null, null)
}
- },
- parseNoteTitle: function (body) {
- var parsed = Note.extractMeta(body)
- var $ = cheerio.load(md.render(parsed.markdown))
- return Note.extractNoteTitle(parsed.meta, $)
- },
- extractNoteTitle: function (meta, $) {
- var title = ''
- if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
- title = meta.title
- } else {
- var h1s = $('h1')
- if (h1s.length > 0 && h1s.first().text().split('\n').length === 1) { title = S(h1s.first().text()).stripTags().s }
- }
- if (!title) title = 'Untitled'
- return title
- },
- generateDescription: function (markdown) {
- return markdown.substr(0, 100).replace(/(?:\r\n|\r|\n)/g, ' ')
- },
- decodeTitle: function (title) {
- return title || 'Untitled'
- },
- generateWebTitle: function (title) {
- title = !title || title === 'Untitled' ? 'CodiMD - Collaborative markdown notes' : title + ' - CodiMD'
- return title
- },
- extractNoteTags: function (meta, $) {
- var tags = []
- var rawtags = []
- if (meta.tags && (typeof meta.tags === 'string' || typeof meta.tags === 'number')) {
- var metaTags = ('' + meta.tags).split(',')
- for (let i = 0; i < metaTags.length; i++) {
- var text = metaTags[i].trim()
- if (text) rawtags.push(text)
+ // try to parse note id by LZString Base64
+ try {
+ var id = LZString.decompressFromBase64(noteId)
+ if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
+ } catch (err) {
+ if (err.message === 'Cannot read property \'charAt\' of undefined') {
+ logger.warning('Looks like we can not decode "' + noteId + '" with LZString. Can be ignored.')
+ } else {
+ logger.error(err)
}
- } else {
- var h6s = $('h6')
- h6s.each(function (key, value) {
- if (/^tags/gmi.test($(value).text())) {
- var codes = $(value).find('code')
- for (let i = 0; i < codes.length; i++) {
- var text = S($(codes[i]).text().trim()).stripTags().s
- if (text) rawtags.push(text)
- }
- }
- })
+ return _callback(null, null)
}
- for (let i = 0; i < rawtags.length; i++) {
- var found = false
- for (let j = 0; j < tags.length; j++) {
- if (tags[j] === rawtags[i]) {
- found = true
- break
- }
- }
- if (!found) { tags.push(rawtags[i]) }
+ },
+ parseNoteIdByBase64Url: function (_callback) {
+ // try to parse note id by base64url
+ try {
+ var id = Note.decodeNoteId(noteId)
+ if (id && Note.checkNoteIdValid(id)) { return callback(null, id) } else { return _callback(null, null) }
+ } catch (err) {
+ logger.error(err)
+ return _callback(null, null)
}
- return tags
},
- extractMeta: function (content) {
- var obj = null
+ parseNoteIdByShortId: function (_callback) {
+ // try to parse note id by shortId
try {
- obj = metaMarked(content)
- if (!obj.markdown) obj.markdown = ''
- if (!obj.meta) obj.meta = {}
+ if (shortId.isValid(noteId)) {
+ Note.findOne({
+ where: {
+ shortid: noteId
+ }
+ }).then(function (note) {
+ if (!note) return _callback(null, null)
+ return callback(null, note.id)
+ }).catch(function (err) {
+ return _callback(err, null)
+ })
+ } else {
+ return _callback(null, null)
+ }
} catch (err) {
- obj = {
- markdown: content,
- meta: {}
+ return _callback(err, null)
+ }
+ }
+ }, function (err, result) {
+ if (err) {
+ logger.error(err)
+ return callback(err, null)
+ }
+ return callback(null, null)
+ })
+ }
+ Note.parseNoteInfo = function (body) {
+ var parsed = Note.extractMeta(body)
+ var $ = cheerio.load(md.render(parsed.markdown))
+ return {
+ title: Note.extractNoteTitle(parsed.meta, $),
+ tags: Note.extractNoteTags(parsed.meta, $)
+ }
+ }
+ Note.parseNoteTitle = function (body) {
+ var parsed = Note.extractMeta(body)
+ var $ = cheerio.load(md.render(parsed.markdown))
+ return Note.extractNoteTitle(parsed.meta, $)
+ }
+ Note.extractNoteTitle = function (meta, $) {
+ var title = ''
+ if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) {
+ title = meta.title
+ } else {
+ var h1s = $('h1')
+ if (h1s.length > 0 && h1s.first().text().split('\n').length === 1) { title = S(h1s.first().text()).stripTags().s }
+ }
+ if (!title) title = 'Untitled'
+ return title
+ }
+ Note.generateDescription = function (markdown) {
+ return markdown.substr(0, 100).replace(/(?:\r\n|\r|\n)/g, ' ')
+ }
+ Note.decodeTitle = function (title) {
+ return title || 'Untitled'
+ }
+ Note.generateWebTitle = function (title) {
+ title = !title || title === 'Untitled' ? 'CodiMD - Collaborative markdown notes' : title + ' - CodiMD'
+ return title
+ }
+ Note.extractNoteTags = function (meta, $) {
+ var tags = []
+ var rawtags = []
+ if (meta.tags && (typeof meta.tags === 'string' || typeof meta.tags === 'number')) {
+ var metaTags = ('' + meta.tags).split(',')
+ for (let i = 0; i < metaTags.length; i++) {
+ var text = metaTags[i].trim()
+ if (text) rawtags.push(text)
+ }
+ } else {
+ var h6s = $('h6')
+ h6s.each(function (key, value) {
+ if (/^tags/gmi.test($(value).text())) {
+ var codes = $(value).find('code')
+ for (let i = 0; i < codes.length; i++) {
+ var text = S($(codes[i]).text().trim()).stripTags().s
+ if (text) rawtags.push(text)
}
}
- return obj
- },
- parseMeta: function (meta) {
- var _meta = {}
- if (meta) {
- if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) { _meta.title = meta.title }
- if (meta.description && (typeof meta.description === 'string' || typeof meta.description === 'number')) { _meta.description = meta.description }
- if (meta.robots && (typeof meta.robots === 'string' || typeof meta.robots === 'number')) { _meta.robots = meta.robots }
- if (meta.GA && (typeof meta.GA === 'string' || typeof meta.GA === 'number')) { _meta.GA = meta.GA }
- if (meta.disqus && (typeof meta.disqus === 'string' || typeof meta.disqus === 'number')) { _meta.disqus = meta.disqus }
- if (meta.slideOptions && (typeof meta.slideOptions === 'object')) { _meta.slideOptions = meta.slideOptions }
+ })
+ }
+ for (let i = 0; i < rawtags.length; i++) {
+ var found = false
+ for (let j = 0; j < tags.length; j++) {
+ if (tags[j] === rawtags[i]) {
+ found = true
+ break
}
- return _meta
- },
- updateAuthorshipByOperation: function (operation, userId, authorships) {
- var index = 0
- var timestamp = Date.now()
- for (let i = 0; i < operation.length; i++) {
- var op = operation[i]
- if (ot.TextOperation.isRetain(op)) {
- index += op
- } else if (ot.TextOperation.isInsert(op)) {
- let opStart = index
- let opEnd = index + op.length
- var inserted = false
- // authorship format: [userId, startPos, endPos, createdAt, updatedAt]
- if (authorships.length <= 0) authorships.push([userId, opStart, opEnd, timestamp, timestamp])
- else {
- for (let j = 0; j < authorships.length; j++) {
- let authorship = authorships[j]
- if (!inserted) {
- let nextAuthorship = authorships[j + 1] || -1
- if ((nextAuthorship !== -1 && nextAuthorship[1] >= opEnd) || j >= authorships.length - 1) {
- if (authorship[1] < opStart && authorship[2] > opStart) {
- // divide
- let postLength = authorship[2] - opStart
- authorship[2] = opStart
- authorship[4] = timestamp
- authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
- authorships.splice(j + 2, 0, [authorship[0], opEnd, opEnd + postLength, authorship[3], timestamp])
- j += 2
- inserted = true
- } else if (authorship[1] >= opStart) {
- authorships.splice(j, 0, [userId, opStart, opEnd, timestamp, timestamp])
- j += 1
- inserted = true
- } else if (authorship[2] <= opStart) {
- authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
- j += 1
- inserted = true
- }
- }
- }
- if (authorship[1] >= opStart) {
- authorship[1] += op.length
- authorship[2] += op.length
- }
- }
- }
- index += op.length
- } else if (ot.TextOperation.isDelete(op)) {
- let opStart = index
- let opEnd = index - op
- if (operation.length === 1) {
- authorships = []
- } else if (authorships.length > 0) {
- for (let j = 0; j < authorships.length; j++) {
- let authorship = authorships[j]
- if (authorship[1] >= opStart && authorship[1] <= opEnd && authorship[2] >= opStart && authorship[2] <= opEnd) {
- authorships.splice(j, 1)
- j -= 1
- } else if (authorship[1] < opStart && authorship[1] < opEnd && authorship[2] > opStart && authorship[2] > opEnd) {
- authorship[2] += op
- authorship[4] = timestamp
- } else if (authorship[2] >= opStart && authorship[2] <= opEnd) {
+ }
+ if (!found) { tags.push(rawtags[i]) }
+ }
+ return tags
+ }
+ Note.extractMeta = function (content) {
+ var obj = null
+ try {
+ obj = metaMarked(content)
+ if (!obj.markdown) obj.markdown = ''
+ if (!obj.meta) obj.meta = {}
+ } catch (err) {
+ obj = {
+ markdown: content,
+ meta: {}
+ }
+ }
+ return obj
+ }
+ Note.parseMeta = function (meta) {
+ var _meta = {}
+ if (meta) {
+ if (meta.title && (typeof meta.title === 'string' || typeof meta.title === 'number')) { _meta.title = meta.title }
+ if (meta.description && (typeof meta.description === 'string' || typeof meta.description === 'number')) { _meta.description = meta.description }
+ if (meta.robots && (typeof meta.robots === 'string' || typeof meta.robots === 'number')) { _meta.robots = meta.robots }
+ if (meta.GA && (typeof meta.GA === 'string' || typeof meta.GA === 'number')) { _meta.GA = meta.GA }
+ if (meta.disqus && (typeof meta.disqus === 'string' || typeof meta.disqus === 'number')) { _meta.disqus = meta.disqus }
+ if (meta.slideOptions && (typeof meta.slideOptions === 'object')) { _meta.slideOptions = meta.slideOptions }
+ }
+ return _meta
+ }
+ Note.updateAuthorshipByOperation = function (operation, userId, authorships) {
+ var index = 0
+ var timestamp = Date.now()
+ for (let i = 0; i < operation.length; i++) {
+ var op = operation[i]
+ if (ot.TextOperation.isRetain(op)) {
+ index += op
+ } else if (ot.TextOperation.isInsert(op)) {
+ let opStart = index
+ let opEnd = index + op.length
+ var inserted = false
+ // authorship format: [userId, startPos, endPos, createdAt, updatedAt]
+ if (authorships.length <= 0) authorships.push([userId, opStart, opEnd, timestamp, timestamp])
+ else {
+ for (let j = 0; j < authorships.length; j++) {
+ let authorship = authorships[j]
+ if (!inserted) {
+ let nextAuthorship = authorships[j + 1] || -1
+ if ((nextAuthorship !== -1 && nextAuthorship[1] >= opEnd) || j >= authorships.length - 1) {
+ if (authorship[1] < opStart && authorship[2] > opStart) {
+ // divide
+ let postLength = authorship[2] - opStart
authorship[2] = opStart
authorship[4] = timestamp
- } else if (authorship[1] >= opStart && authorship[1] <= opEnd) {
- authorship[1] = opEnd
- authorship[4] = timestamp
- }
- if (authorship[1] >= opEnd) {
- authorship[1] += op
- authorship[2] += op
+ authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
+ authorships.splice(j + 2, 0, [authorship[0], opEnd, opEnd + postLength, authorship[3], timestamp])
+ j += 2
+ inserted = true
+ } else if (authorship[1] >= opStart) {
+ authorships.splice(j, 0, [userId, opStart, opEnd, timestamp, timestamp])
+ j += 1
+ inserted = true
+ } else if (authorship[2] <= opStart) {
+ authorships.splice(j + 1, 0, [userId, opStart, opEnd, timestamp, timestamp])
+ j += 1
+ inserted = true
}
}
}
- index += op
- }
- }
- // merge
- for (let j = 0; j < authorships.length; j++) {
- let authorship = authorships[j]
- for (let k = j + 1; k < authorships.length; k++) {
- let nextAuthorship = authorships[k]
- if (nextAuthorship && authorship[0] === nextAuthorship[0] && authorship[2] === nextAuthorship[1]) {
- let minTimestamp = Math.min(authorship[3], nextAuthorship[3])
- let maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
- authorships.splice(j, 1, [authorship[0], authorship[1], nextAuthorship[2], minTimestamp, maxTimestamp])
- authorships.splice(k, 1)
- j -= 1
- break
+ if (authorship[1] >= opStart) {
+ authorship[1] += op.length
+ authorship[2] += op.length
}
}
}
- // clear
- for (let j = 0; j < authorships.length; j++) {
- let authorship = authorships[j]
- if (!authorship[0]) {
- authorships.splice(j, 1)
- j -= 1
- }
- }
- return authorships
- },
- transformPatchToOperations: function (patch, contentLength) {
- var operations = []
- if (patch.length > 0) {
- // calculate original content length
- for (let j = patch.length - 1; j >= 0; j--) {
- var p = patch[j]
- for (let i = 0; i < p.diffs.length; i++) {
- var diff = p.diffs[i]
- switch (diff[0]) {
- case 1: // insert
- contentLength -= diff[1].length
- break
- case -1: // delete
- contentLength += diff[1].length
- break
- }
+ index += op.length
+ } else if (ot.TextOperation.isDelete(op)) {
+ let opStart = index
+ let opEnd = index - op
+ if (operation.length === 1) {
+ authorships = []
+ } else if (authorships.length > 0) {
+ for (let j = 0; j < authorships.length; j++) {
+ let authorship = authorships[j]
+ if (authorship[1] >= opStart && authorship[1] <= opEnd && authorship[2] >= opStart && authorship[2] <= opEnd) {
+ authorships.splice(j, 1)
+ j -= 1
+ } else if (authorship[1] < opStart && authorship[1] < opEnd && authorship[2] > opStart && authorship[2] > opEnd) {
+ authorship[2] += op
+ authorship[4] = timestamp
+ } else if (authorship[2] >= opStart && authorship[2] <= opEnd) {
+ authorship[2] = opStart
+ authorship[4] = timestamp
+ } else if (authorship[1] >= opStart && authorship[1] <= opEnd) {
+ authorship[1] = opEnd
+ authorship[4] = timestamp
}
- }
- // generate operations
- var bias = 0
- var lengthBias = 0
- for (let j = 0; j < patch.length; j++) {
- var operation = []
- let p = patch[j]
- var currIndex = p.start1
- var currLength = contentLength - bias
- for (let i = 0; i < p.diffs.length; i++) {
- let diff = p.diffs[i]
- switch (diff[0]) {
- case 0: // retain
- if (i === 0) {
- // first
- operation.push(currIndex + diff[1].length)
- } else if (i !== p.diffs.length - 1) {
- // mid
- operation.push(diff[1].length)
- } else {
- // last
- operation.push(currLength + lengthBias - currIndex)
- }
- currIndex += diff[1].length
- break
- case 1: // insert
- operation.push(diff[1])
- lengthBias += diff[1].length
- currIndex += diff[1].length
- break
- case -1: // delete
- operation.push(-diff[1].length)
- bias += diff[1].length
- currIndex += diff[1].length
- break
- }
+ if (authorship[1] >= opEnd) {
+ authorship[1] += op
+ authorship[2] += op
}
- operations.push(operation)
}
}
- return operations
+ index += op
}
- },
- hooks: {
- beforeCreate: function (note, options, callback) {
- // if no content specified then use default note
- if (!note.content) {
- var body = null
- let filePath = null
- if (!note.alias) {
- filePath = config.defaultNotePath
- } else {
- filePath = path.join(config.docsPath, note.alias + '.md')
- }
- if (Note.checkFileExist(filePath)) {
- var fsCreatedTime = moment(fs.statSync(filePath).ctime)
- body = fs.readFileSync(filePath, 'utf8')
- note.title = Note.parseNoteTitle(body)
- note.content = body
- if (filePath !== config.defaultNotePath) {
- note.createdAt = fsCreatedTime
- }
+ }
+ // merge
+ for (let j = 0; j < authorships.length; j++) {
+ let authorship = authorships[j]
+ for (let k = j + 1; k < authorships.length; k++) {
+ let nextAuthorship = authorships[k]
+ if (nextAuthorship && authorship[0] === nextAuthorship[0] && authorship[2] === nextAuthorship[1]) {
+ let minTimestamp = Math.min(authorship[3], nextAuthorship[3])
+ let maxTimestamp = Math.max(authorship[3], nextAuthorship[3])
+ authorships.splice(j, 1, [authorship[0], authorship[1], nextAuthorship[2], minTimestamp, maxTimestamp])
+ authorships.splice(k, 1)
+ j -= 1
+ break
+ }
+ }
+ }
+ // clear
+ for (let j = 0; j < authorships.length; j++) {
+ let authorship = authorships[j]
+ if (!authorship[0]) {
+ authorships.splice(j, 1)
+ j -= 1
+ }
+ }
+ return authorships
+ }
+ Note.transformPatchToOperations = function (patch, contentLength) {
+ var operations = []
+ if (patch.length > 0) {
+ // calculate original content length
+ for (let j = patch.length - 1; j >= 0; j--) {
+ var p = patch[j]
+ for (let i = 0; i < p.diffs.length; i++) {
+ var diff = p.diffs[i]
+ switch (diff[0]) {
+ case 1: // insert
+ contentLength -= diff[1].length
+ break
+ case -1: // delete
+ contentLength += diff[1].length
+ break
}
}
- // if no permission specified and have owner then give default permission in config, else default permission is freely
- if (!note.permission) {
- if (note.ownerId) {
- note.permission = config.defaultPermission
- } else {
- note.permission = 'freely'
+ }
+ // generate operations
+ var bias = 0
+ var lengthBias = 0
+ for (let j = 0; j < patch.length; j++) {
+ var operation = []
+ let p = patch[j]
+ var currIndex = p.start1
+ var currLength = contentLength - bias
+ for (let i = 0; i < p.diffs.length; i++) {
+ let diff = p.diffs[i]
+ switch (diff[0]) {
+ case 0: // retain
+ if (i === 0) {
+ // first
+ operation.push(currIndex + diff[1].length)
+ } else if (i !== p.diffs.length - 1) {
+ // mid
+ operation.push(diff[1].length)
+ } else {
+ // last
+ operation.push(currLength + lengthBias - currIndex)
+ }
+ currIndex += diff[1].length
+ break
+ case 1: // insert
+ operation.push(diff[1])
+ lengthBias += diff[1].length
+ currIndex += diff[1].length
+ break
+ case -1: // delete
+ operation.push(-diff[1].length)
+ bias += diff[1].length
+ currIndex += diff[1].length
+ break
}
}
- return callback(null, note)
- },
- afterCreate: function (note, options, callback) {
- sequelize.models.Revision.saveNoteRevision(note, function (err, revision) {
- callback(err, note)
- })
+ operations.push(operation)
}
}
- })
+ return operations
+ }
return Note
}
diff --git a/lib/models/revision.js b/lib/models/revision.js
index 4ee080da..dbd76e4e 100644
--- a/lib/models/revision.js
+++ b/lib/models/revision.js
@@ -7,8 +7,9 @@ var childProcess = require('child_process')
var shortId = require('shortid')
var path = require('path')
+var Op = Sequelize.Op
+
// core
-var config = require('../config')
var logger = require('../logger')
var dmpWorker = createDmpWorker()
@@ -18,7 +19,7 @@ function createDmpWorker () {
var worker = childProcess.fork(path.resolve(__dirname, '../workers/dmpWorker.js'), {
stdio: 'ignore'
})
- if (config.debug) logger.info('dmp worker process started')
+ logger.debug('dmp worker process started')
worker.on('message', function (data) {
if (!data || !data.msg || !data.cacheKey) {
return logger.error('dmp worker error: not enough data on message')
@@ -36,7 +37,7 @@ function createDmpWorker () {
})
worker.on('close', function (code) {
dmpWorker = null
- if (config.debug) logger.info('dmp worker process exited with code ' + code)
+ logger.debug(`dmp worker process exited with code ${code}`)
})
return worker
}
@@ -97,214 +98,212 @@ module.exports = function (sequelize, DataTypes) {
this.setDataValue('authorship', value ? JSON.stringify(value) : value)
}
}
- }, {
- classMethods: {
- associate: function (models) {
- Revision.belongsTo(models.Note, {
- foreignKey: 'noteId',
- as: 'note',
- constraints: false,
- onDelete: 'CASCADE',
- hooks: true
- })
- },
- getNoteRevisions: function (note, callback) {
- Revision.findAll({
- where: {
- noteId: note.id
- },
- order: [['createdAt', 'DESC']]
- }).then(function (revisions) {
- var data = []
- for (var i = 0, l = revisions.length; i < l; i++) {
- var revision = revisions[i]
- data.push({
- time: moment(revision.createdAt).valueOf(),
- length: revision.length
- })
- }
- callback(null, data)
- }).catch(function (err) {
- callback(err, null)
- })
+ })
+
+ Revision.associate = function (models) {
+ Revision.belongsTo(models.Note, {
+ foreignKey: 'noteId',
+ as: 'note',
+ constraints: false,
+ onDelete: 'CASCADE',
+ hooks: true
+ })
+ }
+ Revision.getNoteRevisions = function (note, callback) {
+ Revision.findAll({
+ where: {
+ noteId: note.id
},
- getPatchedNoteRevisionByTime: function (note, time, callback) {
- // find all revisions to prepare for all possible calculation
- Revision.findAll({
- where: {
- noteId: note.id
- },
- order: [['createdAt', 'DESC']]
- }).then(function (revisions) {
- if (revisions.length <= 0) return callback(null, null)
- // measure target revision position
- Revision.count({
- where: {
- noteId: note.id,
- createdAt: {
- $gte: time
- }
- },
- order: [['createdAt', 'DESC']]
- }).then(function (count) {
- if (count <= 0) return callback(null, null)
- sendDmpWorker({
- msg: 'get revision',
- revisions: revisions,
- count: count
- }, callback)
- }).catch(function (err) {
- return callback(err, null)
- })
- }).catch(function (err) {
- return callback(err, null)
+ order: [['createdAt', 'DESC']]
+ }).then(function (revisions) {
+ var data = []
+ for (var i = 0, l = revisions.length; i < l; i++) {
+ var revision = revisions[i]
+ data.push({
+ time: moment(revision.createdAt).valueOf(),
+ length: revision.length
})
+ }
+ callback(null, data)
+ }).catch(function (err) {
+ callback(err, null)
+ })
+ }
+ Revision.getPatchedNoteRevisionByTime = function (note, time, callback) {
+ // find all revisions to prepare for all possible calculations
+ Revision.findAll({
+ where: {
+ noteId: note.id
},
- checkAllNotesRevision: function (callback) {
- Revision.saveAllNotesRevision(function (err, notes) {
- if (err) return callback(err, null)
- if (!notes || notes.length <= 0) {
- return callback(null, notes)
- } else {
- Revision.checkAllNotesRevision(callback)
+ order: [['createdAt', 'DESC']]
+ }).then(function (revisions) {
+ if (revisions.length <= 0) return callback(null, null)
+ // measure target revision position
+ Revision.count({
+ where: {
+ noteId: note.id,
+ createdAt: {
+ [Op.gte]: time
}
- })
- },
- saveAllNotesRevision: function (callback) {
- sequelize.models.Note.findAll({
- // query all notes that need to save for revision
- where: {
- $and: [
- {
- lastchangeAt: {
- $or: {
- $eq: null,
- $and: {
- $ne: null,
- $gt: sequelize.col('createdAt')
- }
- }
- }
- },
- {
- savedAt: {
- $or: {
- $eq: null,
- $lt: sequelize.col('lastchangeAt')
- }
+ },
+ order: [['createdAt', 'DESC']]
+ }).then(function (count) {
+ if (count <= 0) return callback(null, null)
+ sendDmpWorker({
+ msg: 'get revision',
+ revisions: revisions,
+ count: count
+ }, callback)
+ }).catch(function (err) {
+ return callback(err, null)
+ })
+ }).catch(function (err) {
+ return callback(err, null)
+ })
+ }
+ Revision.checkAllNotesRevision = function (callback) {
+ Revision.saveAllNotesRevision(function (err, notes) {
+ if (err) return callback(err, null)
+ if (!notes || notes.length <= 0) {
+ return callback(null, notes)
+ } else {
+ Revision.checkAllNotesRevision(callback)
+ }
+ })
+ }
+ Revision.saveAllNotesRevision = function (callback) {
+ sequelize.models.Note.findAll({
+ // query all notes whose revisions need to be saved
+ where: {
+ [Op.and]: [
+ {
+ lastchangeAt: {
+ [Op.or]: {
+ [Op.eq]: null,
+ [Op.and]: {
+ [Op.ne]: null,
+ [Op.gt]: sequelize.col('createdAt')
}
}
- ]
- }
- }).then(function (notes) {
- if (notes.length <= 0) return callback(null, notes)
- var savedNotes = []
- async.each(notes, function (note, _callback) {
- // revision saving policy: note not been modified for 5 mins or not save for 10 mins
- if (note.lastchangeAt && note.savedAt) {
- var lastchangeAt = moment(note.lastchangeAt)
- var savedAt = moment(note.savedAt)
- if (moment().isAfter(lastchangeAt.add(5, 'minutes'))) {
- savedNotes.push(note)
- Revision.saveNoteRevision(note, _callback)
- } else if (lastchangeAt.isAfter(savedAt.add(10, 'minutes'))) {
- savedNotes.push(note)
- Revision.saveNoteRevision(note, _callback)
- } else {
- return _callback(null, null)
- }
- } else {
- savedNotes.push(note)
- Revision.saveNoteRevision(note, _callback)
}
- }, function (err) {
- if (err) {
- return callback(err, null)
+ },
+ {
+ savedAt: {
+ [Op.or]: {
+ [Op.eq]: null,
+ [Op.lt]: sequelize.col('lastchangeAt')
+ }
}
- // return null when no notes need saving at this moment but have delayed tasks to be done
- var result = ((savedNotes.length === 0) && (notes.length > savedNotes.length)) ? null : savedNotes
- return callback(null, result)
- })
+ }
+ ]
+ }
+ }).then(function (notes) {
+ if (notes.length <= 0) return callback(null, notes)
+ var savedNotes = []
+ async.each(notes, function (note, _callback) {
+ // revision saving policy: the note has not been modified for 5 minutes, or has not been saved for more than 10 minutes
+ if (note.lastchangeAt && note.savedAt) {
+ var lastchangeAt = moment(note.lastchangeAt)
+ var savedAt = moment(note.savedAt)
+ if (moment().isAfter(lastchangeAt.add(5, 'minutes'))) {
+ savedNotes.push(note)
+ Revision.saveNoteRevision(note, _callback)
+ } else if (lastchangeAt.isAfter(savedAt.add(10, 'minutes'))) {
+ savedNotes.push(note)
+ Revision.saveNoteRevision(note, _callback)
+ } else {
+ return _callback(null, null)
+ }
+ } else {
+ savedNotes.push(note)
+ Revision.saveNoteRevision(note, _callback)
+ }
+ }, function (err) {
+ if (err) {
+ return callback(err, null)
+ }
+ // return null when no notes need saving right now but delayed tasks remain to be done
+ var result = ((savedNotes.length === 0) && (notes.length > savedNotes.length)) ? null : savedNotes
+ return callback(null, result)
+ })
+ }).catch(function (err) {
+ return callback(err, null)
+ })
+ }
+ Revision.saveNoteRevision = function (note, callback) {
+ Revision.findAll({
+ where: {
+ noteId: note.id
+ },
+ order: [['createdAt', 'DESC']]
+ }).then(function (revisions) {
+ if (revisions.length <= 0) {
+ // if no revision available
+ Revision.create({
+ noteId: note.id,
+ lastContent: note.content ? note.content : '',
+ length: note.content ? note.content.length : 0,
+ authorship: note.authorship
+ }).then(function (revision) {
+ Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) {
return callback(err, null)
})
- },
- saveNoteRevision: function (note, callback) {
- Revision.findAll({
- where: {
- noteId: note.id
- },
- order: [['createdAt', 'DESC']]
- }).then(function (revisions) {
- if (revisions.length <= 0) {
- // if no revision available
- Revision.create({
- noteId: note.id,
- lastContent: note.content ? note.content : '',
- length: note.content ? note.content.length : 0,
- authorship: note.authorship
+ } else {
+ var latestRevision = revisions[0]
+ var lastContent = latestRevision.content || latestRevision.lastContent
+ var content = note.content
+ sendDmpWorker({
+ msg: 'create patch',
+ lastDoc: lastContent,
+ currDoc: content
+ }, function (err, patch) {
+ if (err) logger.error('save note revision error', err)
+ if (!patch) {
+ // if the patch is empty (no difference), just refresh the latest revision's updated time
+ latestRevision.changed('updatedAt', true)
+ latestRevision.update({
+ updatedAt: Date.now()
}).then(function (revision) {
Revision.finishSaveNoteRevision(note, revision, callback)
}).catch(function (err) {
return callback(err, null)
})
} else {
- var latestRevision = revisions[0]
- var lastContent = latestRevision.content || latestRevision.lastContent
- var content = note.content
- sendDmpWorker({
- msg: 'create patch',
- lastDoc: lastContent,
- currDoc: content
- }, function (err, patch) {
- if (err) logger.error('save note revision error', err)
- if (!patch) {
- // if patch is empty (means no difference) then just update the latest revision updated time
- latestRevision.changed('updatedAt', true)
- latestRevision.update({
- updatedAt: Date.now()
- }).then(function (revision) {
- Revision.finishSaveNoteRevision(note, revision, callback)
- }).catch(function (err) {
- return callback(err, null)
- })
- } else {
- Revision.create({
- noteId: note.id,
- patch: patch,
- content: note.content,
- length: note.content.length,
- authorship: note.authorship
- }).then(function (revision) {
- // clear last revision content to reduce db size
- latestRevision.update({
- content: null
- }).then(function () {
- Revision.finishSaveNoteRevision(note, revision, callback)
- }).catch(function (err) {
- return callback(err, null)
- })
- }).catch(function (err) {
- return callback(err, null)
- })
- }
+ Revision.create({
+ noteId: note.id,
+ patch: patch,
+ content: note.content,
+ length: note.content.length,
+ authorship: note.authorship
+ }).then(function (revision) {
+ // clear last revision content to reduce db size
+ latestRevision.update({
+ content: null
+ }).then(function () {
+ Revision.finishSaveNoteRevision(note, revision, callback)
+ }).catch(function (err) {
+ return callback(err, null)
+ })
+ }).catch(function (err) {
+ return callback(err, null)
})
}
- }).catch(function (err) {
- return callback(err, null)
- })
- },
- finishSaveNoteRevision: function (note, revision, callback) {
- note.update({
- savedAt: revision.updatedAt
- }).then(function () {
- return callback(null, revision)
- }).catch(function (err) {
- return callback(err, null)
})
}
- }
- })
+ }).catch(function (err) {
+ return callback(err, null)
+ })
+ }
+ Revision.finishSaveNoteRevision = function (note, revision, callback) {
+ note.update({
+ savedAt: revision.updatedAt
+ }).then(function () {
+ return callback(null, revision)
+ }).catch(function (err) {
+ return callback(err, null)
+ })
+ }
return Revision
}
diff --git a/lib/models/user.js b/lib/models/user.js
index 648db73e..50c78108 100644
--- a/lib/models/user.js
+++ b/lib/models/user.js
@@ -1,11 +1,20 @@
'use strict'
// external modules
-var Sequelize = require('sequelize')
-var scrypt = require('@mlink/scrypt')
+const Sequelize = require('sequelize')
+const crypto = require('crypto')
+if (!crypto.scrypt) {
+ // polyfill for Node.js 8.x, which lacks crypto.scrypt; see https://github.com/chrisveness/scrypt-kdf#openssl-implementation
+ const scryptAsync = require('scrypt-async')
+ crypto.scrypt = function (password, salt, keylen, options, callback) {
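+ // scrypt-async names the key length dkLen instead of keylen; the remaining options are passed through unchanged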
+ const opt = Object.assign({}, options, { dkLen: keylen })
+ scryptAsync(password, salt, opt, (derivedKey) => callback(null, Buffer.from(derivedKey)))
+ }
+}
+const scrypt = require('scrypt-kdf')
// core
-var logger = require('../logger')
-var {generateAvatarURL} = require('../letter-avatars')
+const logger = require('../logger')
+const { generateAvatarURL } = require('../letter-avatars')
module.exports = function (sequelize, DataTypes) {
var User = sequelize.define('User', {
@@ -41,117 +50,125 @@ module.exports = function (sequelize, DataTypes) {
}
},
password: {
- type: Sequelize.TEXT,
- set: function (value) {
- var hash = scrypt.kdfSync(value, scrypt.paramsSync(0.1)).toString('hex')
- this.setDataValue('password', hash)
+ type: Sequelize.TEXT
+ }
+ })
+
+ User.prototype.verifyPassword = function (attempt) {
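+ // scrypt-kdf's verify() resolves to a boolean, so callers must handle the returned Promise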
+ return scrypt.verify(Buffer.from(this.password, 'hex'), attempt)
+ }
+
+ User.associate = function (models) {
+ User.hasMany(models.Note, {
+ foreignKey: 'ownerId',
+ constraints: false
+ })
+ User.hasMany(models.Note, {
+ foreignKey: 'lastchangeuserId',
+ constraints: false
+ })
+ }
+ User.getProfile = function (user) {
+ if (!user) {
+ return null
+ }
+ return user.profile ? User.parseProfile(user.profile) : (user.email ? User.parseProfileByEmail(user.email) : null)
+ }
+ User.parseProfile = function (profile) {
+ try {
+ profile = JSON.parse(profile)
+ } catch (err) {
+ logger.error(err)
+ profile = null
+ }
+ if (profile) {
+ profile = {
+ name: profile.displayName || profile.username,
+ photo: User.parsePhotoByProfile(profile),
+ biggerphoto: User.parsePhotoByProfile(profile, true)
}
}
- }, {
- instanceMethods: {
- verifyPassword: function (attempt) {
- if (scrypt.verifyKdfSync(Buffer.from(this.password, 'hex'), attempt)) {
- return this
+ return profile
+ }
+ User.parsePhotoByProfile = function (profile, bigger) {
+ var photo = null
+ switch (profile.provider) {
+ case 'facebook':
+ photo = 'https://graph.facebook.com/' + profile.id + '/picture'
+ if (bigger) photo += '?width=400'
+ else photo += '?width=96'
+ break
+ case 'twitter':
+ photo = 'https://twitter.com/' + profile.username + '/profile_image'
+ if (bigger) photo += '?size=original'
+ else photo += '?size=bigger'
+ break
+ case 'github':
+ photo = 'https://avatars.githubusercontent.com/u/' + profile.id
+ if (bigger) photo += '?s=400'
+ else photo += '?s=96'
+ break
+ case 'gitlab':
+ photo = profile.avatarUrl
+ if (photo) {
+ if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
+ else photo = photo.replace(/(\?s=)\d*$/i, '$196')
} else {
- return false
- }
- }
- },
- classMethods: {
- associate: function (models) {
- User.hasMany(models.Note, {
- foreignKey: 'ownerId',
- constraints: false
- })
- User.hasMany(models.Note, {
- foreignKey: 'lastchangeuserId',
- constraints: false
- })
- },
- getProfile: function (user) {
- if (!user) {
- return null
- }
- return user.profile ? User.parseProfile(user.profile) : (user.email ? User.parseProfileByEmail(user.email) : null)
- },
- parseProfile: function (profile) {
- try {
- profile = JSON.parse(profile)
- } catch (err) {
- logger.error(err)
- profile = null
+ photo = generateAvatarURL(profile.username)
}
- if (profile) {
- profile = {
- name: profile.displayName || profile.username,
- photo: User.parsePhotoByProfile(profile),
- biggerphoto: User.parsePhotoByProfile(profile, true)
- }
- }
- return profile
- },
- parsePhotoByProfile: function (profile, bigger) {
- var photo = null
- switch (profile.provider) {
- case 'facebook':
- photo = 'https://graph.facebook.com/' + profile.id + '/picture'
- if (bigger) photo += '?width=400'
- else photo += '?width=96'
- break
- case 'twitter':
- photo = 'https://twitter.com/' + profile.username + '/profile_image'
- if (bigger) photo += '?size=original'
- else photo += '?size=bigger'
- break
- case 'github':
- photo = 'https://avatars.githubusercontent.com/u/' + profile.id
- if (bigger) photo += '?s=400'
- else photo += '?s=96'
- break
- case 'gitlab':
- photo = profile.avatarUrl
- if (photo) {
- if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
- else photo = photo.replace(/(\?s=)\d*$/i, '$196')
- } else {
- photo = generateAvatarURL(profile.username)
- }
- break
- case 'mattermost':
- photo = profile.avatarUrl
- if (photo) {
- if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
- else photo = photo.replace(/(\?s=)\d*$/i, '$196')
- } else {
- photo = generateAvatarURL(profile.username)
- }
- break
- case 'dropbox':
- photo = generateAvatarURL('', profile.emails[0].value, bigger)
- break
- case 'google':
- photo = profile.photos[0].value
- if (bigger) photo = photo.replace(/(\?sz=)\d*$/i, '$1400')
- else photo = photo.replace(/(\?sz=)\d*$/i, '$196')
- break
- case 'ldap':
- photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
- break
- case 'saml':
- photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
- break
- }
- return photo
- },
- parseProfileByEmail: function (email) {
- return {
- name: email.substring(0, email.lastIndexOf('@')),
- photo: generateAvatarURL('', email, false),
- biggerphoto: generateAvatarURL('', email, true)
+ break
+ case 'mattermost':
+ photo = profile.avatarUrl
+ if (photo) {
+ if (bigger) photo = photo.replace(/(\?s=)\d*$/i, '$1400')
+ else photo = photo.replace(/(\?s=)\d*$/i, '$196')
+ } else {
+ photo = generateAvatarURL(profile.username)
}
- }
+ break
+ case 'dropbox':
+ photo = generateAvatarURL('', profile.emails[0].value, bigger)
+ break
+ case 'google':
+ photo = profile.photos[0].value
+ if (bigger) photo = photo.replace(/(\?sz=)\d*$/i, '$1400')
+ else photo = photo.replace(/(\?sz=)\d*$/i, '$196')
+ break
+ case 'ldap':
+ photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
+ break
+ case 'saml':
+ photo = generateAvatarURL(profile.username, profile.emails[0], bigger)
+ break
+ default:
+ photo = generateAvatarURL(profile.username)
+ break
}
- })
+ return photo
+ }
+ User.parseProfileByEmail = function (email) {
+ return {
+ name: email.substring(0, email.lastIndexOf('@')),
+ photo: generateAvatarURL('', email, false),
+ biggerphoto: generateAvatarURL('', email, true)
+ }
+ }
+
+ function updatePasswordHashHook (user, options) {
+ // suggested way to hash passwords to be able to do this asynchronously:
+ // @see https://github.com/sequelize/sequelize/issues/1821#issuecomment-44265819
+
+ if (!user.changed('password')) {
+ return Promise.resolve()
+ }
+
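+ // logN: 15 sets the scrypt cost factor N to 2^15; scrypt-kdf embeds its parameters and salt in the derived key, so the stored hex string alone is enough for verify()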
+ return scrypt.kdf(user.getDataValue('password'), { logN: 15 }).then(keyBuf => {
+ user.setDataValue('password', keyBuf.toString('hex'))
+ })
+ }
+
+ User.beforeCreate(updatePasswordHashHook)
+ User.beforeUpdate(updatePasswordHashHook)
return User
}
diff --git a/lib/realtime.js b/lib/realtime.js
index d04ffdc2..d85d728e 100644
--- a/lib/realtime.js
+++ b/lib/realtime.js
@@ -49,7 +49,7 @@ function secure (socket, next) {
if (handshakeData.sessionID &&
handshakeData.cookie[config.sessionName] &&
handshakeData.cookie[config.sessionName] !== handshakeData.sessionID) {
- if (config.debug) { logger.info('AUTH success cookie: ' + handshakeData.sessionID) }
+ logger.debug(`AUTH success cookie: ${handshakeData.sessionID}`)
return next()
} else {
next(new Error('AUTH failed: Cookie is invalid.'))
@@ -82,7 +82,7 @@ setInterval(function () {
async.each(Object.keys(notes), function (key, callback) {
var note = notes[key]
if (note.server.isDirty) {
- if (config.debug) logger.info('updater found dirty note: ' + key)
+ logger.debug(`updater found dirty note: ${key}`)
note.server.isDirty = false
updateNote(note, function (err, _note) {
// handle when note already been clean up
@@ -182,7 +182,7 @@ setInterval(function () {
var socket = realtime.io.sockets.connected[key]
if ((!socket && users[key]) ||
(socket && (!socket.rooms || socket.rooms.length <= 0))) {
- if (config.debug) { logger.info('cleaner found redundant user: ' + key) }
+ logger.debug(`cleaner found redundant user: ${key}`)
if (!socket) {
socket = {
id: key
@@ -429,11 +429,11 @@ function finishConnection (socket, noteId, socketId) {
if (config.debug) {
let noteId = socket.noteId
- logger.info('SERVER connected a client to [' + noteId + ']:')
- logger.info(JSON.stringify(user))
- // logger.info(notes);
+ logger.debug(`SERVER connected a client to [${noteId}]:`)
+ logger.debug(JSON.stringify(user))
+ logger.debug(notes)
getStatus(function (data) {
- logger.info(JSON.stringify(data))
+ logger.debug(JSON.stringify(data))
})
}
}
@@ -541,10 +541,8 @@ function disconnect (socket) {
if (isDisconnectBusy) return
isDisconnectBusy = true
- if (config.debug) {
- logger.info('SERVER disconnected a client')
- logger.info(JSON.stringify(users[socket.id]))
- }
+ logger.debug('SERVER disconnected a client')
+ logger.debug(JSON.stringify(users[socket.id]))
if (users[socket.id]) {
delete users[socket.id]
@@ -574,9 +572,9 @@ function disconnect (socket) {
delete note.server
delete notes[noteId]
if (config.debug) {
- // logger.info(notes);
+ logger.debug(notes)
getStatus(function (data) {
- logger.info(JSON.stringify(data))
+ logger.debug(JSON.stringify(data))
})
}
})
@@ -595,9 +593,9 @@ function disconnect (socket) {
if (disconnectSocketQueue.length > 0) { disconnect(disconnectSocketQueue[0]) }
if (config.debug) {
- // logger.info(notes);
+ logger.debug(notes)
getStatus(function (data) {
- logger.info(JSON.stringify(data))
+ logger.debug(JSON.stringify(data))
})
}
}
@@ -774,7 +772,7 @@ function connection (socket) {
var noteId = socket.noteId
var user = users[socket.id]
if (!noteId || !notes[noteId] || !user) return
- if (config.debug) { logger.info('SERVER received [' + noteId + '] user status from [' + socket.id + ']: ' + JSON.stringify(data)) }
+ logger.debug(`SERVER received [${noteId}] user status from [${socket.id}]: ${JSON.stringify(data)}`)
if (data) {
user.idle = data.idle
user.type = data.type
diff --git a/lib/response.js b/lib/response.js
index 8191e74f..6450bdf5 100644
--- a/lib/response.js
+++ b/lib/response.js
@@ -18,12 +18,12 @@ var utils = require('./utils')
// public
var response = {
errorForbidden: function (res) {
- const {req} = res
+ const { req } = res
if (req.user) {
responseError(res, '403', 'Forbidden', 'oh no.')
} else {
req.flash('error', 'You are not allowed to access this page. Maybe try logging in?')
- res.redirect(config.serverURL)
+ res.redirect(config.serverURL + '/')
}
},
errorNotFound: function (res) {
@@ -70,6 +70,7 @@ function showIndex (req, res, next) {
signin: authStatus,
infoMessage: req.flash('info'),
errorMessage: req.flash('error'),
+ imprint: fs.existsSync(path.join(config.docsPath, 'imprint.md')),
privacyStatement: fs.existsSync(path.join(config.docsPath, 'privacy.md')),
termsOfUse: fs.existsSync(path.join(config.docsPath, 'terms-of-use.md')),
deleteToken: deleteToken
@@ -226,7 +227,8 @@ function showPublishNote (req, res, next) {
robots: meta.robots || false, // default allow robots
GA: meta.GA,
disqus: meta.disqus,
- cspNonce: res.locals.nonce
+ cspNonce: res.locals.nonce,
+ dnt: req.headers.dnt
}
return renderPublish(data, res)
}).catch(function (err) {
@@ -427,7 +429,7 @@ function publishNoteActions (req, res, next) {
actionDownload(req, res, note)
break
case 'edit':
- res.redirect(config.serverURL + '/' + (note.alias ? note.alias : models.Note.encodeNoteId(note.id)))
+ res.redirect(config.serverURL + '/' + (note.alias ? note.alias : models.Note.encodeNoteId(note.id)) + '?both')
break
default:
res.redirect(config.serverURL + '/s/' + note.shortid)
@@ -441,7 +443,7 @@ function publishSlideActions (req, res, next) {
var action = req.params.action
switch (action) {
case 'edit':
- res.redirect(config.serverURL + '/' + (note.alias ? note.alias : models.Note.encodeNoteId(note.id)))
+ res.redirect(config.serverURL + '/' + (note.alias ? note.alias : models.Note.encodeNoteId(note.id)) + '?both')
break
default:
res.redirect(config.serverURL + '/p/' + note.shortid)
@@ -549,16 +551,16 @@ function gitlabActionProjects (req, res, note) {
ret.accesstoken = user.accessToken
ret.profileid = user.profileid
request(
- config.gitlab.baseURL + '/api/' + config.gitlab.version + '/projects?membership=yes&per_page=100&access_token=' + user.accessToken,
- function (error, httpResponse, body) {
- if (!error && httpResponse.statusCode === 200) {
- ret.projects = JSON.parse(body)
- return res.send(ret)
- } else {
- return res.send(ret)
- }
- }
- )
+ config.gitlab.baseURL + '/api/' + config.gitlab.version + '/projects?membership=yes&per_page=100&access_token=' + user.accessToken,
+ function (error, httpResponse, body) {
+ if (!error && httpResponse.statusCode === 200) {
+ ret.projects = JSON.parse(body)
+ return res.send(ret)
+ } else {
+ return res.send(ret)
+ }
+ }
+ )
}).catch(function (err) {
logger.error('gitlab action projects failed: ' + err)
return response.errorInternalError(res)
@@ -608,7 +610,8 @@ function showPublishSlide (req, res, next) {
robots: meta.robots || false, // default allow robots
GA: meta.GA,
disqus: meta.disqus,
- cspNonce: res.locals.nonce
+ cspNonce: res.locals.nonce,
+ dnt: req.headers.dnt
}
return renderPublishSlide(data, res)
}).catch(function (err) {
diff --git a/lib/utils.js b/lib/utils.js
index 247f85f2..1725f6e8 100644
--- a/lib/utils.js
+++ b/lib/utils.js
@@ -21,6 +21,8 @@ exports.getImageMimeType = function getImageMimeType (imagePath) {
return 'image/png'
case 'tiff':
return 'image/tiff'
+ case 'svg':
+ return 'image/svg+xml'
default:
return undefined
}
diff --git a/lib/web/auth/dropbox/index.js b/lib/web/auth/dropbox/index.js
index c44607da..1cfabd29 100644
--- a/lib/web/auth/dropbox/index.js
+++ b/lib/web/auth/dropbox/index.js
@@ -4,7 +4,7 @@ const Router = require('express').Router
const passport = require('passport')
const DropboxStrategy = require('passport-dropbox-oauth2').Strategy
const config = require('../../../config')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let dropboxAuth = module.exports = Router()
diff --git a/lib/web/auth/email/index.js b/lib/web/auth/email/index.js
index f7e58d46..32e21428 100644
--- a/lib/web/auth/email/index.js
+++ b/lib/web/auth/email/index.js
@@ -7,8 +7,8 @@ const LocalStrategy = require('passport-local').Strategy
const config = require('../../../config')
const models = require('../../../models')
const logger = require('../../../logger')
-const {setReturnToFromReferer} = require('../utils')
-const {urlencodedParser} = require('../../utils')
+const { setReturnToFromReferer } = require('../utils')
+const { urlencodedParser } = require('../../utils')
const response = require('../../../response')
let emailAuth = module.exports = Router()
@@ -23,8 +23,14 @@ passport.use(new LocalStrategy({
}
}).then(function (user) {
if (!user) return done(null, false)
- if (!user.verifyPassword(password)) return done(null, false)
- return done(null, user)
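+ // verifyPassword is now asynchronous (scrypt-kdf returns a Promise), so wait for it before answering passport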
+ user.verifyPassword(password).then(verified => {
+ if (verified) {
+ return done(null, user)
+ } else {
+ logger.warn('invalid password given for %s', user.email)
+ return done(null, false)
+ }
+ })
}).catch(function (err) {
logger.error(err)
return done(err)
diff --git a/lib/web/auth/facebook/index.js b/lib/web/auth/facebook/index.js
index b2364989..418ddeee 100644
--- a/lib/web/auth/facebook/index.js
+++ b/lib/web/auth/facebook/index.js
@@ -5,7 +5,7 @@ const passport = require('passport')
const FacebookStrategy = require('passport-facebook').Strategy
const config = require('../../../config')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let facebookAuth = module.exports = Router()
diff --git a/lib/web/auth/github/index.js b/lib/web/auth/github/index.js
index ece634ba..afa5fa31 100644
--- a/lib/web/auth/github/index.js
+++ b/lib/web/auth/github/index.js
@@ -5,7 +5,7 @@ const passport = require('passport')
const GithubStrategy = require('passport-github').Strategy
const config = require('../../../config')
const response = require('../../../response')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let githubAuth = module.exports = Router()
diff --git a/lib/web/auth/gitlab/index.js b/lib/web/auth/gitlab/index.js
index 38436024..4cebbc10 100644
--- a/lib/web/auth/gitlab/index.js
+++ b/lib/web/auth/gitlab/index.js
@@ -5,7 +5,7 @@ const passport = require('passport')
const GitlabStrategy = require('passport-gitlab2').Strategy
const config = require('../../../config')
const response = require('../../../response')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let gitlabAuth = module.exports = Router()
diff --git a/lib/web/auth/google/index.js b/lib/web/auth/google/index.js
index 60282cf5..ad9bcd7a 100644
--- a/lib/web/auth/google/index.js
+++ b/lib/web/auth/google/index.js
@@ -4,21 +4,22 @@ const Router = require('express').Router
const passport = require('passport')
var GoogleStrategy = require('passport-google-oauth20').Strategy
const config = require('../../../config')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let googleAuth = module.exports = Router()
passport.use(new GoogleStrategy({
clientID: config.google.clientID,
clientSecret: config.google.clientSecret,
- callbackURL: config.serverURL + '/auth/google/callback'
+ callbackURL: config.serverURL + '/auth/google/callback',
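+ // fetch profiles from the plain OAuth2 userinfo endpoint so the lookup does not rely on the deprecated Google+ API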
+ userProfileURL: 'https://www.googleapis.com/oauth2/v3/userinfo'
}, passportGeneralCallback))
googleAuth.get('/auth/google', function (req, res, next) {
setReturnToFromReferer(req)
passport.authenticate('google', { scope: ['profile'] })(req, res, next)
})
- // google auth callback
+// google auth callback
googleAuth.get('/auth/google/callback',
passport.authenticate('google', {
successReturnToOrRedirect: config.serverURL + '/',
diff --git a/lib/web/auth/ldap/index.js b/lib/web/auth/ldap/index.js
index 8d71c18e..96143664 100644
--- a/lib/web/auth/ldap/index.js
+++ b/lib/web/auth/ldap/index.js
@@ -6,8 +6,8 @@ const LDAPStrategy = require('passport-ldapauth')
const config = require('../../../config')
const models = require('../../../models')
const logger = require('../../../logger')
-const {setReturnToFromReferer} = require('../utils')
-const {urlencodedParser} = require('../../utils')
+const { setReturnToFromReferer } = require('../utils')
+const { urlencodedParser } = require('../../utils')
const response = require('../../../response')
let ldapAuth = module.exports = Router()
@@ -66,11 +66,11 @@ passport.use(new LDAPStrategy({
}
if (needSave) {
user.save().then(function () {
- if (config.debug) { logger.debug('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
- if (config.debug) { logger.debug('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
diff --git a/lib/web/auth/mattermost/index.js b/lib/web/auth/mattermost/index.js
index 63a4886f..48d6d297 100644
--- a/lib/web/auth/mattermost/index.js
+++ b/lib/web/auth/mattermost/index.js
@@ -5,7 +5,7 @@ const passport = require('passport')
const Mattermost = require('mattermost')
const OAuthStrategy = require('passport-oauth2').Strategy
const config = require('../../../config')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
const mattermost = new Mattermost.Client()
@@ -24,12 +24,12 @@ mattermostStrategy.userProfile = (accessToken, done) => {
mattermost.token = accessToken
mattermost.useHeaderToken()
mattermost.getMe(
- (data) => {
- done(null, data)
- },
- (err) => {
- done(err)
- }
+ (data) => {
+ done(null, data)
+ },
+ (err) => {
+ done(err)
+ }
)
}
diff --git a/lib/web/auth/oauth2/index.js b/lib/web/auth/oauth2/index.js
index 57ab9b9a..78434271 100644
--- a/lib/web/auth/oauth2/index.js
+++ b/lib/web/auth/oauth2/index.js
@@ -4,7 +4,7 @@ const Router = require('express').Router
const passport = require('passport')
const { Strategy, InternalOAuthError } = require('passport-oauth2')
const config = require('../../../config')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let oauth2Auth = module.exports = Router()
diff --git a/lib/web/auth/openid/index.js b/lib/web/auth/openid/index.js
index c45c6d71..b0a28bec 100644
--- a/lib/web/auth/openid/index.js
+++ b/lib/web/auth/openid/index.js
@@ -6,8 +6,8 @@ const OpenIDStrategy = require('@passport-next/passport-openid').Strategy
const config = require('../../../config')
const models = require('../../../models')
const logger = require('../../../logger')
-const {urlencodedParser} = require('../../utils')
-const {setReturnToFromReferer} = require('../utils')
+const { urlencodedParser } = require('../../utils')
+const { setReturnToFromReferer } = require('../utils')
let openIDAuth = module.exports = Router()
@@ -33,11 +33,11 @@ passport.use(new OpenIDStrategy({
}
if (needSave) {
user.save().then(function () {
- if (config.debug) { logger.info('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
- if (config.debug) { logger.info('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
diff --git a/lib/web/auth/saml/index.js b/lib/web/auth/saml/index.js
index b8d98340..40a6f8b3 100644
--- a/lib/web/auth/saml/index.js
+++ b/lib/web/auth/saml/index.js
@@ -6,7 +6,7 @@ const SamlStrategy = require('passport-saml').Strategy
const config = require('../../../config')
const models = require('../../../models')
const logger = require('../../../logger')
-const {urlencodedParser} = require('../../utils')
+const { urlencodedParser } = require('../../utils')
const fs = require('fs')
const intersection = function (array1, array2) { return array1.filter((n) => array2.includes(n)) }
@@ -17,7 +17,8 @@ passport.use(new SamlStrategy({
entryPoint: config.saml.idpSsoUrl,
issuer: config.saml.issuer || config.serverURL,
cert: fs.readFileSync(config.saml.idpCert, 'utf-8'),
- identifierFormat: config.saml.identifierFormat
+ identifierFormat: config.saml.identifierFormat,
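+ // when enabled, passport-saml omits the RequestedAuthnContext element from the request, which some IdPs (e.g. ADFS) reject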
+ disableRequestedAuthnContext: config.saml.disableRequestedAuthnContext
}, function (user, done) {
// check authorization if needed
if (config.saml.externalGroups && config.saml.groupAttribute) {
@@ -61,11 +62,11 @@ passport.use(new SamlStrategy({
}
if (needSave) {
user.save().then(function () {
- if (config.debug) { logger.debug('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
- if (config.debug) { logger.debug('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
diff --git a/lib/web/auth/twitter/index.js b/lib/web/auth/twitter/index.js
index c1860d93..5aba20ff 100644
--- a/lib/web/auth/twitter/index.js
+++ b/lib/web/auth/twitter/index.js
@@ -5,7 +5,7 @@ const passport = require('passport')
const TwitterStrategy = require('passport-twitter').Strategy
const config = require('../../../config')
-const {setReturnToFromReferer, passportGeneralCallback} = require('../utils')
+const { setReturnToFromReferer, passportGeneralCallback } = require('../utils')
let twitterAuth = module.exports = Router()
diff --git a/lib/web/auth/utils.js b/lib/web/auth/utils.js
index ff7a1237..141a0d6f 100644
--- a/lib/web/auth/utils.js
+++ b/lib/web/auth/utils.js
@@ -1,7 +1,6 @@
'use strict'
const models = require('../../models')
-const config = require('../../config')
const logger = require('../../logger')
exports.setReturnToFromReferer = function setReturnToFromReferer (req) {
@@ -38,11 +37,11 @@ exports.passportGeneralCallback = function callback (accessToken, refreshToken,
}
if (needSave) {
user.save().then(function () {
- if (config.debug) { logger.info('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
})
} else {
- if (config.debug) { logger.info('user login: ' + user.id) }
+ logger.debug(`user login: ${user.id}`)
return done(null, user)
}
}
diff --git a/lib/web/historyRouter.js b/lib/web/historyRouter.js
index 1b22c232..fa426bbb 100644
--- a/lib/web/historyRouter.js
+++ b/lib/web/historyRouter.js
@@ -2,7 +2,7 @@
const Router = require('express').Router
-const {urlencodedParser} = require('./utils')
+const { urlencodedParser } = require('./utils')
const history = require('../history')
const historyRouter = module.exports = Router()
diff --git a/lib/web/imageRouter/azure.js b/lib/web/imageRouter/azure.js
index cc98e5fc..22ee5585 100644
--- a/lib/web/imageRouter/azure.js
+++ b/lib/web/imageRouter/azure.js
@@ -7,13 +7,13 @@ const logger = require('../../logger')
const azure = require('azure-storage')
exports.uploadImage = function (imagePath, callback) {
- if (!imagePath || typeof imagePath !== 'string') {
- callback(new Error('Image path is missing or wrong'), null)
+ if (!callback || typeof callback !== 'function') {
+ logger.error('Callback has to be a function')
return
}
- if (!callback || typeof callback !== 'function') {
- logger.error('Callback has to be a function')
+ if (!imagePath || typeof imagePath !== 'string') {
+ callback(new Error('Image path is missing or wrong'), null)
return
}
diff --git a/lib/web/imageRouter/filesystem.js b/lib/web/imageRouter/filesystem.js
index 7c876d66..3ba09e88 100644
--- a/lib/web/imageRouter/filesystem.js
+++ b/lib/web/imageRouter/filesystem.js
@@ -6,13 +6,13 @@ const config = require('../../config')
const logger = require('../../logger')
exports.uploadImage = function (imagePath, callback) {
- if (!imagePath || typeof imagePath !== 'string') {
- callback(new Error('Image path is missing or wrong'), null)
+ if (!callback || typeof callback !== 'function') {
+ logger.error('Callback has to be a function')
return
}
- if (!callback || typeof callback !== 'function') {
- logger.error('Callback has to be a function')
+ if (!imagePath || typeof imagePath !== 'string') {
+ callback(new Error('Image path is missing or wrong'), null)
return
}
diff --git a/lib/web/imageRouter/imgur.js b/lib/web/imageRouter/imgur.js
index 2a20002c..dcb03a7a 100644
--- a/lib/web/imageRouter/imgur.js
+++ b/lib/web/imageRouter/imgur.js
@@ -5,24 +5,22 @@ const logger = require('../../logger')
const imgur = require('imgur')
exports.uploadImage = function (imagePath, callback) {
- if (!imagePath || typeof imagePath !== 'string') {
- callback(new Error('Image path is missing or wrong'), null)
+ if (!callback || typeof callback !== 'function') {
+ logger.error('Callback has to be a function')
return
}
- if (!callback || typeof callback !== 'function') {
- logger.error('Callback has to be a function')
+ if (!imagePath || typeof imagePath !== 'string') {
+ callback(new Error('Image path is missing or wrong'), null)
return
}
imgur.setClientId(config.imgur.clientID)
imgur.uploadFile(imagePath)
- .then(function (json) {
- if (config.debug) {
- logger.info('SERVER uploadimage success: ' + JSON.stringify(json))
- }
- callback(null, json.data.link.replace(/^http:\/\//i, 'https://'))
- }).catch(function (err) {
- callback(new Error(err), null)
- })
+ .then(function (json) {
+ logger.debug(`SERVER uploadimage success: ${JSON.stringify(json)}`)
+ callback(null, json.data.link.replace(/^http:\/\//i, 'https://'))
+ }).catch(function (err) {
+ callback(new Error(err), null)
+ })
}
diff --git a/lib/web/imageRouter/index.js b/lib/web/imageRouter/index.js
index f3c2decf..0b59218b 100644
--- a/lib/web/imageRouter/index.js
+++ b/lib/web/imageRouter/index.js
@@ -21,18 +21,19 @@ imageRouter.post('/uploadimage', function (req, res) {
form.parse(req, function (err, fields, files) {
if (err || !files.image || !files.image.path) {
+ logger.error(`formidable error: ${err}`)
response.errorForbidden(res)
} else {
- if (config.debug) {
- logger.info('SERVER received uploadimage: ' + JSON.stringify(files.image))
- }
+ logger.debug(`SERVER received uploadimage: ${JSON.stringify(files.image)}`)
const uploadProvider = require('./' + config.imageUploadType)
+ logger.debug(`imageRouter: Uploading ${files.image.path} using ${config.imageUploadType}`)
uploadProvider.uploadImage(files.image.path, function (err, url) {
if (err !== null) {
logger.error(err)
return res.status(500).end('upload image error')
}
+ logger.debug(`SERVER sending ${url} to client`)
res.send({
link: url
})
diff --git a/lib/web/imageRouter/lutim.js b/lib/web/imageRouter/lutim.js
new file mode 100644
index 00000000..61930ad6
--- /dev/null
+++ b/lib/web/imageRouter/lutim.js
@@ -0,0 +1,30 @@
+'use strict'
+const config = require('../../config')
+const logger = require('../../logger')
+
+const lutim = require('lutim')
+
+exports.uploadImage = function (imagePath, callback) {
+ if (!callback || typeof callback !== 'function') {
+ logger.error('Callback has to be a function')
+ return
+ }
+
+ if (!imagePath || typeof imagePath !== 'string') {
+ callback(new Error('Image path is missing or wrong'), null)
+ return
+ }
+
+ if (config.lutim && config.lutim.url) {
+ lutim.setAPIUrl(config.lutim.url)
+ logger.debug(`Set lutim URL to ${lutim.getAPIUrl()}`)
+ }
+
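+ // Lutim answers with a JSON body whose msg.short field is the short image id; appending it to the API URL yields the public link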
+ lutim.uploadImage(imagePath)
+ .then(function (json) {
+ logger.debug(`SERVER uploadimage success: ${JSON.stringify(json)}`)
+ callback(null, lutim.getAPIUrl() + json.msg.short)
+ }).catch(function (err) {
+ callback(new Error(err), null)
+ })
+}
diff --git a/lib/web/imageRouter/minio.js b/lib/web/imageRouter/minio.js
index b921c2d2..fe43f76f 100644
--- a/lib/web/imageRouter/minio.js
+++ b/lib/web/imageRouter/minio.js
@@ -3,7 +3,7 @@ const fs = require('fs')
const path = require('path')
const config = require('../../config')
-const {getImageMimeType} = require('../../utils')
+const { getImageMimeType } = require('../../utils')
const logger = require('../../logger')
const Minio = require('minio')
@@ -40,7 +40,9 @@ exports.uploadImage = function (imagePath, callback) {
callback(new Error(err), null)
return
}
- callback(null, `${protocol}://${config.minio.endPoint}:${config.minio.port}/${config.s3bucket}/${key}`)
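+ // skip the port in the public URL when it is the protocol default (80 for http, 443 for https)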
+ let hidePort = [80, 443].includes(config.minio.port)
+ let urlPort = hidePort ? '' : `:${config.minio.port}`
+ callback(null, `${protocol}://${config.minio.endPoint}${urlPort}/${config.s3bucket}/${key}`)
})
})
}
diff --git a/lib/web/imageRouter/s3.js b/lib/web/imageRouter/s3.js
index f2a5a5df..2bf08cc7 100644
--- a/lib/web/imageRouter/s3.js
+++ b/lib/web/imageRouter/s3.js
@@ -3,7 +3,7 @@ const fs = require('fs')
const path = require('path')
const config = require('../../config')
-const {getImageMimeType} = require('../../utils')
+const { getImageMimeType } = require('../../utils')
const logger = require('../../logger')
const AWS = require('aws-sdk')
@@ -35,6 +35,7 @@ exports.uploadImage = function (imagePath, callback) {
const mimeType = getImageMimeType(imagePath)
if (mimeType) { params.ContentType = mimeType }
+ logger.debug(`S3 object parameters: ${JSON.stringify(params)}`)
s3.putObject(params, function (err, data) {
if (err) {
callback(new Error(err), null)
diff --git a/lib/web/middleware/tooBusy.js b/lib/web/middleware/tooBusy.js
index f1b72330..49efbe37 100644
--- a/lib/web/middleware/tooBusy.js
+++ b/lib/web/middleware/tooBusy.js
@@ -3,6 +3,9 @@
const toobusy = require('toobusy-js')
const response = require('../../response')
+const config = require('../../config')
+
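+// maxLag is the event-loop lag (in milliseconds) above which toobusy() starts reporting the server as too busy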
+toobusy.maxLag(config.tooBusyLag)
module.exports = function (req, res, next) {
if (toobusy()) {
diff --git a/lib/web/noteRouter.js b/lib/web/noteRouter.js
index 41bf5f73..bac2cf88 100644
--- a/lib/web/noteRouter.js
+++ b/lib/web/noteRouter.js
@@ -4,7 +4,7 @@ const Router = require('express').Router
const response = require('../response')
-const {markdownParser} = require('./utils')
+const { markdownParser } = require('./utils')
const noteRouter = module.exports = Router()
diff --git a/lib/web/statusRouter.js b/lib/web/statusRouter.js
index 2b9cb65f..da69e62c 100644
--- a/lib/web/statusRouter.js
+++ b/lib/web/statusRouter.js
@@ -8,7 +8,7 @@ const config = require('../config')
const models = require('../models')
const logger = require('../logger')
-const {urlencodedParser} = require('./utils')
+const { urlencodedParser } = require('./utils')
const statusRouter = module.exports = Router()
@@ -68,9 +68,7 @@ statusRouter.post('/temp', urlencodedParser, function (req, res) {
if (!data) {
response.errorForbidden(res)
} else {
- if (config.debug) {
- logger.info('SERVER received temp from [' + host + ']: ' + req.body.data)
- }
+ logger.debug(`SERVER received temp from [${host}]: ${req.body.data}`)
models.Temp.create({
data: data
}).then(function (temp) {
diff --git a/lib/web/userRouter.js b/lib/web/userRouter.js
index ca364422..73b519ec 100644
--- a/lib/web/userRouter.js
+++ b/lib/web/userRouter.js
@@ -8,7 +8,7 @@ const response = require('../response')
const config = require('../config')
const models = require('../models')
const logger = require('../logger')
-const {generateAvatar} = require('../letter-avatars')
+const { generateAvatar } = require('../letter-avatars')
const UserRouter = module.exports = Router()
diff --git a/lib/workers/dmpWorker.js b/lib/workers/dmpWorker.js
index 60db0a12..ca68b4ab 100644
--- a/lib/workers/dmpWorker.js
+++ b/lib/workers/dmpWorker.js
@@ -4,7 +4,6 @@ var DiffMatchPatch = require('diff-match-patch')
var dmp = new DiffMatchPatch()
// core
-var config = require('../config')
var logger = require('../logger')
process.on('message', function (data) {
@@ -61,10 +60,8 @@ function createPatch (lastDoc, currDoc) {
var patch = dmp.patch_make(lastDoc, diff)
patch = dmp.patch_toText(patch)
var msEnd = (new Date()).getTime()
- if (config.debug) {
- logger.info(patch)
- logger.info((msEnd - msStart) + 'ms')
- }
+ logger.debug(patch)
+ logger.debug((msEnd - msStart) + 'ms')
return patch
}
@@ -123,9 +120,7 @@ function getRevision (revisions, count) {
authorship: authorship
}
var msEnd = (new Date()).getTime()
- if (config.debug) {
- logger.info((msEnd - msStart) + 'ms')
- }
+ logger.debug((msEnd - msStart) + 'ms')
return data
}